1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind; }
63 /* Set 'nocode_wanted' after unconditional jumps */
64 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
65 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
69 #define gjmp gjmp_acs
70 /* <---- */
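/* Illustrative sketch (editorial, not part of tccgen.c): how the *_acs
   wrappers above are meant to pair up.  gjmp() (the _acs variant) suppresses
   code generation right after an unconditional forward jump, and gsym()
   re-enables it once the jump target is bound. */
#if 0
static void dead_code_suppression_example(void)
{
    int t = gjmp(0);  /* unconditional forward jump; CODE_OFF() kicks in */
    /* anything generated here has nocode_wanted set and is dropped */
    gsym(t);          /* jump target: gsym_addr() binds it, CODE_ON() resumes */
}
#endif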
72 ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing) */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /* list of temporary local variables on the stack in the current function. */
94 ST_DATA struct temp_local_variable {
95 int location; // offset on stack, SValue.c.i
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC int ieee_finite(double d)
149 int p[4];
150 memcpy(p, &d, sizeof(double));
151 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
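/* Illustrative check (editorial, not part of tccgen.c), assuming a
   little-endian IEEE-754 double: p[1] holds sign|exponent|top mantissa bits;
   OR-ing with 0x800fffff leaves only the 11 exponent bits variable, so the
   +1 wraps to 0 (and the shift yields 0) exactly for Inf/NaN. */
#if 0
#include <assert.h>
#include <math.h>
static void ieee_finite_selftest(void)
{
    assert(ieee_finite(0.0) && ieee_finite(1.5) && ieee_finite(-1e300));
    assert(!ieee_finite(INFINITY) && !ieee_finite(-INFINITY));
    assert(!ieee_finite(NAN));
}
#endif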
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
158 #endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
175 #if 0
176 void pv (const char *lbl, int a, int b)
178 int i;
179 for (i = a; i < a + b; ++i) {
180 SValue *p = &vtop[-i];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
185 #endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
240 /* put function symbol */
241 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
243 char buf[512];
245 if (!s1->do_debug)
246 return;
248 /* stabs info */
249 /* XXX: we put here a dummy type */
250 snprintf(buf, sizeof(buf), "%s:%c1",
251 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
252 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
253 cur_text_section, sym->c);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE, 0, file->line_num, 0);
257 last_ind = 0;
258 last_line_num = 0;
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC int tccgen_compile(TCCState *s1)
272 cur_text_section = NULL;
273 funcname = "";
274 anon_sym = SYM_FIRST_ANOM;
275 section_sym = 0;
276 const_wanted = 0;
277 nocode_wanted = 0x80000000;
278 local_scope = 0;
280 /* define some often used types */
281 int_type.t = VT_INT;
282 char_pointer_type.t = VT_BYTE;
283 mk_pointer(&char_pointer_type);
284 #if PTR_SIZE == 4
285 size_type.t = VT_INT | VT_UNSIGNED;
286 ptrdiff_type.t = VT_INT;
287 #elif LONG_SIZE == 4
288 size_type.t = VT_LLONG | VT_UNSIGNED;
289 ptrdiff_type.t = VT_LLONG;
290 #else
291 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
292 ptrdiff_type.t = VT_LONG | VT_LLONG;
293 #endif
294 func_old_type.t = VT_FUNC;
295 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
296 func_old_type.ref->f.func_call = FUNC_CDECL;
297 func_old_type.ref->f.func_type = FUNC_OLD;
299 tcc_debug_start(s1);
301 #ifdef TCC_TARGET_ARM
302 arm_init(s1);
303 #endif
305 #ifdef INC_DEBUG
306 printf("%s: **** new file\n", file->filename);
307 #endif
309 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
310 next();
311 decl(VT_CONST);
312 gen_inline_functions(s1);
313 check_vstack();
314 /* end of translation unit info */
315 tcc_debug_end(s1);
316 return 0;
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC void update_storage(Sym *sym)
330 ElfSym *esym;
331 int sym_bind, old_sym_bind;
333 esym = elfsym(sym);
334 if (!esym)
335 return;
337 if (sym->a.visibility)
338 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
339 | sym->a.visibility;
341 if (sym->type.t & (VT_STATIC | VT_INLINE))
342 sym_bind = STB_LOCAL;
343 else if (sym->a.weak)
344 sym_bind = STB_WEAK;
345 else
346 sym_bind = STB_GLOBAL;
347 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
348 if (sym_bind != old_sym_bind) {
349 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
352 #ifdef TCC_TARGET_PE
353 if (sym->a.dllimport)
354 esym->st_other |= ST_PE_IMPORT;
355 if (sym->a.dllexport)
356 esym->st_other |= ST_PE_EXPORT;
357 #endif
359 #if 0
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym->v, NULL),
362 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
363 sym->a.visibility,
364 sym->a.dllexport,
365 sym->a.dllimport
367 #endif
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
375 addr_t value, unsigned long size,
376 int can_add_underscore)
378 int sym_type, sym_bind, info, other, t;
379 ElfSym *esym;
380 const char *name;
381 char buf1[256];
382 #ifdef CONFIG_TCC_BCHECK
383 char buf[32];
384 #endif
386 if (!sym->c) {
387 name = get_tok_str(sym->v, NULL);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state->do_bounds_check) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
393 switch(sym->v) {
394 #ifdef TCC_TARGET_PE
395 /* XXX: we rely only on malloc hooks */
396 case TOK_malloc:
397 case TOK_free:
398 case TOK_realloc:
399 case TOK_memalign:
400 case TOK_calloc:
401 #endif
402 case TOK_memcpy:
403 case TOK_memmove:
404 case TOK_memset:
405 case TOK_strlen:
406 case TOK_strcpy:
407 case TOK_alloca:
408 strcpy(buf, "__bound_");
409 strcat(buf, name);
410 name = buf;
411 break;
414 #endif
415 t = sym->type.t;
416 if ((t & VT_BTYPE) == VT_FUNC) {
417 sym_type = STT_FUNC;
418 } else if ((t & VT_BTYPE) == VT_VOID) {
419 sym_type = STT_NOTYPE;
420 } else {
421 sym_type = STT_OBJECT;
423 if (t & (VT_STATIC | VT_INLINE))
424 sym_bind = STB_LOCAL;
425 else
426 sym_bind = STB_GLOBAL;
427 other = 0;
428 #ifdef TCC_TARGET_PE
429 if (sym_type == STT_FUNC && sym->type.ref) {
430 Sym *ref = sym->type.ref;
431 if (ref->a.nodecorate) {
432 can_add_underscore = 0;
434 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
435 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
436 name = buf1;
437 other |= ST_PE_STDCALL;
438 can_add_underscore = 0;
441 #endif
442 if (tcc_state->leading_underscore && can_add_underscore) {
443 buf1[0] = '_';
444 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
445 name = buf1;
447 if (sym->asm_label)
448 name = get_tok_str(sym->asm_label, NULL);
449 info = ELFW(ST_INFO)(sym_bind, sym_type);
450 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
451 } else {
452 esym = elfsym(sym);
453 esym->st_value = value;
454 esym->st_size = size;
455 esym->st_shndx = sh_num;
457 update_storage(sym);
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
486 #if PTR_SIZE == 4
487 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
489 greloca(s, sym, offset, type, 0);
491 #endif
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym *__sym_malloc(void)
497 Sym *sym_pool, *sym, *last_sym;
498 int i;
500 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
501 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
503 last_sym = sym_free_first;
504 sym = sym_pool;
505 for(i = 0; i < SYM_POOL_NB; i++) {
506 sym->next = last_sym;
507 last_sym = sym;
508 sym++;
510 sym_free_first = last_sym;
511 return last_sym;
514 static inline Sym *sym_malloc(void)
516 Sym *sym;
517 #ifndef SYM_DEBUG
518 sym = sym_free_first;
519 if (!sym)
520 sym = __sym_malloc();
521 sym_free_first = sym->next;
522 return sym;
523 #else
524 sym = tcc_malloc(sizeof(Sym));
525 return sym;
526 #endif
529 ST_INLN void sym_free(Sym *sym)
531 #ifndef SYM_DEBUG
532 sym->next = sym_free_first;
533 sym_free_first = sym;
534 #else
535 tcc_free(sym);
536 #endif
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
598 Sym *s, **ps;
599 TokenSym *ts;
601 if (local_stack)
602 ps = &local_stack;
603 else
604 ps = &global_stack;
605 s = sym_push2(ps, v, type->t, c);
606 s->type.ref = type->ref;
607 s->r = r;
608 /* don't record fields or anonymous symbols */
609 /* XXX: simplify */
610 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
611 /* record symbol in token array */
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 s->prev_tok = *ps;
618 *ps = s;
619 s->sym_scope = local_scope;
620 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v & ~SYM_STRUCT, NULL));
624 return s;
627 /* push a global identifier */
628 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
630 Sym *s, **ps;
631 s = sym_push2(&global_stack, v, t, c);
632 s->r = VT_CONST | VT_SYM;
633 /* don't record anonymous symbol */
634 if (v < SYM_FIRST_ANOM) {
635 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps != NULL && (*ps)->sym_scope)
639 ps = &(*ps)->prev_tok;
640 s->prev_tok = *ps;
641 *ps = s;
643 return s;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
650 Sym *s, *ss, **ps;
651 TokenSym *ts;
652 int v;
654 s = *ptop;
655 while(s != b) {
656 ss = s->prev;
657 v = s->v;
658 /* remove symbol in token array */
659 /* XXX: simplify */
660 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
661 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
662 if (v & SYM_STRUCT)
663 ps = &ts->sym_struct;
664 else
665 ps = &ts->sym_identifier;
666 *ps = s->prev_tok;
668 if (!keep)
669 sym_free(s);
670 s = ss;
672 if (!keep)
673 *ptop = b;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot leave cpu flags set if other instructions are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as its value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop->r == VT_CMP && !nocode_wanted)
692 gv(RC_INT);
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
718 /* pop stack value */
719 ST_FUNC void vpop(void)
721 int v;
722 v = vtop->r & VT_VALMASK;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
725 if (v == TREG_ST0) {
726 o(0xd8dd); /* fstp %st(0) */
727 } else
728 #endif
729 if (v == VT_CMP) {
730 /* need to put correct jump if && or || without test */
731 gsym(vtop->jtrue);
732 gsym(vtop->jfalse);
734 vtop--;
737 /* push constant of type "type" with useless value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC void vrotb(int n)
810 int i;
811 SValue tmp;
813 vcheck_cmp();
814 tmp = vtop[-n + 1];
815 for(i=-n+1;i!=0;i++)
816 vtop[i] = vtop[i+1];
817 vtop[0] = tmp;
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC void vrote(SValue *e, int n)
825 int i;
826 SValue tmp;
828 vcheck_cmp();
829 tmp = *e;
830 for(i = 0;i < n - 1; i++)
831 e[-i] = e[-i - 1];
832 e[-n + 1] = tmp;
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC void vrott(int n)
840 vrote(vtop, n);
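/* Worked example (editorial, not part of tccgen.c): with three entries
   a,b,c and c on top, vrotb(3) yields b,c,a (the deepest entry surfaces
   to vtop), while vrott(3) yields c,a,b (the top entry sinks below the
   other two). */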
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC void vset_VT_CMP(int op)
849 vtop->r = VT_CMP;
850 vtop->cmp_op = op;
851 vtop->jfalse = 0;
852 vtop->jtrue = 0;
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op = vtop->cmp_op;
859 if (vtop->jtrue || vtop->jfalse) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv = op & (op < 2); /* small optimization */
862 vseti(VT_JMP+inv, gvtst(inv, 0));
863 } else {
864 /* otherwise convert flags (rsp. 0/1) to register */
865 vtop->c.i = op;
866 if (op < 2) /* doesn't seem to happen */
867 vtop->r = VT_CONST;
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv, int t)
874 int *p;
875 if (vtop->r != VT_CMP) {
876 vpushi(0);
877 gen_op(TOK_NE);
878 if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
880 else if (vtop->r == VT_CONST)
881 vset_VT_CMP(vtop->c.i != 0);
882 else
883 tcc_error("ICE");
885 p = inv ? &vtop->jfalse : &vtop->jtrue;
886 *p = gjmp_append(*p, t);
889 /* Generate value test
891 * Generate a test for any value (jump, comparison and integers) */
892 static int gvtst(int inv, int t)
894 int op, u, x;
896 gvtst_set(inv, t);
898 t = vtop->jtrue, u = vtop->jfalse;
899 if (inv)
900 x = u, u = t, t = x;
901 op = vtop->cmp_op;
903 /* jump to the wanted target */
904 if (op > 1)
905 t = gjmp_cond(op ^ inv, t);
906 else if (op != inv)
907 t = gjmp(t);
908 /* resolve complementary jumps to here */
909 gsym(u);
911 vtop--;
912 return t;
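/* Illustrative sketch (editorial, not part of tccgen.c): the usual calling
   pattern for gvtst() when lowering a conditional.  With inv != 0 the
   emitted jump is taken when the condition is false, so the guarded code
   that follows is skipped; gsym() later binds the jump right after it. */
#if 0
static void gvtst_usage_example(void)
{
    /* the condition value is assumed to be on top of the value stack */
    int t = gvtst(1, 0);   /* jump away when the condition is false */
    /* ... generate the statements guarded by the condition ... */
    gsym(t);               /* execution resumes here when it was false */
}
#endif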
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType *type, Sym *sym)
919 CValue cval;
920 cval.i = 0;
921 vsetc(type, VT_CONST | VT_SYM, &cval);
922 vtop->sym = sym;
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
928 int v;
929 Sym *sym;
931 v = anon_sym++;
932 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
933 sym->type.t |= VT_STATIC;
934 put_extern_sym(sym, sec, offset, size);
935 return sym;
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
941 vpushsym(type, get_sym_ref(type, sec, offset, size));
944 /* define a new external reference to a symbol 'v' of type 'u' */
945 ST_FUNC Sym *external_global_sym(int v, CType *type)
947 Sym *s;
949 s = sym_find(v);
950 if (!s) {
951 /* push forward reference */
952 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
953 s->type.ref = type->ref;
954 } else if (IS_ASM_SYM(s)) {
955 s->type.t = type->t | (s->type.t & VT_EXTERN);
956 s->type.ref = type->ref;
957 update_storage(s);
959 return s;
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
965 if (sa1->aligned && !sa->aligned)
966 sa->aligned = sa1->aligned;
967 sa->packed |= sa1->packed;
968 sa->weak |= sa1->weak;
969 if (sa1->visibility != STV_DEFAULT) {
970 int vis = sa->visibility;
971 if (vis == STV_DEFAULT
972 || vis > sa1->visibility)
973 vis = sa1->visibility;
974 sa->visibility = vis;
976 sa->dllexport |= sa1->dllexport;
977 sa->nodecorate |= sa1->nodecorate;
978 sa->dllimport |= sa1->dllimport;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
984 if (fa1->func_call && !fa->func_call)
985 fa->func_call = fa1->func_call;
986 if (fa1->func_type && !fa->func_type)
987 fa->func_type = fa1->func_type;
988 if (fa1->func_args && !fa->func_args)
989 fa->func_args = fa1->func_args;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
995 merge_symattr(&ad->a, &ad1->a);
996 merge_funcattr(&ad->f, &ad1->f);
998 if (ad1->section)
999 ad->section = ad1->section;
1000 if (ad1->alias_target)
1001 ad->alias_target = ad1->alias_target;
1002 if (ad1->asm_label)
1003 ad->asm_label = ad1->asm_label;
1004 if (ad1->attr_mode)
1005 ad->attr_mode = ad1->attr_mode;
1008 /* Merge some type attributes. */
1009 static void patch_type(Sym *sym, CType *type)
1011 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1012 if (!(sym->type.t & VT_EXTERN))
1013 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1014 sym->type.t &= ~VT_EXTERN;
1017 if (IS_ASM_SYM(sym)) {
1018 /* stay static if both are static */
1019 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1020 sym->type.ref = type->ref;
1023 if (!is_compatible_types(&sym->type, type)) {
1024 tcc_error("incompatible types for redefinition of '%s'",
1025 get_tok_str(sym->v, NULL));
1027 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1028 int static_proto = sym->type.t & VT_STATIC;
1029 /* warn if static follows non-static function declaration */
1030 if ((type->t & VT_STATIC) && !static_proto
1031 /* XXX this test for inline shouldn't be here. Until we
1032 implement gnu-inline mode again it silences a warning for
1033 mingw caused by our workarounds. */
1034 && !((type->t | sym->type.t) & VT_INLINE))
1035 tcc_warning("static storage ignored for redefinition of '%s'",
1036 get_tok_str(sym->v, NULL));
1038 /* set 'inline' if both agree or if one has static */
1039 if ((type->t | sym->type.t) & VT_INLINE) {
1040 if (!((type->t ^ sym->type.t) & VT_INLINE)
1041 || ((type->t | sym->type.t) & VT_STATIC))
1042 static_proto |= VT_INLINE;
1045 if (0 == (type->t & VT_EXTERN)) {
1046 /* put complete type, use static from prototype */
1047 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1048 sym->type.ref = type->ref;
1049 } else {
1050 sym->type.t &= ~VT_INLINE | static_proto;
1053 if (sym->type.ref->f.func_type == FUNC_OLD
1054 && type->ref->f.func_type != FUNC_OLD) {
1055 sym->type.ref = type->ref;
1058 } else {
1059 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1060 /* set array size if it was omitted in extern declaration */
1061 sym->type.ref->c = type->ref->c;
1063 if ((type->t ^ sym->type.t) & VT_STATIC)
1064 tcc_warning("storage mismatch for redefinition of '%s'",
1065 get_tok_str(sym->v, NULL));
1069 /* Merge some storage attributes. */
1070 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1072 if (type)
1073 patch_type(sym, type);
1075 #ifdef TCC_TARGET_PE
1076 if (sym->a.dllimport != ad->a.dllimport)
1077 tcc_error("incompatible dll linkage for redefinition of '%s'",
1078 get_tok_str(sym->v, NULL));
1079 #endif
1080 merge_symattr(&sym->a, &ad->a);
1081 if (ad->asm_label)
1082 sym->asm_label = ad->asm_label;
1083 update_storage(sym);
1086 /* copy sym to other stack */
1087 static Sym *sym_copy(Sym *s0, Sym **ps)
1089 Sym *s;
1090 s = sym_malloc(), *s = *s0;
1091 s->prev = *ps, *ps = s;
1092 if (s->v < SYM_FIRST_ANOM) {
1093 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1094 s->prev_tok = *ps, *ps = s;
1096 return s;
1099 /* copy a list of syms */
1100 static void sym_copy_ref(Sym *s0, Sym **ps)
1102 Sym *s, **sp = &s0->type.ref;
1103 for (s = *sp, *sp = NULL; s; s = s->next)
1104 sp = &(*sp = sym_copy(s, ps))->next;
1107 /* define a new external reference to a symbol 'v' */
1108 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1110 Sym *s; int bt;
1112 /* look for global symbol */
1113 s = sym_find(v);
1114 while (s && s->sym_scope)
1115 s = s->prev_tok;
1117 if (!s) {
1118 /* push forward reference */
1119 s = global_identifier_push(v, type->t, 0);
1120 s->r |= r;
1121 s->a = ad->a;
1122 s->asm_label = ad->asm_label;
1123 s->type.ref = type->ref;
1124 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1125 /* copy type to the global stack also */
1126 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1127 sym_copy_ref(s, &global_stack);
1128 } else {
1129 patch_storage(s, ad, type);
1130 bt = s->type.t & VT_BTYPE;
1132 /* push variables to local scope if any */
1133 if (local_stack && bt != VT_FUNC)
1134 s = sym_copy(s, &local_stack);
1135 return s;
1138 /* push a reference to global symbol v */
1139 ST_FUNC void vpush_global_sym(CType *type, int v)
1141 vpushsym(type, external_global_sym(v, type));
1144 /* save registers up to (vtop - n) stack entry */
1145 ST_FUNC void save_regs(int n)
1147 SValue *p, *p1;
1148 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1149 save_reg(p->r);
1152 /* save r to the memory stack, and mark it as being free */
1153 ST_FUNC void save_reg(int r)
1155 save_reg_upstack(r, 0);
1158 /* save r to the memory stack, and mark it as being free,
1159 if seen up to (vtop - n) stack entry */
1160 ST_FUNC void save_reg_upstack(int r, int n)
1162 int l, saved, size, align;
1163 SValue *p, *p1, sv;
1164 CType *type;
1166 if ((r &= VT_VALMASK) >= VT_CONST)
1167 return;
1168 if (nocode_wanted)
1169 return;
1171 /* modify all stack values */
1172 saved = 0;
1173 l = 0;
1174 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1175 if ((p->r & VT_VALMASK) == r ||
1176 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1177 /* must save value on stack if not already done */
1178 if (!saved) {
1179 /* NOTE: must reload 'r' because r might be equal to r2 */
1180 r = p->r & VT_VALMASK;
1181 /* store register in the stack */
1182 type = &p->type;
1183 if ((p->r & VT_LVAL) ||
1184 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1185 #if PTR_SIZE == 8
1186 type = &char_pointer_type;
1187 #else
1188 type = &int_type;
1189 #endif
1190 size = type_size(type, &align);
1191 l=get_temp_local_var(size,align);
1192 sv.type.t = type->t;
1193 sv.r = VT_LOCAL | VT_LVAL;
1194 sv.c.i = l;
1195 store(r, &sv);
1196 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1197 /* x86 specific: need to pop fp register ST0 if saved */
1198 if (r == TREG_ST0) {
1199 o(0xd8dd); /* fstp %st(0) */
1201 #endif
1202 #if PTR_SIZE == 4
1203 /* special long long case */
1204 if ((type->t & VT_BTYPE) == VT_LLONG) {
1205 sv.c.i += 4;
1206 store(p->r2, &sv);
1208 #endif
1209 saved = 1;
1211 /* mark that stack entry as being saved on the stack */
1212 if (p->r & VT_LVAL) {
1213 /* also clear the bounded flag because the
1214 relocation address of the function was stored in
1215 p->c.i */
1216 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1217 } else {
1218 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1220 p->r2 = VT_CONST;
1221 p->c.i = l;
1226 #ifdef TCC_TARGET_ARM
1227 /* find a register of class 'rc2' with at most one reference on stack.
1228 * If none, call get_reg(rc) */
1229 ST_FUNC int get_reg_ex(int rc, int rc2)
1231 int r;
1232 SValue *p;
1234 for(r=0;r<NB_REGS;r++) {
1235 if (reg_classes[r] & rc2) {
1236 int n;
1237 n=0;
1238 for(p = vstack; p <= vtop; p++) {
1239 if ((p->r & VT_VALMASK) == r ||
1240 (p->r2 & VT_VALMASK) == r)
1241 n++;
1243 if (n <= 1)
1244 return r;
1247 return get_reg(rc);
1249 #endif
1251 /* find a free register of class 'rc'. If none, save one register */
1252 ST_FUNC int get_reg(int rc)
1254 int r;
1255 SValue *p;
1257 /* find a free register */
1258 for(r=0;r<NB_REGS;r++) {
1259 if (reg_classes[r] & rc) {
1260 if (nocode_wanted)
1261 return r;
1262 for(p=vstack;p<=vtop;p++) {
1263 if ((p->r & VT_VALMASK) == r ||
1264 (p->r2 & VT_VALMASK) == r)
1265 goto notfound;
1267 return r;
1269 notfound: ;
1272 /* no register left : free the first one on the stack (VERY
1273 IMPORTANT to start from the bottom to ensure that we don't
1274 spill registers used in gen_opi()) */
1275 for(p=vstack;p<=vtop;p++) {
1276 /* look at second register (if long long) */
1277 r = p->r2 & VT_VALMASK;
1278 if (r < VT_CONST && (reg_classes[r] & rc))
1279 goto save_found;
1280 r = p->r & VT_VALMASK;
1281 if (r < VT_CONST && (reg_classes[r] & rc)) {
1282 save_found:
1283 save_reg(r);
1284 return r;
1287 /* Should never come here */
1288 return -1;
1291 /* find a free temporary local variable matching the size and alignment (return its offset on the stack). If none, add a new temporary stack variable. */
1292 static int get_temp_local_var(int size,int align){
1293 int i;
1294 struct temp_local_variable *temp_var;
1295 int found_var;
1296 SValue *p;
1297 int r;
1298 char free;
1299 char found;
1300 found=0;
1301 for(i=0;i<nb_temp_local_vars;i++){
1302 temp_var=&arr_temp_local_vars[i];
1303 if(temp_var->size<size||align!=temp_var->align){
1304 continue;
1306 /*check if temp_var is free*/
1307 free=1;
1308 for(p=vstack;p<=vtop;p++) {
1309 r=p->r&VT_VALMASK;
1310 if(r==VT_LOCAL||r==VT_LLOCAL){
1311 if(p->c.i==temp_var->location){
1312 free=0;
1313 break;
1317 if(free){
1318 found_var=temp_var->location;
1319 found=1;
1320 break;
1323 if(!found){
1324 loc = (loc - size) & -align;
1325 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1326 temp_var=&arr_temp_local_vars[i];
1327 temp_var->location=loc;
1328 temp_var->size=size;
1329 temp_var->align=align;
1330 nb_temp_local_vars++;
1332 found_var=loc;
1334 return found_var;
1337 static void clear_temp_local_var_list(){
1338 nb_temp_local_vars=0;
1341 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1342 if needed */
1343 static void move_reg(int r, int s, int t)
1345 SValue sv;
1347 if (r != s) {
1348 save_reg(r);
1349 sv.type.t = t;
1350 sv.type.ref = NULL;
1351 sv.r = s;
1352 sv.c.i = 0;
1353 load(r, &sv);
1357 /* get address of vtop (vtop MUST BE an lvalue) */
1358 ST_FUNC void gaddrof(void)
1360 vtop->r &= ~VT_LVAL;
1361 /* tricky: if saved lvalue, then we can go back to lvalue */
1362 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1363 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1368 #ifdef CONFIG_TCC_BCHECK
1369 /* generate lvalue bound code */
1370 static void gbound(void)
1372 int lval_type;
1373 CType type1;
1375 vtop->r &= ~VT_MUSTBOUND;
1376 /* if lvalue, then use checking code before dereferencing */
1377 if (vtop->r & VT_LVAL) {
1378 /* if not VT_BOUNDED value, then make one */
1379 if (!(vtop->r & VT_BOUNDED)) {
1380 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1381 /* must save type because we must set it to int to get pointer */
1382 type1 = vtop->type;
1383 vtop->type.t = VT_PTR;
1384 gaddrof();
1385 vpushi(0);
1386 gen_bounded_ptr_add();
1387 vtop->r |= lval_type;
1388 vtop->type = type1;
1390 /* then check for dereferencing */
1391 gen_bounded_ptr_deref();
1394 #endif
1396 static void incr_bf_adr(int o)
1398 vtop->type = char_pointer_type;
1399 gaddrof();
1400 vpushi(o);
1401 gen_op('+');
1402 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1403 | (VT_BYTE|VT_UNSIGNED);
1404 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1405 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1408 /* single-byte load mode for packed or otherwise unaligned bitfields */
1409 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1411 int n, o, bits;
1412 save_reg_upstack(vtop->r, 1);
1413 vpush64(type->t & VT_BTYPE, 0); // B X
1414 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1415 do {
1416 vswap(); // X B
1417 incr_bf_adr(o);
1418 vdup(); // X B B
1419 n = 8 - bit_pos;
1420 if (n > bit_size)
1421 n = bit_size;
1422 if (bit_pos)
1423 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1424 if (n < 8)
1425 vpushi((1 << n) - 1), gen_op('&');
1426 gen_cast(type);
1427 if (bits)
1428 vpushi(bits), gen_op(TOK_SHL);
1429 vrotb(3); // B Y X
1430 gen_op('|'); // B X
1431 bits += n, bit_size -= n, o = 1;
1432 } while (bit_size);
1433 vswap(), vpop();
1434 if (!(type->t & VT_UNSIGNED)) {
1435 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1436 vpushi(n), gen_op(TOK_SHL);
1437 vpushi(n), gen_op(TOK_SAR);
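/* Illustrative model (editorial, not part of tccgen.c): what the byte-wise
   loop above computes, written as plain C for an unsigned field of
   bit_size bits starting at bit_pos inside an unaligned byte buffer.
   The final sign extension for signed fields (the SHL/SAR pair) is
   omitted; the helper name is editorial. */
#if 0
static unsigned load_packed_bf_model(const unsigned char *p,
                                     int bit_pos, int bit_size)
{
    unsigned x = 0;
    int bits = 0, o = bit_pos >> 3;
    bit_pos &= 7;
    while (bit_size) {
        int n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        x |= (unsigned)((p[o] >> bit_pos) & ((1u << n) - 1)) << bits;
        bits += n, bit_size -= n, bit_pos = 0, o++;
    }
    return x;
}
#endif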
1441 /* single-byte store mode for packed or otherwise unaligned bitfields */
1442 static void store_packed_bf(int bit_pos, int bit_size)
1444 int bits, n, o, m, c;
1446 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1447 vswap(); // X B
1448 save_reg_upstack(vtop->r, 1);
1449 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1450 do {
1451 incr_bf_adr(o); // X B
1452 vswap(); //B X
1453 c ? vdup() : gv_dup(); // B V X
1454 vrott(3); // X B V
1455 if (bits)
1456 vpushi(bits), gen_op(TOK_SHR);
1457 if (bit_pos)
1458 vpushi(bit_pos), gen_op(TOK_SHL);
1459 n = 8 - bit_pos;
1460 if (n > bit_size)
1461 n = bit_size;
1462 if (n < 8) {
1463 m = ((1 << n) - 1) << bit_pos;
1464 vpushi(m), gen_op('&'); // X B V1
1465 vpushv(vtop-1); // X B V1 B
1466 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1467 gen_op('&'); // X B V1 B1
1468 gen_op('|'); // X B V2
1470 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1471 vstore(), vpop(); // X B
1472 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1473 } while (bit_size);
1474 vpop(), vpop();
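/* Illustrative model (editorial, not part of tccgen.c): the byte-wise store
   above as plain C; each byte keeps its bits outside the field and receives
   the next n bits of the value.  The helper name is editorial. */
#if 0
static void store_packed_bf_model(unsigned char *p, int bit_pos,
                                  int bit_size, unsigned v)
{
    int bits = 0, o = bit_pos >> 3;
    bit_pos &= 7;
    while (bit_size) {
        int n = 8 - bit_pos;
        unsigned m;
        if (n > bit_size)
            n = bit_size;
        m = ((1u << n) - 1) << bit_pos;
        p[o] = (unsigned char)((p[o] & ~m) | (((v >> bits) << bit_pos) & m));
        bits += n, bit_size -= n, bit_pos = 0, o++;
    }
}
#endif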
1477 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1479 int t;
1480 if (0 == sv->type.ref)
1481 return 0;
1482 t = sv->type.ref->auxtype;
1483 if (t != -1 && t != VT_STRUCT) {
1484 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1485 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1487 return t;
1490 /* store vtop in a register belonging to class 'rc'. lvalues are
1491 converted to values. Cannot be used if the value cannot be converted
1492 to a register value (such as structures). */
1493 ST_FUNC int gv(int rc)
1495 int r, bit_pos, bit_size, size, align, rc2;
1497 /* NOTE: get_reg can modify vstack[] */
1498 if (vtop->type.t & VT_BITFIELD) {
1499 CType type;
1501 bit_pos = BIT_POS(vtop->type.t);
1502 bit_size = BIT_SIZE(vtop->type.t);
1503 /* remove bit field info to avoid loops */
1504 vtop->type.t &= ~VT_STRUCT_MASK;
1506 type.ref = NULL;
1507 type.t = vtop->type.t & VT_UNSIGNED;
1508 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1509 type.t |= VT_UNSIGNED;
1511 r = adjust_bf(vtop, bit_pos, bit_size);
1513 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1514 type.t |= VT_LLONG;
1515 else
1516 type.t |= VT_INT;
1518 if (r == VT_STRUCT) {
1519 load_packed_bf(&type, bit_pos, bit_size);
1520 } else {
1521 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1522 /* cast to int to propagate signedness in following ops */
1523 gen_cast(&type);
1524 /* generate shifts */
1525 vpushi(bits - (bit_pos + bit_size));
1526 gen_op(TOK_SHL);
1527 vpushi(bits - bit_size);
1528 /* NOTE: transformed to SHR if unsigned */
1529 gen_op(TOK_SAR);
1531 r = gv(rc);
1532 } else {
1533 if (is_float(vtop->type.t) &&
1534 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1535 unsigned long offset;
1536 /* CPUs usually cannot use float constants, so we store them
1537 generically in data segment */
1538 size = type_size(&vtop->type, &align);
1539 if (NODATA_WANTED)
1540 size = 0, align = 1;
1541 offset = section_add(data_section, size, align);
1542 vpush_ref(&vtop->type, data_section, offset, size);
1543 vswap();
1544 init_putv(&vtop->type, data_section, offset);
1545 vtop->r |= VT_LVAL;
1547 #ifdef CONFIG_TCC_BCHECK
1548 if (vtop->r & VT_MUSTBOUND)
1549 gbound();
1550 #endif
1552 r = vtop->r & VT_VALMASK;
1553 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1554 #ifndef TCC_TARGET_ARM64
1555 #ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
1556 if (rc == RC_IRET)
1557 rc2 = RC_LRET;
1558 #ifdef TCC_TARGET_X86_64
1559 else if (rc == RC_FRET)
1560 rc2 = RC_QRET;
1561 #endif
1562 #endif
1563 #endif
1564 /* need to reload if:
1565 - constant
1566 - lvalue (need to dereference pointer)
1567 - already a register, but not in the right class */
1568 if (r >= VT_CONST
1569 || (vtop->r & VT_LVAL)
1570 || !(reg_classes[r] & rc)
1571 #if PTR_SIZE == 8
1572 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1573 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1574 #else
1575 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1576 #endif
1579 r = get_reg(rc);
1580 #if PTR_SIZE == 8
1581 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1582 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1583 #else
1584 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1585 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1586 unsigned long long ll;
1587 #endif
1588 int r2, original_type;
1589 original_type = vtop->type.t;
1590 /* two register type load : expand to two words
1591 temporarily */
1592 #if PTR_SIZE == 4
1593 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1594 /* load constant */
1595 ll = vtop->c.i;
1596 vtop->c.i = ll; /* first word */
1597 load(r, vtop);
1598 vtop->r = r; /* save register value */
1599 vpushi(ll >> 32); /* second word */
1600 } else
1601 #endif
1602 if (vtop->r & VT_LVAL) {
1603 /* We do not want to modify the long long
1604 pointer here, so the safest (and least
1605 efficient) way is to save all the other registers
1606 on the stack. XXX: totally inefficient. */
1607 #if 0
1608 save_regs(1);
1609 #else
1610 /* lvalue_save: save only if used further down the stack */
1611 save_reg_upstack(vtop->r, 1);
1612 #endif
1613 /* load from memory */
1614 vtop->type.t = load_type;
1615 load(r, vtop);
1616 vdup();
1617 vtop[-1].r = r; /* save register value */
1618 /* increment pointer to get second word */
1619 vtop->type.t = addr_type;
1620 gaddrof();
1621 vpushi(load_size);
1622 gen_op('+');
1623 vtop->r |= VT_LVAL;
1624 vtop->type.t = load_type;
1625 } else {
1626 /* move registers */
1627 load(r, vtop);
1628 vdup();
1629 vtop[-1].r = r; /* save register value */
1630 vtop->r = vtop[-1].r2;
1632 /* Allocate second register. Here we rely on the fact that
1633 get_reg() tries first to free r2 of an SValue. */
1634 r2 = get_reg(rc2);
1635 load(r2, vtop);
1636 vpop();
1637 /* write second register */
1638 vtop->r2 = r2;
1639 vtop->type.t = original_type;
1640 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1641 int t1, t;
1642 /* lvalue of scalar type : need to use lvalue type
1643 because of possible cast */
1644 t = vtop->type.t;
1645 t1 = t;
1646 /* compute memory access type */
1647 if (vtop->r & VT_LVAL_BYTE)
1648 t = VT_BYTE;
1649 else if (vtop->r & VT_LVAL_SHORT)
1650 t = VT_SHORT;
1651 if (vtop->r & VT_LVAL_UNSIGNED)
1652 t |= VT_UNSIGNED;
1653 vtop->type.t = t;
1654 load(r, vtop);
1655 /* restore wanted type */
1656 vtop->type.t = t1;
1657 } else {
1658 if (vtop->r == VT_CMP)
1659 vset_VT_JMP();
1660 /* one register type load */
1661 load(r, vtop);
1664 vtop->r = r;
1665 #ifdef TCC_TARGET_C67
1666 /* uses register pairs for doubles */
1667 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1668 vtop->r2 = r+1;
1669 #endif
1671 return r;
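/* Illustrative sketch (editorial, not part of tccgen.c): the usual way gv()
   is used: put a value on the value stack, force it into a register of the
   wanted class, then hand the returned register number to the backend. */
#if 0
static void gv_usage_example(void)
{
    int r;
    vpushi(42);        /* integer constant on the value stack */
    r = gv(RC_INT);    /* materialized in some integer register */
    (void)r;           /* r can now be used by target code generation */
}
#endif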
1674 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1675 ST_FUNC void gv2(int rc1, int rc2)
1677 /* generate more generic register first. But VT_JMP or VT_CMP
1678 values must be generated first in all cases to avoid possible
1679 reload errors */
1680 if (vtop->r != VT_CMP && rc1 <= rc2) {
1681 vswap();
1682 gv(rc1);
1683 vswap();
1684 gv(rc2);
1685 /* test if reload is needed for first register */
1686 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1687 vswap();
1688 gv(rc1);
1689 vswap();
1691 } else {
1692 gv(rc2);
1693 vswap();
1694 gv(rc1);
1695 vswap();
1696 /* test if reload is needed for first register */
1697 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1698 gv(rc2);
1703 #ifndef TCC_TARGET_ARM64
1704 /* wrapper around RC_FRET to return a register by type */
1705 static int rc_fret(int t)
1707 #ifdef TCC_TARGET_X86_64
1708 if (t == VT_LDOUBLE) {
1709 return RC_ST0;
1711 #endif
1712 return RC_FRET;
1714 #endif
1716 /* wrapper around REG_FRET to return a register by type */
1717 static int reg_fret(int t)
1719 #ifdef TCC_TARGET_X86_64
1720 if (t == VT_LDOUBLE) {
1721 return TREG_ST0;
1723 #endif
1724 return REG_FRET;
1727 #if PTR_SIZE == 4
1728 /* expand 64bit on stack in two ints */
1729 ST_FUNC void lexpand(void)
1731 int u, v;
1732 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1733 v = vtop->r & (VT_VALMASK | VT_LVAL);
1734 if (v == VT_CONST) {
1735 vdup();
1736 vtop[0].c.i >>= 32;
1737 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1738 vdup();
1739 vtop[0].c.i += 4;
1740 } else {
1741 gv(RC_INT);
1742 vdup();
1743 vtop[0].r = vtop[-1].r2;
1744 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1746 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1748 #endif
1750 #if PTR_SIZE == 4
1751 /* build a long long from two ints */
1752 static void lbuild(int t)
1754 gv2(RC_INT, RC_INT);
1755 vtop[-1].r2 = vtop[0].r;
1756 vtop[-1].type.t = t;
1757 vpop();
1759 #endif
1761 /* convert stack entry to register and duplicate its value in another
1762 register */
1763 static void gv_dup(void)
1765 int rc, t, r, r1;
1766 SValue sv;
1768 t = vtop->type.t;
1769 #if PTR_SIZE == 4
1770 if ((t & VT_BTYPE) == VT_LLONG) {
1771 if (t & VT_BITFIELD) {
1772 gv(RC_INT);
1773 t = vtop->type.t;
1775 lexpand();
1776 gv_dup();
1777 vswap();
1778 vrotb(3);
1779 gv_dup();
1780 vrotb(4);
1781 /* stack: H L L1 H1 */
1782 lbuild(t);
1783 vrotb(3);
1784 vrotb(3);
1785 vswap();
1786 lbuild(t);
1787 vswap();
1788 } else
1789 #endif
1791 /* duplicate value */
1792 rc = RC_INT;
1793 sv.type.t = VT_INT;
1794 if (is_float(t)) {
1795 rc = RC_FLOAT;
1796 #ifdef TCC_TARGET_X86_64
1797 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1798 rc = RC_ST0;
1800 #endif
1801 sv.type.t = t;
1803 r = gv(rc);
1804 r1 = get_reg(rc);
1805 sv.r = r;
1806 sv.c.i = 0;
1807 load(r1, &sv); /* move r to r1 */
1808 vdup();
1809 /* duplicates value */
1810 if (r != r1)
1811 vtop->r = r1;
1815 #if PTR_SIZE == 4
1816 /* generate CPU independent (unsigned) long long operations */
1817 static void gen_opl(int op)
1819 int t, a, b, op1, c, i;
1820 int func;
1821 unsigned short reg_iret = REG_IRET;
1822 unsigned short reg_lret = REG_LRET;
1823 SValue tmp;
1825 switch(op) {
1826 case '/':
1827 case TOK_PDIV:
1828 func = TOK___divdi3;
1829 goto gen_func;
1830 case TOK_UDIV:
1831 func = TOK___udivdi3;
1832 goto gen_func;
1833 case '%':
1834 func = TOK___moddi3;
1835 goto gen_mod_func;
1836 case TOK_UMOD:
1837 func = TOK___umoddi3;
1838 gen_mod_func:
1839 #ifdef TCC_ARM_EABI
1840 reg_iret = TREG_R2;
1841 reg_lret = TREG_R3;
1842 #endif
1843 gen_func:
1844 /* call generic long long function */
1845 vpush_global_sym(&func_old_type, func);
1846 vrott(3);
1847 gfunc_call(2);
1848 vpushi(0);
1849 vtop->r = reg_iret;
1850 vtop->r2 = reg_lret;
1851 break;
1852 case '^':
1853 case '&':
1854 case '|':
1855 case '*':
1856 case '+':
1857 case '-':
1858 //pv("gen_opl A",0,2);
1859 t = vtop->type.t;
1860 vswap();
1861 lexpand();
1862 vrotb(3);
1863 lexpand();
1864 /* stack: L1 H1 L2 H2 */
1865 tmp = vtop[0];
1866 vtop[0] = vtop[-3];
1867 vtop[-3] = tmp;
1868 tmp = vtop[-2];
1869 vtop[-2] = vtop[-3];
1870 vtop[-3] = tmp;
1871 vswap();
1872 /* stack: H1 H2 L1 L2 */
1873 //pv("gen_opl B",0,4);
1874 if (op == '*') {
1875 vpushv(vtop - 1);
1876 vpushv(vtop - 1);
1877 gen_op(TOK_UMULL);
1878 lexpand();
1879 /* stack: H1 H2 L1 L2 ML MH */
1880 for(i=0;i<4;i++)
1881 vrotb(6);
1882 /* stack: ML MH H1 H2 L1 L2 */
1883 tmp = vtop[0];
1884 vtop[0] = vtop[-2];
1885 vtop[-2] = tmp;
1886 /* stack: ML MH H1 L2 H2 L1 */
1887 gen_op('*');
1888 vrotb(3);
1889 vrotb(3);
1890 gen_op('*');
1891 /* stack: ML MH M1 M2 */
1892 gen_op('+');
1893 gen_op('+');
1894 } else if (op == '+' || op == '-') {
1895 /* XXX: add non carry method too (for MIPS or alpha) */
1896 if (op == '+')
1897 op1 = TOK_ADDC1;
1898 else
1899 op1 = TOK_SUBC1;
1900 gen_op(op1);
1901 /* stack: H1 H2 (L1 op L2) */
1902 vrotb(3);
1903 vrotb(3);
1904 gen_op(op1 + 1); /* TOK_xxxC2 */
1905 } else {
1906 gen_op(op);
1907 /* stack: H1 H2 (L1 op L2) */
1908 vrotb(3);
1909 vrotb(3);
1910 /* stack: (L1 op L2) H1 H2 */
1911 gen_op(op);
1912 /* stack: (L1 op L2) (H1 op H2) */
1914 /* stack: L H */
1915 lbuild(t);
1916 break;
1917 case TOK_SAR:
1918 case TOK_SHR:
1919 case TOK_SHL:
1920 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1921 t = vtop[-1].type.t;
1922 vswap();
1923 lexpand();
1924 vrotb(3);
1925 /* stack: L H shift */
1926 c = (int)vtop->c.i;
1927 /* constant: simpler */
1928 /* NOTE: all comments are for SHL. the other cases are
1929 done by swapping words */
1930 vpop();
1931 if (op != TOK_SHL)
1932 vswap();
1933 if (c >= 32) {
1934 /* stack: L H */
1935 vpop();
1936 if (c > 32) {
1937 vpushi(c - 32);
1938 gen_op(op);
1940 if (op != TOK_SAR) {
1941 vpushi(0);
1942 } else {
1943 gv_dup();
1944 vpushi(31);
1945 gen_op(TOK_SAR);
1947 vswap();
1948 } else {
1949 vswap();
1950 gv_dup();
1951 /* stack: H L L */
1952 vpushi(c);
1953 gen_op(op);
1954 vswap();
1955 vpushi(32 - c);
1956 if (op == TOK_SHL)
1957 gen_op(TOK_SHR);
1958 else
1959 gen_op(TOK_SHL);
1960 vrotb(3);
1961 /* stack: L L H */
1962 vpushi(c);
1963 if (op == TOK_SHL)
1964 gen_op(TOK_SHL);
1965 else
1966 gen_op(TOK_SHR);
1967 gen_op('|');
1969 if (op != TOK_SHL)
1970 vswap();
1971 lbuild(t);
1972 } else {
1973 /* XXX: should provide a faster fallback on x86 ? */
1974 switch(op) {
1975 case TOK_SAR:
1976 func = TOK___ashrdi3;
1977 goto gen_func;
1978 case TOK_SHR:
1979 func = TOK___lshrdi3;
1980 goto gen_func;
1981 case TOK_SHL:
1982 func = TOK___ashldi3;
1983 goto gen_func;
1986 break;
1987 default:
1988 /* compare operations */
1989 t = vtop->type.t;
1990 vswap();
1991 lexpand();
1992 vrotb(3);
1993 lexpand();
1994 /* stack: L1 H1 L2 H2 */
1995 tmp = vtop[-1];
1996 vtop[-1] = vtop[-2];
1997 vtop[-2] = tmp;
1998 /* stack: L1 L2 H1 H2 */
1999 save_regs(4);
2000 /* compare high */
2001 op1 = op;
2002 /* when values are equal, we need to compare low words. since
2003 the jump is inverted, we invert the test too. */
2004 if (op1 == TOK_LT)
2005 op1 = TOK_LE;
2006 else if (op1 == TOK_GT)
2007 op1 = TOK_GE;
2008 else if (op1 == TOK_ULT)
2009 op1 = TOK_ULE;
2010 else if (op1 == TOK_UGT)
2011 op1 = TOK_UGE;
2012 a = 0;
2013 b = 0;
2014 gen_op(op1);
2015 if (op == TOK_NE) {
2016 b = gvtst(0, 0);
2017 } else {
2018 a = gvtst(1, 0);
2019 if (op != TOK_EQ) {
2020 /* generate non equal test */
2021 vpushi(0);
2022 vset_VT_CMP(TOK_NE);
2023 b = gvtst(0, 0);
2026 /* compare low. Always unsigned */
2027 op1 = op;
2028 if (op1 == TOK_LT)
2029 op1 = TOK_ULT;
2030 else if (op1 == TOK_LE)
2031 op1 = TOK_ULE;
2032 else if (op1 == TOK_GT)
2033 op1 = TOK_UGT;
2034 else if (op1 == TOK_GE)
2035 op1 = TOK_UGE;
2036 gen_op(op1);
2037 #if 0//def TCC_TARGET_I386
2038 if (op == TOK_NE) { gsym(b); break; }
2039 if (op == TOK_EQ) { gsym(a); break; }
2040 #endif
2041 gvtst_set(1, a);
2042 gvtst_set(0, b);
2043 break;
2046 #endif
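/* Illustrative model (editorial, not part of tccgen.c, assuming 32-bit int):
   the double-word comparison strategy used in the default case of gen_opl()
   above: the high words decide with a signed comparison, and only when they
   are equal do the low words decide, always unsigned. */
#if 0
static int lt64_model(int h1, unsigned l1, int h2, unsigned l2)
{
    if (h1 != h2)
        return h1 < h2;   /* signed comparison of the high words */
    return l1 < l2;       /* equal high words: unsigned comparison of low words */
}
#endif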
2048 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2050 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2051 return (a ^ b) >> 63 ? -x : x;
2054 static int gen_opic_lt(uint64_t a, uint64_t b)
2056 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
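/* Illustrative check (editorial, not part of tccgen.c): the two helpers
   above fold signed 64-bit '/' and '<' using only unsigned arithmetic,
   presumably so folding does not rely on the host's signed division and
   overflow behaviour.  A few spot checks: */
#if 0
#include <assert.h>
static void gen_opic_helpers_selftest(void)
{
    assert(gen_opic_sdiv((uint64_t)-7, 2) == (uint64_t)-3);  /* -7 /  2 == -3 */
    assert(gen_opic_sdiv(7, (uint64_t)-2) == (uint64_t)-3);  /*  7 / -2 == -3 */
    assert(gen_opic_lt((uint64_t)-1, 0));                    /* -1 < 0        */
    assert(!gen_opic_lt(0, (uint64_t)-1));                   /* !(0 < -1)     */
}
#endif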
2059 /* handle integer constant optimizations and various machine
2060 independent opt */
2061 static void gen_opic(int op)
2063 SValue *v1 = vtop - 1;
2064 SValue *v2 = vtop;
2065 int t1 = v1->type.t & VT_BTYPE;
2066 int t2 = v2->type.t & VT_BTYPE;
2067 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2068 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2069 uint64_t l1 = c1 ? v1->c.i : 0;
2070 uint64_t l2 = c2 ? v2->c.i : 0;
2071 int shm = (t1 == VT_LLONG) ? 63 : 31;
2073 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2074 l1 = ((uint32_t)l1 |
2075 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2076 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2077 l2 = ((uint32_t)l2 |
2078 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2080 if (c1 && c2) {
2081 switch(op) {
2082 case '+': l1 += l2; break;
2083 case '-': l1 -= l2; break;
2084 case '&': l1 &= l2; break;
2085 case '^': l1 ^= l2; break;
2086 case '|': l1 |= l2; break;
2087 case '*': l1 *= l2; break;
2089 case TOK_PDIV:
2090 case '/':
2091 case '%':
2092 case TOK_UDIV:
2093 case TOK_UMOD:
2094 /* if division by zero, generate explicit division */
2095 if (l2 == 0) {
2096 if (const_wanted)
2097 tcc_error("division by zero in constant");
2098 goto general_case;
2100 switch(op) {
2101 default: l1 = gen_opic_sdiv(l1, l2); break;
2102 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2103 case TOK_UDIV: l1 = l1 / l2; break;
2104 case TOK_UMOD: l1 = l1 % l2; break;
2106 break;
2107 case TOK_SHL: l1 <<= (l2 & shm); break;
2108 case TOK_SHR: l1 >>= (l2 & shm); break;
2109 case TOK_SAR:
2110 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2111 break;
2112 /* tests */
2113 case TOK_ULT: l1 = l1 < l2; break;
2114 case TOK_UGE: l1 = l1 >= l2; break;
2115 case TOK_EQ: l1 = l1 == l2; break;
2116 case TOK_NE: l1 = l1 != l2; break;
2117 case TOK_ULE: l1 = l1 <= l2; break;
2118 case TOK_UGT: l1 = l1 > l2; break;
2119 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2120 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2121 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2122 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2123 /* logical */
2124 case TOK_LAND: l1 = l1 && l2; break;
2125 case TOK_LOR: l1 = l1 || l2; break;
2126 default:
2127 goto general_case;
2129 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2130 l1 = ((uint32_t)l1 |
2131 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2132 v1->c.i = l1;
2133 vtop--;
2134 } else {
2135 /* if commutative ops, put c2 as constant */
2136 if (c1 && (op == '+' || op == '&' || op == '^' ||
2137 op == '|' || op == '*')) {
2138 vswap();
2139 c2 = c1; //c = c1, c1 = c2, c2 = c;
2140 l2 = l1; //l = l1, l1 = l2, l2 = l;
2142 if (!const_wanted &&
2143 c1 && ((l1 == 0 &&
2144 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2145 (l1 == -1 && op == TOK_SAR))) {
2146 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2147 vtop--;
2148 } else if (!const_wanted &&
2149 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2150 (op == '|' &&
2151 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2152 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2153 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2154 if (l2 == 1)
2155 vtop->c.i = 0;
2156 vswap();
2157 vtop--;
2158 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2159 op == TOK_PDIV) &&
2160 l2 == 1) ||
2161 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2162 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2163 l2 == 0) ||
2164 (op == '&' &&
2165 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2166 /* filter out NOP operations like x*1, x-0, x&-1... */
2167 vtop--;
2168 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2169 /* try to use shifts instead of muls or divs */
2170 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2171 int n = -1;
2172 while (l2) {
2173 l2 >>= 1;
2174 n++;
2176 vtop->c.i = n;
2177 if (op == '*')
2178 op = TOK_SHL;
2179 else if (op == TOK_PDIV)
2180 op = TOK_SAR;
2181 else
2182 op = TOK_SHR;
2184 goto general_case;
2185 } else if (c2 && (op == '+' || op == '-') &&
2186 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2187 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2188 /* symbol + constant case */
2189 if (op == '-')
2190 l2 = -l2;
2191 l2 += vtop[-1].c.i;
2192 /* The backends can't always deal with addends to symbols
2193 larger than +-1<<31. Don't construct such. */
2194 if ((int)l2 != l2)
2195 goto general_case;
2196 vtop--;
2197 vtop->c.i = l2;
2198 } else {
2199 general_case:
2200 /* call low level op generator */
2201 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2202 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2203 gen_opl(op);
2204 else
2205 gen_opi(op);
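/* Example of the strength reduction above: with a constant power-of-two
   operand, "x * 8" is rewritten as "x << 3" (the loop counts the bit
   position and op becomes TOK_SHL); a TOK_PDIV by 8 becomes an arithmetic
   shift right by 3, and a TOK_UDIV a logical shift right. */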
2210 /* generate a floating point operation with constant propagation */
2211 static void gen_opif(int op)
2213 int c1, c2;
2214 SValue *v1, *v2;
2215 #if defined _MSC_VER && defined __x86_64__
2216 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2217 volatile
2218 #endif
2219 long double f1, f2;
2221 v1 = vtop - 1;
2222 v2 = vtop;
2223 /* currently, we cannot do computations with forward symbols */
2224 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2225 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2226 if (c1 && c2) {
2227 if (v1->type.t == VT_FLOAT) {
2228 f1 = v1->c.f;
2229 f2 = v2->c.f;
2230 } else if (v1->type.t == VT_DOUBLE) {
2231 f1 = v1->c.d;
2232 f2 = v2->c.d;
2233 } else {
2234 f1 = v1->c.ld;
2235 f2 = v2->c.ld;
2238 /* NOTE: we only do constant propagation for finite numbers (not
2239 NaN or infinity) (ANSI spec) */
2240 if (!ieee_finite(f1) || !ieee_finite(f2))
2241 goto general_case;
2243 switch(op) {
2244 case '+': f1 += f2; break;
2245 case '-': f1 -= f2; break;
2246 case '*': f1 *= f2; break;
2247 case '/':
2248 if (f2 == 0.0) {
2249 /* If not in initializer we need to potentially generate
2250 FP exceptions at runtime, otherwise we want to fold. */
2251 if (!const_wanted)
2252 goto general_case;
2254 f1 /= f2;
2255 break;
2256 /* XXX: also handle tests ? */
2257 default:
2258 goto general_case;
2260 /* XXX: overflow test ? */
2261 if (v1->type.t == VT_FLOAT) {
2262 v1->c.f = f1;
2263 } else if (v1->type.t == VT_DOUBLE) {
2264 v1->c.d = f1;
2265 } else {
2266 v1->c.ld = f1;
2268 vtop--;
2269 } else {
2270 general_case:
2271 gen_opf(op);
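/* Example: "2.5 + 0.5" between constants is folded to 3.0 above; a
   division by 0.0 is only folded when const_wanted is set (constant
   expression context), otherwise code is emitted so the runtime FP
   behaviour (exceptions, inf/nan) is preserved. */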
2275 static int pointed_size(CType *type)
2277 int align;
2278 return type_size(pointed_type(type), &align);
2281 static void vla_runtime_pointed_size(CType *type)
2283 int align;
2284 vla_runtime_type_size(pointed_type(type), &align);
2287 static inline int is_null_pointer(SValue *p)
2289 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2290 return 0;
2291 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2292 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2293 ((p->type.t & VT_BTYPE) == VT_PTR &&
2294 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2295 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2296 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
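/* Example: plain 0, 0LL and "(void *)0" (with an unqualified pointed
   type) all qualify as null pointer constants here; "(char *)0" does
   not, because the pointed type must be void. */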
2299 static inline int is_integer_btype(int bt)
2301 return (bt == VT_BYTE || bt == VT_SHORT ||
2302 bt == VT_INT || bt == VT_LLONG);
2305 /* check types for comparison or subtraction of pointers */
2306 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2308 CType *type1, *type2, tmp_type1, tmp_type2;
2309 int bt1, bt2;
2311 /* null pointers are accepted for all comparisons, as in gcc */
2312 if (is_null_pointer(p1) || is_null_pointer(p2))
2313 return;
2314 type1 = &p1->type;
2315 type2 = &p2->type;
2316 bt1 = type1->t & VT_BTYPE;
2317 bt2 = type2->t & VT_BTYPE;
2318 /* accept comparison between pointer and integer with a warning */
2319 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2320 if (op != TOK_LOR && op != TOK_LAND )
2321 tcc_warning("comparison between pointer and integer");
2322 return;
2325 /* both must be pointers or implicit function pointers */
2326 if (bt1 == VT_PTR) {
2327 type1 = pointed_type(type1);
2328 } else if (bt1 != VT_FUNC)
2329 goto invalid_operands;
2331 if (bt2 == VT_PTR) {
2332 type2 = pointed_type(type2);
2333 } else if (bt2 != VT_FUNC) {
2334 invalid_operands:
2335 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2337 if ((type1->t & VT_BTYPE) == VT_VOID ||
2338 (type2->t & VT_BTYPE) == VT_VOID)
2339 return;
2340 tmp_type1 = *type1;
2341 tmp_type2 = *type2;
2342 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2343 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2344 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2345 /* gcc-like error if '-' is used */
2346 if (op == '-')
2347 goto invalid_operands;
2348 else
2349 tcc_warning("comparison of distinct pointer types lacks a cast");
2353 /* generic gen_op: handles type problems */
2354 ST_FUNC void gen_op(int op)
2356 int u, t1, t2, bt1, bt2, t;
2357 CType type1;
2359 redo:
2360 t1 = vtop[-1].type.t;
2361 t2 = vtop[0].type.t;
2362 bt1 = t1 & VT_BTYPE;
2363 bt2 = t2 & VT_BTYPE;
2365 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2366 tcc_error("operation on a struct");
2367 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2368 if (bt2 == VT_FUNC) {
2369 mk_pointer(&vtop->type);
2370 gaddrof();
2372 if (bt1 == VT_FUNC) {
2373 vswap();
2374 mk_pointer(&vtop->type);
2375 gaddrof();
2376 vswap();
2378 goto redo;
2379 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2380 /* at least one operand is a pointer */
2381 /* relational op: both operands must be pointers */
2382 if (op >= TOK_ULT && op <= TOK_LOR) {
2383 check_comparison_pointer_types(vtop - 1, vtop, op);
2384 /* pointers are handled as unsigned */
2385 #if PTR_SIZE == 8
2386 t = VT_LLONG | VT_UNSIGNED;
2387 #else
2388 t = VT_INT | VT_UNSIGNED;
2389 #endif
2390 goto std_op;
2392 /* if both pointers, then it must be the '-' op */
2393 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2394 if (op != '-')
2395 tcc_error("cannot use pointers here");
2396 check_comparison_pointer_types(vtop - 1, vtop, op);
2397 /* XXX: check that types are compatible */
2398 if (vtop[-1].type.t & VT_VLA) {
2399 vla_runtime_pointed_size(&vtop[-1].type);
2400 } else {
2401 vpushi(pointed_size(&vtop[-1].type));
2403 vrott(3);
2404 gen_opic(op);
2405 vtop->type.t = ptrdiff_type.t;
2406 vswap();
2407 gen_op(TOK_PDIV);
2408 } else {
2409 /* exactly one pointer : must be '+' or '-'. */
2410 if (op != '-' && op != '+')
2411 tcc_error("cannot use pointers here");
2412 /* Put pointer as first operand */
2413 if (bt2 == VT_PTR) {
2414 vswap();
2415 t = t1, t1 = t2, t2 = t;
2417 #if PTR_SIZE == 4
2418 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2419 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2420 gen_cast_s(VT_INT);
2421 #endif
2422 type1 = vtop[-1].type;
2423 type1.t &= ~VT_ARRAY;
2424 if (vtop[-1].type.t & VT_VLA)
2425 vla_runtime_pointed_size(&vtop[-1].type);
2426 else {
2427 u = pointed_size(&vtop[-1].type);
2428 if (u < 0)
2429 tcc_error("unknown array element size");
2430 #if PTR_SIZE == 8
2431 vpushll(u);
2432 #else
2433 /* XXX: cast to int ? (long long case) */
2434 vpushi(u);
2435 #endif
2437 gen_op('*');
2438 #if 0
2439 /* #ifdef CONFIG_TCC_BCHECK
2440 The main reason for removing this code:
2441 #include <stdio.h>
2442 int main ()
2444 int v[10];
2445 int i = 10;
2446 int j = 9;
2447 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2448 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2450 When this code is on, the output looks like
2451 v+i-j = 0xfffffffe
2452 v+(i-j) = 0xbff84000
2454 /* if evaluating constant expression, no code should be
2455 generated, so no bound check */
2456 if (tcc_state->do_bounds_check && !const_wanted) {
2457 /* if bounded pointers, we generate special code to
2458 test the bounds */
2459 if (op == '-') {
2460 vpushi(0);
2461 vswap();
2462 gen_op('-');
2464 gen_bounded_ptr_add();
2465 } else
2466 #endif
2468 gen_opic(op);
2470 /* restore the type in case gen_opic() swapped the operands */
2471 vtop->type = type1;
2473 } else if (is_float(bt1) || is_float(bt2)) {
2474 /* compute bigger type and do implicit casts */
2475 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2476 t = VT_LDOUBLE;
2477 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2478 t = VT_DOUBLE;
2479 } else {
2480 t = VT_FLOAT;
2482 /* floats can only be used for a few operations */
2483 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2484 (op < TOK_ULT || op > TOK_GT))
2485 tcc_error("invalid operands for binary operation");
2486 goto std_op;
2487 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2488 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2489 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2490 t |= VT_UNSIGNED;
2491 t |= (VT_LONG & t1);
2492 goto std_op;
2493 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2494 /* cast to biggest op */
2495 t = VT_LLONG | VT_LONG;
2496 if (bt1 == VT_LLONG)
2497 t &= t1;
2498 if (bt2 == VT_LLONG)
2499 t &= t2;
2500 /* convert to unsigned if it does not fit in a long long */
2501 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2502 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2503 t |= VT_UNSIGNED;
2504 goto std_op;
2505 } else {
2506 /* integer operations */
2507 t = VT_INT | (VT_LONG & (t1 | t2));
2508 /* convert to unsigned if it does not fit in an integer */
2509 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2510 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2511 t |= VT_UNSIGNED;
2512 std_op:
2513 /* XXX: currently, some unsigned operations are explicit, so
2514 we modify them here */
2515 if (t & VT_UNSIGNED) {
2516 if (op == TOK_SAR)
2517 op = TOK_SHR;
2518 else if (op == '/')
2519 op = TOK_UDIV;
2520 else if (op == '%')
2521 op = TOK_UMOD;
2522 else if (op == TOK_LT)
2523 op = TOK_ULT;
2524 else if (op == TOK_GT)
2525 op = TOK_UGT;
2526 else if (op == TOK_LE)
2527 op = TOK_ULE;
2528 else if (op == TOK_GE)
2529 op = TOK_UGE;
2531 vswap();
2532 type1.t = t;
2533 type1.ref = NULL;
2534 gen_cast(&type1);
2535 vswap();
2536 /* special case for shifts and long long: we keep the shift as
2537 an integer */
2538 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2539 type1.t = VT_INT;
2540 gen_cast(&type1);
2541 if (is_float(t))
2542 gen_opif(op);
2543 else
2544 gen_opic(op);
2545 if (op >= TOK_ULT && op <= TOK_GT) {
2546 /* relational op: the result is an int */
2547 vtop->type.t = VT_INT;
2548 } else {
2549 vtop->type.t = t;
2552 // Make sure that we have converted to an rvalue:
2553 if (vtop->r & VT_LVAL)
2554 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
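/* Example of the pointer arithmetic above: for "p + n" with "int *p",
   the integer operand is multiplied by the pointed size (sizeof(int))
   before the addition; for "p - q" the byte difference is divided by
   the element size with TOK_PDIV and the result gets ptrdiff_t type. */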
2557 #ifndef TCC_TARGET_ARM
2558 /* generic itof for unsigned long long case */
2559 static void gen_cvt_itof1(int t)
2561 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2562 gen_cvt_itof(t);
2563 #else
2564 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2565 (VT_LLONG | VT_UNSIGNED)) {
2567 if (t == VT_FLOAT)
2568 vpush_global_sym(&func_old_type, TOK___floatundisf);
2569 #if LDOUBLE_SIZE != 8
2570 else if (t == VT_LDOUBLE)
2571 vpush_global_sym(&func_old_type, TOK___floatundixf);
2572 #endif
2573 else
2574 vpush_global_sym(&func_old_type, TOK___floatundidf);
2575 vrott(2);
2576 gfunc_call(1);
2577 vpushi(0);
2578 vtop->r = reg_fret(t);
2579 } else {
2580 gen_cvt_itof(t);
2582 #endif
2584 #endif
2586 /* generic ftoi for unsigned long long case */
2587 static void gen_cvt_ftoi1(int t)
2589 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2590 gen_cvt_ftoi(t);
2591 #else
2592 int st;
2594 if (t == (VT_LLONG | VT_UNSIGNED)) {
2595 /* not handled natively */
2596 st = vtop->type.t & VT_BTYPE;
2597 if (st == VT_FLOAT)
2598 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2599 #if LDOUBLE_SIZE != 8
2600 else if (st == VT_LDOUBLE)
2601 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2602 #endif
2603 else
2604 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2605 vrott(2);
2606 gfunc_call(1);
2607 vpushi(0);
2608 vtop->r = REG_IRET;
2609 vtop->r2 = REG_LRET;
2610 } else {
2611 gen_cvt_ftoi(t);
2613 #endif
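/* Example: on targets without a native unsigned 64-bit conversion,
   "(double)x" for an unsigned long long x becomes a call to the
   runtime helper __floatundidf, and "(unsigned long long)d" a call to
   __fixunsdfdi; all other cases go through gen_cvt_itof/gen_cvt_ftoi
   directly. */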
2616 /* force char or short cast */
2617 static void force_charshort_cast(int t)
2619 int bits, dbt;
2621 /* cannot cast static initializers */
2622 if (STATIC_DATA_WANTED)
2623 return;
2625 dbt = t & VT_BTYPE;
2626 /* XXX: add optimization if lvalue : just change type and offset */
2627 if (dbt == VT_BYTE)
2628 bits = 8;
2629 else
2630 bits = 16;
2631 if (t & VT_UNSIGNED) {
2632 vpushi((1 << bits) - 1);
2633 gen_op('&');
2634 } else {
2635 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2636 bits = 64 - bits;
2637 else
2638 bits = 32 - bits;
2639 vpushi(bits);
2640 gen_op(TOK_SHL);
2641 /* result must be signed or the SAR is converted to an SHL
2642 This was not the case when "t" was a signed short
2643 and the last value on the stack was an unsigned int */
2644 vtop->type.t &= ~VT_UNSIGNED;
2645 vpushi(bits);
2646 gen_op(TOK_SAR);
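/* Example: truncating an int to "signed char" without a store shifts
   the value left by 24 and arithmetic-shifts it back to sign-extend,
   while "unsigned char" is simply masked with 0xff (see above). */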
2650 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2651 static void gen_cast_s(int t)
2653 CType type;
2654 type.t = t;
2655 type.ref = NULL;
2656 gen_cast(&type);
2659 static void gen_cast(CType *type)
2661 int sbt, dbt, sf, df, c, p;
2663 /* special delayed cast for char/short */
2664 /* XXX: in some cases (multiple cascaded casts), it may still
2665 be incorrect */
2666 if (vtop->r & VT_MUSTCAST) {
2667 vtop->r &= ~VT_MUSTCAST;
2668 force_charshort_cast(vtop->type.t);
2671 /* bitfields first get cast to ints */
2672 if (vtop->type.t & VT_BITFIELD) {
2673 gv(RC_INT);
2676 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2677 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2679 if (sbt != dbt) {
2680 sf = is_float(sbt);
2681 df = is_float(dbt);
2682 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2683 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2684 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2685 c &= dbt != VT_LDOUBLE;
2686 #endif
2687 if (c) {
2688 /* constant case: we can do it now */
2689 /* XXX: in ISOC, cannot do it if error in convert */
2690 if (sbt == VT_FLOAT)
2691 vtop->c.ld = vtop->c.f;
2692 else if (sbt == VT_DOUBLE)
2693 vtop->c.ld = vtop->c.d;
2695 if (df) {
2696 if ((sbt & VT_BTYPE) == VT_LLONG) {
2697 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2698 vtop->c.ld = vtop->c.i;
2699 else
2700 vtop->c.ld = -(long double)-vtop->c.i;
2701 } else if(!sf) {
2702 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2703 vtop->c.ld = (uint32_t)vtop->c.i;
2704 else
2705 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2708 if (dbt == VT_FLOAT)
2709 vtop->c.f = (float)vtop->c.ld;
2710 else if (dbt == VT_DOUBLE)
2711 vtop->c.d = (double)vtop->c.ld;
2712 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2713 vtop->c.i = vtop->c.ld;
2714 } else if (sf && dbt == VT_BOOL) {
2715 vtop->c.i = (vtop->c.ld != 0);
2716 } else {
2717 if(sf)
2718 vtop->c.i = vtop->c.ld;
2719 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2721 else if (sbt & VT_UNSIGNED)
2722 vtop->c.i = (uint32_t)vtop->c.i;
2723 #if PTR_SIZE == 8
2724 else if (sbt == VT_PTR)
2726 #endif
2727 else if (sbt != VT_LLONG)
2728 vtop->c.i = ((uint32_t)vtop->c.i |
2729 -(vtop->c.i & 0x80000000));
2731 if (dbt == (VT_LLONG|VT_UNSIGNED))
2733 else if (dbt == VT_BOOL)
2734 vtop->c.i = (vtop->c.i != 0);
2735 #if PTR_SIZE == 8
2736 else if (dbt == VT_PTR)
2738 #endif
2739 else if (dbt != VT_LLONG) {
2740 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2741 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2742 0xffffffff);
2743 vtop->c.i &= m;
2744 if (!(dbt & VT_UNSIGNED))
2745 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2748 } else if (p && dbt == VT_BOOL) {
2749 vtop->r = VT_CONST;
2750 vtop->c.i = 1;
2751 } else {
2752 /* non constant case: generate code */
2753 if (sf && df) {
2754 /* convert from fp to fp */
2755 gen_cvt_ftof(dbt);
2756 } else if (df) {
2757 /* convert int to fp */
2758 gen_cvt_itof1(dbt);
2759 } else if (sf) {
2760 /* convert fp to int */
2761 if (dbt == VT_BOOL) {
2762 vpushi(0);
2763 gen_op(TOK_NE);
2764 } else {
2765 /* we handle char/short/etc... with generic code */
2766 if (dbt != (VT_INT | VT_UNSIGNED) &&
2767 dbt != (VT_LLONG | VT_UNSIGNED) &&
2768 dbt != VT_LLONG)
2769 dbt = VT_INT;
2770 gen_cvt_ftoi1(dbt);
2771 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2772 /* additional cast for char/short... */
2773 vtop->type.t = dbt;
2774 gen_cast(type);
2777 #if PTR_SIZE == 4
2778 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2779 if ((sbt & VT_BTYPE) != VT_LLONG) {
2780 /* scalar to long long */
2781 /* machine independent conversion */
2782 gv(RC_INT);
2783 /* generate high word */
2784 if (sbt == (VT_INT | VT_UNSIGNED)) {
2785 vpushi(0);
2786 gv(RC_INT);
2787 } else {
2788 if (sbt == VT_PTR) {
2789 /* cast from pointer to int before we apply
2790 shift operation, which pointers don't support */
2791 gen_cast_s(VT_INT);
2793 gv_dup();
2794 vpushi(31);
2795 gen_op(TOK_SAR);
2797 /* patch second register */
2798 vtop[-1].r2 = vtop->r;
2799 vpop();
2801 #else
2802 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2803 (dbt & VT_BTYPE) == VT_PTR ||
2804 (dbt & VT_BTYPE) == VT_FUNC) {
2805 if ((sbt & VT_BTYPE) != VT_LLONG &&
2806 (sbt & VT_BTYPE) != VT_PTR &&
2807 (sbt & VT_BTYPE) != VT_FUNC) {
2808 /* need to convert from 32bit to 64bit */
2809 gv(RC_INT);
2810 if (sbt != (VT_INT | VT_UNSIGNED)) {
2811 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2812 gen_cvt_sxtw();
2813 #elif defined(TCC_TARGET_X86_64)
2814 int r = gv(RC_INT);
2815 /* x86_64 specific: movslq */
2816 o(0x6348);
2817 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2818 #else
2819 #error
2820 #endif
2823 #endif
2824 } else if (dbt == VT_BOOL) {
2825 /* scalar to bool */
2826 vpushi(0);
2827 gen_op(TOK_NE);
2828 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2829 (dbt & VT_BTYPE) == VT_SHORT) {
2830 if (sbt == VT_PTR) {
2831 vtop->type.t = VT_INT;
2832 tcc_warning("nonportable conversion from pointer to char/short");
2834 force_charshort_cast(dbt);
2835 } else if ((dbt & VT_BTYPE) == VT_INT) {
2836 /* scalar to int */
2837 if ((sbt & VT_BTYPE) == VT_LLONG) {
2838 #if PTR_SIZE == 4
2839 /* from long long: just take low order word */
2840 lexpand();
2841 vpop();
2842 #else
2843 vpushi(0xffffffff);
2844 vtop->type.t |= VT_UNSIGNED;
2845 gen_op('&');
2846 #endif
2848 /* if lvalue and single word type, nothing to do because
2849 the lvalue already contains the real type size (see
2850 VT_LVAL_xxx constants) */
2853 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2854 /* if we are casting between pointer types,
2855 we must update the VT_LVAL_xxx size */
2856 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2857 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2859 vtop->type = *type;
2860 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
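/* Example of the compile-time constant handling above:
   "(unsigned char)0x1ff" folds to 0xff (masked with 0xff, no sign
   extension), while "(signed char)0xff" folds to -1 because the sign
   bit of the masked value is propagated. */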
2863 /* return type size as known at compile time. Put alignment at 'a' */
2864 ST_FUNC int type_size(CType *type, int *a)
2866 Sym *s;
2867 int bt;
2869 bt = type->t & VT_BTYPE;
2870 if (bt == VT_STRUCT) {
2871 /* struct/union */
2872 s = type->ref;
2873 *a = s->r;
2874 return s->c;
2875 } else if (bt == VT_PTR) {
2876 if (type->t & VT_ARRAY) {
2877 int ts;
2879 s = type->ref;
2880 ts = type_size(&s->type, a);
2882 if (ts < 0 && s->c < 0)
2883 ts = -ts;
2885 return ts * s->c;
2886 } else {
2887 *a = PTR_SIZE;
2888 return PTR_SIZE;
2890 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2891 return -1; /* incomplete enum */
2892 } else if (bt == VT_LDOUBLE) {
2893 *a = LDOUBLE_ALIGN;
2894 return LDOUBLE_SIZE;
2895 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2896 #ifdef TCC_TARGET_I386
2897 #ifdef TCC_TARGET_PE
2898 *a = 8;
2899 #else
2900 *a = 4;
2901 #endif
2902 #elif defined(TCC_TARGET_ARM)
2903 #ifdef TCC_ARM_EABI
2904 *a = 8;
2905 #else
2906 *a = 4;
2907 #endif
2908 #else
2909 *a = 8;
2910 #endif
2911 return 8;
2912 } else if (bt == VT_INT || bt == VT_FLOAT) {
2913 *a = 4;
2914 return 4;
2915 } else if (bt == VT_SHORT) {
2916 *a = 2;
2917 return 2;
2918 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2919 *a = 8;
2920 return 16;
2921 } else {
2922 /* char, void, function, _Bool */
2923 *a = 1;
2924 return 1;
2928 /* push type size as known at runtime on top of value stack. Put
2929 alignment at 'a' */
2930 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2932 if (type->t & VT_VLA) {
2933 type_size(&type->ref->type, a);
2934 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2935 } else {
2936 vpushi(type_size(type, a));
2940 /* return the pointed type of t */
2941 static inline CType *pointed_type(CType *type)
2943 return &type->ref->type;
2946 /* modify type so that it becomes a pointer to the original type. */
2947 ST_FUNC void mk_pointer(CType *type)
2949 Sym *s;
2950 s = sym_push(SYM_FIELD, type, 0, -1);
2951 type->t = VT_PTR | (type->t & VT_STORAGE);
2952 type->ref = s;
2955 /* compare function types. OLD functions match any new functions */
2956 static int is_compatible_func(CType *type1, CType *type2)
2958 Sym *s1, *s2;
2960 s1 = type1->ref;
2961 s2 = type2->ref;
2962 if (s1->f.func_call != s2->f.func_call)
2963 return 0;
2964 if (s1->f.func_type != s2->f.func_type
2965 && s1->f.func_type != FUNC_OLD
2966 && s2->f.func_type != FUNC_OLD)
2967 return 0;
2968 /* we should check the function return type for FUNC_OLD too
2969 but that causes problems with the internally used support
2970 functions such as TOK_memmove */
2971 if (s1->f.func_type == FUNC_OLD && !s1->next)
2972 return 1;
2973 if (s2->f.func_type == FUNC_OLD && !s2->next)
2974 return 1;
2975 for (;;) {
2976 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2977 return 0;
2978 s1 = s1->next;
2979 s2 = s2->next;
2980 if (!s1)
2981 return !s2;
2982 if (!s2)
2983 return 0;
2987 /* return true if type1 and type2 are the same. If unqualified is
2988 true, qualifiers on the types are ignored.
2990 static int compare_types(CType *type1, CType *type2, int unqualified)
2992 int bt1, t1, t2;
2994 t1 = type1->t & VT_TYPE;
2995 t2 = type2->t & VT_TYPE;
2996 if (unqualified) {
2997 /* strip qualifiers before comparing */
2998 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2999 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3002 /* Default vs. explicit signedness only matters for char */
3003 if ((t1 & VT_BTYPE) != VT_BYTE) {
3004 t1 &= ~VT_DEFSIGN;
3005 t2 &= ~VT_DEFSIGN;
3007 /* XXX: bitfields ? */
3008 if (t1 != t2)
3009 return 0;
3011 if ((t1 & VT_ARRAY)
3012 && !(type1->ref->c < 0
3013 || type2->ref->c < 0
3014 || type1->ref->c == type2->ref->c))
3015 return 0;
3017 /* test more complicated cases */
3018 bt1 = t1 & VT_BTYPE;
3019 if (bt1 == VT_PTR) {
3020 type1 = pointed_type(type1);
3021 type2 = pointed_type(type2);
3022 return is_compatible_types(type1, type2);
3023 } else if (bt1 == VT_STRUCT) {
3024 return (type1->ref == type2->ref);
3025 } else if (bt1 == VT_FUNC) {
3026 return is_compatible_func(type1, type2);
3027 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3028 return type1->ref == type2->ref;
3029 } else {
3030 return 1;
3034 /* return true if type1 and type2 are exactly the same (including
3035 qualifiers).
3037 static int is_compatible_types(CType *type1, CType *type2)
3039 return compare_types(type1,type2,0);
3042 /* return true if type1 and type2 are the same (ignoring qualifiers).
3044 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3046 return compare_types(type1,type2,1);
3049 /* print a type. If 'varstr' is not NULL, then the variable is also
3050 printed in the type */
3051 /* XXX: union */
3052 /* XXX: add array and function pointers */
3053 static void type_to_str(char *buf, int buf_size,
3054 CType *type, const char *varstr)
3056 int bt, v, t;
3057 Sym *s, *sa;
3058 char buf1[256];
3059 const char *tstr;
3061 t = type->t;
3062 bt = t & VT_BTYPE;
3063 buf[0] = '\0';
3065 if (t & VT_EXTERN)
3066 pstrcat(buf, buf_size, "extern ");
3067 if (t & VT_STATIC)
3068 pstrcat(buf, buf_size, "static ");
3069 if (t & VT_TYPEDEF)
3070 pstrcat(buf, buf_size, "typedef ");
3071 if (t & VT_INLINE)
3072 pstrcat(buf, buf_size, "inline ");
3073 if (t & VT_VOLATILE)
3074 pstrcat(buf, buf_size, "volatile ");
3075 if (t & VT_CONSTANT)
3076 pstrcat(buf, buf_size, "const ");
3078 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3079 || ((t & VT_UNSIGNED)
3080 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3081 && !IS_ENUM(t)
3083 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3085 buf_size -= strlen(buf);
3086 buf += strlen(buf);
3088 switch(bt) {
3089 case VT_VOID:
3090 tstr = "void";
3091 goto add_tstr;
3092 case VT_BOOL:
3093 tstr = "_Bool";
3094 goto add_tstr;
3095 case VT_BYTE:
3096 tstr = "char";
3097 goto add_tstr;
3098 case VT_SHORT:
3099 tstr = "short";
3100 goto add_tstr;
3101 case VT_INT:
3102 tstr = "int";
3103 goto maybe_long;
3104 case VT_LLONG:
3105 tstr = "long long";
3106 maybe_long:
3107 if (t & VT_LONG)
3108 tstr = "long";
3109 if (!IS_ENUM(t))
3110 goto add_tstr;
3111 tstr = "enum ";
3112 goto tstruct;
3113 case VT_FLOAT:
3114 tstr = "float";
3115 goto add_tstr;
3116 case VT_DOUBLE:
3117 tstr = "double";
3118 goto add_tstr;
3119 case VT_LDOUBLE:
3120 tstr = "long double";
3121 add_tstr:
3122 pstrcat(buf, buf_size, tstr);
3123 break;
3124 case VT_STRUCT:
3125 tstr = "struct ";
3126 if (IS_UNION(t))
3127 tstr = "union ";
3128 tstruct:
3129 pstrcat(buf, buf_size, tstr);
3130 v = type->ref->v & ~SYM_STRUCT;
3131 if (v >= SYM_FIRST_ANOM)
3132 pstrcat(buf, buf_size, "<anonymous>");
3133 else
3134 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3135 break;
3136 case VT_FUNC:
3137 s = type->ref;
3138 buf1[0]=0;
3139 if (varstr && '*' == *varstr) {
3140 pstrcat(buf1, sizeof(buf1), "(");
3141 pstrcat(buf1, sizeof(buf1), varstr);
3142 pstrcat(buf1, sizeof(buf1), ")");
3144 pstrcat(buf1, sizeof(buf1), "(");
3145 sa = s->next;
3146 while (sa != NULL) {
3147 char buf2[256];
3148 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3149 pstrcat(buf1, sizeof(buf1), buf2);
3150 sa = sa->next;
3151 if (sa)
3152 pstrcat(buf1, sizeof(buf1), ", ");
3154 if (s->f.func_type == FUNC_ELLIPSIS)
3155 pstrcat(buf1, sizeof(buf1), ", ...");
3156 pstrcat(buf1, sizeof(buf1), ")");
3157 type_to_str(buf, buf_size, &s->type, buf1);
3158 goto no_var;
3159 case VT_PTR:
3160 s = type->ref;
3161 if (t & VT_ARRAY) {
3162 if (varstr && '*' == *varstr)
3163 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3164 else
3165 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3166 type_to_str(buf, buf_size, &s->type, buf1);
3167 goto no_var;
3169 pstrcpy(buf1, sizeof(buf1), "*");
3170 if (t & VT_CONSTANT)
3171 pstrcat(buf1, sizeof(buf1), "const ");
3172 if (t & VT_VOLATILE)
3173 pstrcat(buf1, sizeof(buf1), "volatile ");
3174 if (varstr)
3175 pstrcat(buf1, sizeof(buf1), varstr);
3176 type_to_str(buf, buf_size, &s->type, buf1);
3177 goto no_var;
3179 if (varstr) {
3180 pstrcat(buf, buf_size, " ");
3181 pstrcat(buf, buf_size, varstr);
3183 no_var: ;
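/* Example: for a pointer to "int (char, ...)" this produces the string
   "int (*)(char, ...)"; arrays append their dimension as "[N]" to the
   declarator, and anonymous struct/union tags print as "<anonymous>". */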
3186 /* verify type compatibility to store vtop in 'dt' type, and generate
3187 casts if needed. */
3188 static void gen_assign_cast(CType *dt)
3190 CType *st, *type1, *type2;
3191 char buf1[256], buf2[256];
3192 int dbt, sbt, qualwarn, lvl;
3194 st = &vtop->type; /* source type */
3195 dbt = dt->t & VT_BTYPE;
3196 sbt = st->t & VT_BTYPE;
3197 if (sbt == VT_VOID || dbt == VT_VOID) {
3198 if (sbt == VT_VOID && dbt == VT_VOID)
3199 ; /* It is Ok if both are void */
3200 else
3201 tcc_error("cannot cast from/to void");
3203 if (dt->t & VT_CONSTANT)
3204 tcc_warning("assignment of read-only location");
3205 switch(dbt) {
3206 case VT_PTR:
3207 /* special cases for pointers */
3208 /* '0' can also be a pointer */
3209 if (is_null_pointer(vtop))
3210 break;
3211 /* accept implicit integer to pointer conversion with a warning */
3212 if (is_integer_btype(sbt)) {
3213 tcc_warning("assignment makes pointer from integer without a cast");
3214 break;
3216 type1 = pointed_type(dt);
3217 if (sbt == VT_PTR)
3218 type2 = pointed_type(st);
3219 else if (sbt == VT_FUNC)
3220 type2 = st; /* a function is implicitly a function pointer */
3221 else
3222 goto error;
3223 if (is_compatible_types(type1, type2))
3224 break;
3225 for (qualwarn = lvl = 0;; ++lvl) {
3226 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3227 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3228 qualwarn = 1;
3229 dbt = type1->t & (VT_BTYPE|VT_LONG);
3230 sbt = type2->t & (VT_BTYPE|VT_LONG);
3231 if (dbt != VT_PTR || sbt != VT_PTR)
3232 break;
3233 type1 = pointed_type(type1);
3234 type2 = pointed_type(type2);
3236 if (!is_compatible_unqualified_types(type1, type2)) {
3237 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3238 /* void * can match anything */
3239 } else if (dbt == sbt
3240 && is_integer_btype(sbt & VT_BTYPE)
3241 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3242 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3243 /* Like GCC, don't warn by default for mere changes
3244 in pointer target signedness. Do warn for different
3245 base types, though, in particular for unsigned enums
3246 and signed int targets. */
3247 } else {
3248 tcc_warning("assignment from incompatible pointer type");
3249 break;
3252 if (qualwarn)
3253 tcc_warning("assignment discards qualifiers from pointer target type");
3254 break;
3255 case VT_BYTE:
3256 case VT_SHORT:
3257 case VT_INT:
3258 case VT_LLONG:
3259 if (sbt == VT_PTR || sbt == VT_FUNC) {
3260 tcc_warning("assignment makes integer from pointer without a cast");
3261 } else if (sbt == VT_STRUCT) {
3262 goto case_VT_STRUCT;
3264 /* XXX: more tests */
3265 break;
3266 case VT_STRUCT:
3267 case_VT_STRUCT:
3268 if (!is_compatible_unqualified_types(dt, st)) {
3269 error:
3270 type_to_str(buf1, sizeof(buf1), st, NULL);
3271 type_to_str(buf2, sizeof(buf2), dt, NULL);
3272 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3274 break;
3276 gen_cast(dt);
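/* Example: assigning an int to a "char *" only warns ("assignment
   makes pointer from integer without a cast"), assigning a
   "const char *" to a "char *" warns about discarded qualifiers, and
   assigning a struct of an incompatible type is a hard error. */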
3279 /* store vtop in lvalue pushed on stack */
3280 ST_FUNC void vstore(void)
3282 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3284 ft = vtop[-1].type.t;
3285 sbt = vtop->type.t & VT_BTYPE;
3286 dbt = ft & VT_BTYPE;
3287 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3288 (sbt == VT_INT && dbt == VT_SHORT))
3289 && !(vtop->type.t & VT_BITFIELD)) {
3290 /* optimize char/short casts */
3291 delayed_cast = VT_MUSTCAST;
3292 vtop->type.t = ft & VT_TYPE;
3293 /* XXX: factorize */
3294 if (ft & VT_CONSTANT)
3295 tcc_warning("assignment of read-only location");
3296 } else {
3297 delayed_cast = 0;
3298 if (!(ft & VT_BITFIELD))
3299 gen_assign_cast(&vtop[-1].type);
3302 if (sbt == VT_STRUCT) {
3303 /* if structure, only generate pointer */
3304 /* structure assignment : generate memcpy */
3305 /* XXX: optimize if small size */
3306 size = type_size(&vtop->type, &align);
3308 /* destination */
3309 vswap();
3310 vtop->type.t = VT_PTR;
3311 gaddrof();
3313 /* address of memcpy() */
3314 #ifdef TCC_ARM_EABI
3315 if(!(align & 7))
3316 vpush_global_sym(&func_old_type, TOK_memcpy8);
3317 else if(!(align & 3))
3318 vpush_global_sym(&func_old_type, TOK_memcpy4);
3319 else
3320 #endif
3321 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3322 vpush_global_sym(&func_old_type, TOK_memmove);
3324 vswap();
3325 /* source */
3326 vpushv(vtop - 2);
3327 vtop->type.t = VT_PTR;
3328 gaddrof();
3329 /* type size */
3330 vpushi(size);
3331 gfunc_call(3);
3333 /* leave source on stack */
3334 } else if (ft & VT_BITFIELD) {
3335 /* bitfield store handling */
3337 /* save lvalue as expression result (example: s.b = s.a = n;) */
3338 vdup(), vtop[-1] = vtop[-2];
3340 bit_pos = BIT_POS(ft);
3341 bit_size = BIT_SIZE(ft);
3342 /* remove bit field info to avoid loops */
3343 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3345 if ((ft & VT_BTYPE) == VT_BOOL) {
3346 gen_cast(&vtop[-1].type);
3347 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3350 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3351 if (r == VT_STRUCT) {
3352 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3353 store_packed_bf(bit_pos, bit_size);
3354 } else {
3355 unsigned long long mask = (1ULL << bit_size) - 1;
3356 if ((ft & VT_BTYPE) != VT_BOOL) {
3357 /* mask source */
3358 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3359 vpushll(mask);
3360 else
3361 vpushi((unsigned)mask);
3362 gen_op('&');
3364 /* shift source */
3365 vpushi(bit_pos);
3366 gen_op(TOK_SHL);
3367 vswap();
3368 /* duplicate destination */
3369 vdup();
3370 vrott(3);
3371 /* load destination, mask and or with source */
3372 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3373 vpushll(~(mask << bit_pos));
3374 else
3375 vpushi(~((unsigned)mask << bit_pos));
3376 gen_op('&');
3377 gen_op('|');
3378 /* store result */
3379 vstore();
3380 /* ... and discard */
3381 vpop();
3383 } else if (dbt == VT_VOID) {
3384 --vtop;
3385 } else {
3386 #ifdef CONFIG_TCC_BCHECK
3387 /* bound check case */
3388 if (vtop[-1].r & VT_MUSTBOUND) {
3389 vswap();
3390 gbound();
3391 vswap();
3393 #endif
3394 rc = RC_INT;
3395 if (is_float(ft)) {
3396 rc = RC_FLOAT;
3397 #ifdef TCC_TARGET_X86_64
3398 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3399 rc = RC_ST0;
3400 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3401 rc = RC_FRET;
3403 #endif
3405 r = gv(rc); /* generate value */
3406 /* if lvalue was saved on stack, must read it */
3407 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3408 SValue sv;
3409 t = get_reg(RC_INT);
3410 #if PTR_SIZE == 8
3411 sv.type.t = VT_PTR;
3412 #else
3413 sv.type.t = VT_INT;
3414 #endif
3415 sv.r = VT_LOCAL | VT_LVAL;
3416 sv.c.i = vtop[-1].c.i;
3417 load(t, &sv);
3418 vtop[-1].r = t | VT_LVAL;
3420 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3421 #if PTR_SIZE == 8
3422 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3423 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3424 #else
3425 if ((ft & VT_BTYPE) == VT_LLONG) {
3426 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3427 #endif
3428 vtop[-1].type.t = load_type;
3429 store(r, vtop - 1);
3430 vswap();
3431 /* convert to int to increment easily */
3432 vtop->type.t = addr_type;
3433 gaddrof();
3434 vpushi(load_size);
3435 gen_op('+');
3436 vtop->r |= VT_LVAL;
3437 vswap();
3438 vtop[-1].type.t = load_type;
3439 /* XXX: it works because r2 is spilled last ! */
3440 store(vtop->r2, vtop - 1);
3441 } else {
3442 store(r, vtop - 1);
3445 vswap();
3446 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3447 vtop->r |= delayed_cast;
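/* Example of the bit-field store above: writing to "int b:5" placed at
   bit 3 masks the source with 0x1f, shifts it left by 3, clears the
   destination bits with ~(0x1f << 3) and ORs the two values before
   storing the word back. */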
3451 /* 'post' selects post- vs pre- increment/decrement. c is the token ++ or -- */
3452 ST_FUNC void inc(int post, int c)
3454 test_lvalue();
3455 vdup(); /* save lvalue */
3456 if (post) {
3457 gv_dup(); /* duplicate value */
3458 vrotb(3);
3459 vrotb(3);
3461 /* add constant */
3462 vpushi(c - TOK_MID);
3463 gen_op('+');
3464 vstore(); /* store value */
3465 if (post)
3466 vpop(); /* if post op, return saved value */
3469 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3471 /* read the string */
3472 if (tok != TOK_STR)
3473 expect(msg);
3474 cstr_new(astr);
3475 while (tok == TOK_STR) {
3476 /* XXX: add \0 handling too ? */
3477 cstr_cat(astr, tokc.str.data, -1);
3478 next();
3480 cstr_ccat(astr, '\0');
3483 /* If I is >= 1 and a power of two, returns log2(i)+1.
3484 If I is 0 returns 0. */
3485 static int exact_log2p1(int i)
3487 int ret;
3488 if (!i)
3489 return 0;
3490 for (ret = 1; i >= 1 << 8; ret += 8)
3491 i >>= 8;
3492 if (i >= 1 << 4)
3493 ret += 4, i >>= 4;
3494 if (i >= 1 << 2)
3495 ret += 2, i >>= 2;
3496 if (i >= 1 << 1)
3497 ret++;
3498 return ret;
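/* Example: exact_log2p1(1) == 1, exact_log2p1(8) == 4 and
   exact_log2p1(0) == 0; parse_attribute() stores alignments in this
   compact form in ad->a.aligned. */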
3501 /* Parse __attribute__((...)) GNUC extension. */
3502 static void parse_attribute(AttributeDef *ad)
3504 int t, n;
3505 CString astr;
3507 redo:
3508 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3509 return;
3510 next();
3511 skip('(');
3512 skip('(');
3513 while (tok != ')') {
3514 if (tok < TOK_IDENT)
3515 expect("attribute name");
3516 t = tok;
3517 next();
3518 switch(t) {
3519 case TOK_CLEANUP1:
3520 case TOK_CLEANUP2:
3522 Sym *s;
3524 skip('(');
3525 s = sym_find(tok);
3526 if (!s) {
3527 tcc_warning("implicit declaration of function '%s'",
3528 get_tok_str(tok, &tokc));
3529 s = external_global_sym(tok, &func_old_type);
3531 ad->cleanup_func = s;
3532 next();
3533 skip(')');
3534 break;
3536 case TOK_SECTION1:
3537 case TOK_SECTION2:
3538 skip('(');
3539 parse_mult_str(&astr, "section name");
3540 ad->section = find_section(tcc_state, (char *)astr.data);
3541 skip(')');
3542 cstr_free(&astr);
3543 break;
3544 case TOK_ALIAS1:
3545 case TOK_ALIAS2:
3546 skip('(');
3547 parse_mult_str(&astr, "alias(\"target\")");
3548 ad->alias_target = /* save string as token, for later */
3549 tok_alloc((char*)astr.data, astr.size-1)->tok;
3550 skip(')');
3551 cstr_free(&astr);
3552 break;
3553 case TOK_VISIBILITY1:
3554 case TOK_VISIBILITY2:
3555 skip('(');
3556 parse_mult_str(&astr,
3557 "visibility(\"default|hidden|internal|protected\")");
3558 if (!strcmp (astr.data, "default"))
3559 ad->a.visibility = STV_DEFAULT;
3560 else if (!strcmp (astr.data, "hidden"))
3561 ad->a.visibility = STV_HIDDEN;
3562 else if (!strcmp (astr.data, "internal"))
3563 ad->a.visibility = STV_INTERNAL;
3564 else if (!strcmp (astr.data, "protected"))
3565 ad->a.visibility = STV_PROTECTED;
3566 else
3567 expect("visibility(\"default|hidden|internal|protected\")");
3568 skip(')');
3569 cstr_free(&astr);
3570 break;
3571 case TOK_ALIGNED1:
3572 case TOK_ALIGNED2:
3573 if (tok == '(') {
3574 next();
3575 n = expr_const();
3576 if (n <= 0 || (n & (n - 1)) != 0)
3577 tcc_error("alignment must be a positive power of two");
3578 skip(')');
3579 } else {
3580 n = MAX_ALIGN;
3582 ad->a.aligned = exact_log2p1(n);
3583 if (n != 1 << (ad->a.aligned - 1))
3584 tcc_error("alignment of %d is larger than implemented", n);
3585 break;
3586 case TOK_PACKED1:
3587 case TOK_PACKED2:
3588 ad->a.packed = 1;
3589 break;
3590 case TOK_WEAK1:
3591 case TOK_WEAK2:
3592 ad->a.weak = 1;
3593 break;
3594 case TOK_UNUSED1:
3595 case TOK_UNUSED2:
3596 /* currently, no need to handle it because tcc does not
3597 track unused objects */
3598 break;
3599 case TOK_NORETURN1:
3600 case TOK_NORETURN2:
3601 ad->f.func_noreturn = 1;
3602 break;
3603 case TOK_CDECL1:
3604 case TOK_CDECL2:
3605 case TOK_CDECL3:
3606 ad->f.func_call = FUNC_CDECL;
3607 break;
3608 case TOK_STDCALL1:
3609 case TOK_STDCALL2:
3610 case TOK_STDCALL3:
3611 ad->f.func_call = FUNC_STDCALL;
3612 break;
3613 #ifdef TCC_TARGET_I386
3614 case TOK_REGPARM1:
3615 case TOK_REGPARM2:
3616 skip('(');
3617 n = expr_const();
3618 if (n > 3)
3619 n = 3;
3620 else if (n < 0)
3621 n = 0;
3622 if (n > 0)
3623 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3624 skip(')');
3625 break;
3626 case TOK_FASTCALL1:
3627 case TOK_FASTCALL2:
3628 case TOK_FASTCALL3:
3629 ad->f.func_call = FUNC_FASTCALLW;
3630 break;
3631 #endif
3632 case TOK_MODE:
3633 skip('(');
3634 switch(tok) {
3635 case TOK_MODE_DI:
3636 ad->attr_mode = VT_LLONG + 1;
3637 break;
3638 case TOK_MODE_QI:
3639 ad->attr_mode = VT_BYTE + 1;
3640 break;
3641 case TOK_MODE_HI:
3642 ad->attr_mode = VT_SHORT + 1;
3643 break;
3644 case TOK_MODE_SI:
3645 case TOK_MODE_word:
3646 ad->attr_mode = VT_INT + 1;
3647 break;
3648 default:
3649 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3650 break;
3652 next();
3653 skip(')');
3654 break;
3655 case TOK_DLLEXPORT:
3656 ad->a.dllexport = 1;
3657 break;
3658 case TOK_NODECORATE:
3659 ad->a.nodecorate = 1;
3660 break;
3661 case TOK_DLLIMPORT:
3662 ad->a.dllimport = 1;
3663 break;
3664 default:
3665 if (tcc_state->warn_unsupported)
3666 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3667 /* skip parameters */
3668 if (tok == '(') {
3669 int parenthesis = 0;
3670 do {
3671 if (tok == '(')
3672 parenthesis++;
3673 else if (tok == ')')
3674 parenthesis--;
3675 next();
3676 } while (parenthesis && tok != -1);
3678 break;
3680 if (tok != ',')
3681 break;
3682 next();
3684 skip(')');
3685 skip(')');
3686 goto redo;
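/* Example: "__attribute__((aligned(16), packed))" sets ad->a.aligned
   to exact_log2p1(16) == 5 and ad->a.packed to 1; attributes tcc does
   not know are skipped together with their parenthesized arguments,
   optionally with a warning. */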
3689 static Sym * find_field (CType *type, int v, int *cumofs)
3691 Sym *s = type->ref;
3692 v |= SYM_FIELD;
3693 while ((s = s->next) != NULL) {
3694 if ((s->v & SYM_FIELD) &&
3695 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3696 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3697 Sym *ret = find_field (&s->type, v, cumofs);
3698 if (ret) {
3699 *cumofs += s->c;
3700 return ret;
3703 if (s->v == v)
3704 break;
3706 return s;
3709 static void struct_layout(CType *type, AttributeDef *ad)
3711 int size, align, maxalign, offset, c, bit_pos, bit_size;
3712 int packed, a, bt, prevbt, prev_bit_size;
3713 int pcc = !tcc_state->ms_bitfields;
3714 int pragma_pack = *tcc_state->pack_stack_ptr;
3715 Sym *f;
3717 maxalign = 1;
3718 offset = 0;
3719 c = 0;
3720 bit_pos = 0;
3721 prevbt = VT_STRUCT; /* make it never match */
3722 prev_bit_size = 0;
3724 //#define BF_DEBUG
3726 for (f = type->ref->next; f; f = f->next) {
3727 if (f->type.t & VT_BITFIELD)
3728 bit_size = BIT_SIZE(f->type.t);
3729 else
3730 bit_size = -1;
3731 size = type_size(&f->type, &align);
3732 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3733 packed = 0;
3735 if (pcc && bit_size == 0) {
3736 /* in pcc mode, packing does not affect zero-width bitfields */
3738 } else {
3739 /* in pcc mode, attribute packed overrides if set. */
3740 if (pcc && (f->a.packed || ad->a.packed))
3741 align = packed = 1;
3743 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3744 if (pragma_pack) {
3745 packed = 1;
3746 if (pragma_pack < align)
3747 align = pragma_pack;
3748 /* in pcc mode pragma pack also overrides individual align */
3749 if (pcc && pragma_pack < a)
3750 a = 0;
3753 /* some individual align was specified */
3754 if (a)
3755 align = a;
3757 if (type->ref->type.t == VT_UNION) {
3758 if (pcc && bit_size >= 0)
3759 size = (bit_size + 7) >> 3;
3760 offset = 0;
3761 if (size > c)
3762 c = size;
3764 } else if (bit_size < 0) {
3765 if (pcc)
3766 c += (bit_pos + 7) >> 3;
3767 c = (c + align - 1) & -align;
3768 offset = c;
3769 if (size > 0)
3770 c += size;
3771 bit_pos = 0;
3772 prevbt = VT_STRUCT;
3773 prev_bit_size = 0;
3775 } else {
3776 /* A bit-field. Layout is more complicated. There are two
3777 options: PCC (GCC) compatible and MS compatible */
3778 if (pcc) {
3779 /* In PCC layout a bit-field is placed adjacent to the
3780 preceding bit-fields, except if:
3781 - it has zero-width
3782 - an individual alignment was given
3783 - it would overflow its base type container and
3784 there is no packing */
3785 if (bit_size == 0) {
3786 new_field:
3787 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3788 bit_pos = 0;
3789 } else if (f->a.aligned) {
3790 goto new_field;
3791 } else if (!packed) {
3792 int a8 = align * 8;
3793 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3794 if (ofs > size / align)
3795 goto new_field;
3798 /* in pcc mode, long long bitfields have type int if they fit */
3799 if (size == 8 && bit_size <= 32)
3800 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3802 while (bit_pos >= align * 8)
3803 c += align, bit_pos -= align * 8;
3804 offset = c;
3806 /* In PCC layout named bit-fields influence the alignment
3807 of the containing struct using the base type's alignment,
3808 except for packed fields (which here have correct align). */
3809 if (f->v & SYM_FIRST_ANOM
3810 // && bit_size // ??? gcc on ARM/rpi does that
3812 align = 1;
3814 } else {
3815 bt = f->type.t & VT_BTYPE;
3816 if ((bit_pos + bit_size > size * 8)
3817 || (bit_size > 0) == (bt != prevbt)
3819 c = (c + align - 1) & -align;
3820 offset = c;
3821 bit_pos = 0;
3822 /* In MS bitfield mode a bit-field run always uses
3823 at least as many bits as the underlying type.
3824 To start a new run it's also required that this
3825 or the last bit-field had non-zero width. */
3826 if (bit_size || prev_bit_size)
3827 c += size;
3829 /* In MS layout the record's alignment is normally
3830 influenced by the field, except for a zero-width
3831 field at the start of a run (but by further zero-width
3832 fields it is again). */
3833 if (bit_size == 0 && prevbt != bt)
3834 align = 1;
3835 prevbt = bt;
3836 prev_bit_size = bit_size;
3839 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3840 | (bit_pos << VT_STRUCT_SHIFT);
3841 bit_pos += bit_size;
3843 if (align > maxalign)
3844 maxalign = align;
3846 #ifdef BF_DEBUG
3847 printf("set field %s offset %-2d size %-2d align %-2d",
3848 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3849 if (f->type.t & VT_BITFIELD) {
3850 printf(" pos %-2d bits %-2d",
3851 BIT_POS(f->type.t),
3852 BIT_SIZE(f->type.t)
3855 printf("\n");
3856 #endif
3858 f->c = offset;
3859 f->r = 0;
3862 if (pcc)
3863 c += (bit_pos + 7) >> 3;
3865 /* store size and alignment */
3866 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3867 if (a < maxalign)
3868 a = maxalign;
3869 type->ref->r = a;
3870 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3871 /* can happen if individual align for some member was given. In
3872 this case MSVC ignores maxalign when aligning the size */
3873 a = pragma_pack;
3874 if (a < bt)
3875 a = bt;
3877 c = (c + a - 1) & -a;
3878 type->ref->c = c;
3880 #ifdef BF_DEBUG
3881 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3882 #endif
3884 /* check whether we can access bitfields by their type */
3885 for (f = type->ref->next; f; f = f->next) {
3886 int s, px, cx, c0;
3887 CType t;
3889 if (0 == (f->type.t & VT_BITFIELD))
3890 continue;
3891 f->type.ref = f;
3892 f->auxtype = -1;
3893 bit_size = BIT_SIZE(f->type.t);
3894 if (bit_size == 0)
3895 continue;
3896 bit_pos = BIT_POS(f->type.t);
3897 size = type_size(&f->type, &align);
3898 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3899 continue;
3901 /* try to access the field using a different type */
3902 c0 = -1, s = align = 1;
3903 for (;;) {
3904 px = f->c * 8 + bit_pos;
3905 cx = (px >> 3) & -align;
3906 px = px - (cx << 3);
3907 if (c0 == cx)
3908 break;
3909 s = (px + bit_size + 7) >> 3;
3910 if (s > 4) {
3911 t.t = VT_LLONG;
3912 } else if (s > 2) {
3913 t.t = VT_INT;
3914 } else if (s > 1) {
3915 t.t = VT_SHORT;
3916 } else {
3917 t.t = VT_BYTE;
3919 s = type_size(&t, &align);
3920 c0 = cx;
3923 if (px + bit_size <= s * 8 && cx + s <= c) {
3924 /* update offset and bit position */
3925 f->c = cx;
3926 bit_pos = px;
3927 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3928 | (bit_pos << VT_STRUCT_SHIFT);
3929 if (s != size)
3930 f->auxtype = t.t;
3931 #ifdef BF_DEBUG
3932 printf("FIX field %s offset %-2d size %-2d align %-2d "
3933 "pos %-2d bits %-2d\n",
3934 get_tok_str(f->v & ~SYM_FIELD, NULL),
3935 cx, s, align, px, bit_size);
3936 #endif
3937 } else {
3938 /* fall back to byte-wise load/store */
3939 f->auxtype = VT_STRUCT;
3940 #ifdef BF_DEBUG
3941 printf("FIX field %s : load byte-wise\n",
3942 get_tok_str(f->v & ~SYM_FIELD, NULL));
3943 #endif
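/* Worked example (PCC/gcc layout): for "struct { int a:3; int b:5; }"
   both fields share the first byte (a at bit 0, b at bit 3), the
   running bit position ends at 8, and the final size is rounded up to
   the int alignment, giving sizeof == 4 and alignment 4. */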
3948 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3949 static void struct_decl(CType *type, int u)
3951 int v, c, size, align, flexible;
3952 int bit_size, bsize, bt;
3953 Sym *s, *ss, **ps;
3954 AttributeDef ad, ad1;
3955 CType type1, btype;
3957 memset(&ad, 0, sizeof ad);
3958 next();
3959 parse_attribute(&ad);
3960 if (tok != '{') {
3961 v = tok;
3962 next();
3963 /* struct already defined ? return it */
3964 if (v < TOK_IDENT)
3965 expect("struct/union/enum name");
3966 s = struct_find(v);
3967 if (s && (s->sym_scope == local_scope || tok != '{')) {
3968 if (u == s->type.t)
3969 goto do_decl;
3970 if (u == VT_ENUM && IS_ENUM(s->type.t))
3971 goto do_decl;
3972 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3974 } else {
3975 v = anon_sym++;
3977 /* Record the original enum/struct/union token. */
3978 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3979 type1.ref = NULL;
3980 /* we put an undefined size for struct/union */
3981 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3982 s->r = 0; /* default alignment is zero, as in gcc */
3983 do_decl:
3984 type->t = s->type.t;
3985 type->ref = s;
3987 if (tok == '{') {
3988 next();
3989 if (s->c != -1)
3990 tcc_error("struct/union/enum already defined");
3991 s->c = -2;
3992 /* cannot be empty */
3993 /* empty enums are not allowed */
3994 ps = &s->next;
3995 if (u == VT_ENUM) {
3996 long long ll = 0, pl = 0, nl = 0;
3997 CType t;
3998 t.ref = s;
3999 /* enum symbols have static storage */
4000 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4001 for(;;) {
4002 v = tok;
4003 if (v < TOK_UIDENT)
4004 expect("identifier");
4005 ss = sym_find(v);
4006 if (ss && !local_stack)
4007 tcc_error("redefinition of enumerator '%s'",
4008 get_tok_str(v, NULL));
4009 next();
4010 if (tok == '=') {
4011 next();
4012 ll = expr_const64();
4014 ss = sym_push(v, &t, VT_CONST, 0);
4015 ss->enum_val = ll;
4016 *ps = ss, ps = &ss->next;
4017 if (ll < nl)
4018 nl = ll;
4019 if (ll > pl)
4020 pl = ll;
4021 if (tok != ',')
4022 break;
4023 next();
4024 ll++;
4025 /* NOTE: we accept a trailing comma */
4026 if (tok == '}')
4027 break;
4029 skip('}');
4030 /* set integral type of the enum */
4031 t.t = VT_INT;
4032 if (nl >= 0) {
4033 if (pl != (unsigned)pl)
4034 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4035 t.t |= VT_UNSIGNED;
4036 } else if (pl != (int)pl || nl != (int)nl)
4037 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4038 s->type.t = type->t = t.t | VT_ENUM;
4039 s->c = 0;
4040 /* set type for enum members */
4041 for (ss = s->next; ss; ss = ss->next) {
4042 ll = ss->enum_val;
4043 if (ll == (int)ll) /* default is int if it fits */
4044 continue;
4045 if (t.t & VT_UNSIGNED) {
4046 ss->type.t |= VT_UNSIGNED;
4047 if (ll == (unsigned)ll)
4048 continue;
4050 ss->type.t = (ss->type.t & ~VT_BTYPE)
4051 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4053 } else {
4054 c = 0;
4055 flexible = 0;
4056 while (tok != '}') {
4057 if (!parse_btype(&btype, &ad1)) {
4058 skip(';');
4059 continue;
4061 while (1) {
4062 if (flexible)
4063 tcc_error("flexible array member '%s' not at the end of struct",
4064 get_tok_str(v, NULL));
4065 bit_size = -1;
4066 v = 0;
4067 type1 = btype;
4068 if (tok != ':') {
4069 if (tok != ';')
4070 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4071 if (v == 0) {
4072 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4073 expect("identifier");
4074 else {
4075 int v = btype.ref->v;
4076 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4077 if (tcc_state->ms_extensions == 0)
4078 expect("identifier");
4082 if (type_size(&type1, &align) < 0) {
4083 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4084 flexible = 1;
4085 else
4086 tcc_error("field '%s' has incomplete type",
4087 get_tok_str(v, NULL));
4089 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4090 (type1.t & VT_BTYPE) == VT_VOID ||
4091 (type1.t & VT_STORAGE))
4092 tcc_error("invalid type for '%s'",
4093 get_tok_str(v, NULL));
4095 if (tok == ':') {
4096 next();
4097 bit_size = expr_const();
4098 /* XXX: handle v = 0 case for messages */
4099 if (bit_size < 0)
4100 tcc_error("negative width in bit-field '%s'",
4101 get_tok_str(v, NULL));
4102 if (v && bit_size == 0)
4103 tcc_error("zero width for bit-field '%s'",
4104 get_tok_str(v, NULL));
4105 parse_attribute(&ad1);
4107 size = type_size(&type1, &align);
4108 if (bit_size >= 0) {
4109 bt = type1.t & VT_BTYPE;
4110 if (bt != VT_INT &&
4111 bt != VT_BYTE &&
4112 bt != VT_SHORT &&
4113 bt != VT_BOOL &&
4114 bt != VT_LLONG)
4115 tcc_error("bitfields must have scalar type");
4116 bsize = size * 8;
4117 if (bit_size > bsize) {
4118 tcc_error("width of '%s' exceeds its type",
4119 get_tok_str(v, NULL));
4120 } else if (bit_size == bsize
4121 && !ad.a.packed && !ad1.a.packed) {
4122 /* no need for bit fields */
4124 } else if (bit_size == 64) {
4125 tcc_error("field width 64 not implemented");
4126 } else {
4127 type1.t = (type1.t & ~VT_STRUCT_MASK)
4128 | VT_BITFIELD
4129 | (bit_size << (VT_STRUCT_SHIFT + 6));
4132 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4133 /* Remember we've seen a real field to check
4134 for placement of flexible array member. */
4135 c = 1;
4137 /* If member is a struct or bit-field, enforce
4138 placing into the struct (as anonymous). */
4139 if (v == 0 &&
4140 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4141 bit_size >= 0)) {
4142 v = anon_sym++;
4144 if (v) {
4145 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4146 ss->a = ad1.a;
4147 *ps = ss;
4148 ps = &ss->next;
4150 if (tok == ';' || tok == TOK_EOF)
4151 break;
4152 skip(',');
4154 skip(';');
4156 skip('}');
4157 parse_attribute(&ad);
4158 struct_layout(type, &ad);
4163 static void sym_to_attr(AttributeDef *ad, Sym *s)
4165 merge_symattr(&ad->a, &s->a);
4166 merge_funcattr(&ad->f, &s->f);
4169 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4170 are added to the element type, copied because it could be a typedef. */
4171 static void parse_btype_qualify(CType *type, int qualifiers)
4173 while (type->t & VT_ARRAY) {
4174 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4175 type = &type->ref->type;
4177 type->t |= qualifiers;
4180 /* return 0 if no type declaration. otherwise, return the basic type
4181 and skip it.
4183 static int parse_btype(CType *type, AttributeDef *ad)
4185 int t, u, bt, st, type_found, typespec_found, g, n;
4186 Sym *s;
4187 CType type1;
4189 memset(ad, 0, sizeof(AttributeDef));
4190 type_found = 0;
4191 typespec_found = 0;
4192 t = VT_INT;
4193 bt = st = -1;
4194 type->ref = NULL;
4196 while(1) {
4197 switch(tok) {
4198 case TOK_EXTENSION:
4199 /* currently, we really ignore extension */
4200 next();
4201 continue;
4203 /* basic types */
4204 case TOK_CHAR:
4205 u = VT_BYTE;
4206 basic_type:
4207 next();
4208 basic_type1:
4209 if (u == VT_SHORT || u == VT_LONG) {
4210 if (st != -1 || (bt != -1 && bt != VT_INT))
4211 tmbt: tcc_error("too many basic types");
4212 st = u;
4213 } else {
4214 if (bt != -1 || (st != -1 && u != VT_INT))
4215 goto tmbt;
4216 bt = u;
4218 if (u != VT_INT)
4219 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4220 typespec_found = 1;
4221 break;
4222 case TOK_VOID:
4223 u = VT_VOID;
4224 goto basic_type;
4225 case TOK_SHORT:
4226 u = VT_SHORT;
4227 goto basic_type;
4228 case TOK_INT:
4229 u = VT_INT;
4230 goto basic_type;
4231 case TOK_ALIGNAS:
4232 { int n;
4233 AttributeDef ad1;
4234 next();
4235 skip('(');
4236 memset(&ad1, 0, sizeof(AttributeDef));
4237 if (parse_btype(&type1, &ad1)) {
4238 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4239 if (ad1.a.aligned)
4240 n = 1 << (ad1.a.aligned - 1);
4241 else
4242 type_size(&type1, &n);
4243 } else {
4244 n = expr_const();
4245 if (n <= 0 || (n & (n - 1)) != 0)
4246 tcc_error("alignment must be a positive power of two");
4248 skip(')');
4249 ad->a.aligned = exact_log2p1(n);
4251 continue;
4252 case TOK_LONG:
4253 if ((t & VT_BTYPE) == VT_DOUBLE) {
4254 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4255 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4256 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4257 } else {
4258 u = VT_LONG;
4259 goto basic_type;
4261 next();
4262 break;
4263 #ifdef TCC_TARGET_ARM64
4264 case TOK_UINT128:
4265 /* GCC's __uint128_t appears in some Linux header files. Make it a
4266 synonym for long double to get the size and alignment right. */
4267 u = VT_LDOUBLE;
4268 goto basic_type;
4269 #endif
4270 case TOK_BOOL:
4271 u = VT_BOOL;
4272 goto basic_type;
4273 case TOK_FLOAT:
4274 u = VT_FLOAT;
4275 goto basic_type;
4276 case TOK_DOUBLE:
4277 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4278 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4279 } else {
4280 u = VT_DOUBLE;
4281 goto basic_type;
4283 next();
4284 break;
4285 case TOK_ENUM:
4286 struct_decl(&type1, VT_ENUM);
4287 basic_type2:
4288 u = type1.t;
4289 type->ref = type1.ref;
4290 goto basic_type1;
4291 case TOK_STRUCT:
4292 struct_decl(&type1, VT_STRUCT);
4293 goto basic_type2;
4294 case TOK_UNION:
4295 struct_decl(&type1, VT_UNION);
4296 goto basic_type2;
4298 /* type modifiers */
4299 case TOK_CONST1:
4300 case TOK_CONST2:
4301 case TOK_CONST3:
4302 type->t = t;
4303 parse_btype_qualify(type, VT_CONSTANT);
4304 t = type->t;
4305 next();
4306 break;
4307 case TOK_VOLATILE1:
4308 case TOK_VOLATILE2:
4309 case TOK_VOLATILE3:
4310 type->t = t;
4311 parse_btype_qualify(type, VT_VOLATILE);
4312 t = type->t;
4313 next();
4314 break;
4315 case TOK_SIGNED1:
4316 case TOK_SIGNED2:
4317 case TOK_SIGNED3:
4318 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4319 tcc_error("signed and unsigned modifier");
4320 t |= VT_DEFSIGN;
4321 next();
4322 typespec_found = 1;
4323 break;
4324 case TOK_REGISTER:
4325 case TOK_AUTO:
4326 case TOK_RESTRICT1:
4327 case TOK_RESTRICT2:
4328 case TOK_RESTRICT3:
4329 next();
4330 break;
4331 case TOK_UNSIGNED:
4332 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4333 tcc_error("signed and unsigned modifier");
4334 t |= VT_DEFSIGN | VT_UNSIGNED;
4335 next();
4336 typespec_found = 1;
4337 break;
4339 /* storage */
4340 case TOK_EXTERN:
4341 g = VT_EXTERN;
4342 goto storage;
4343 case TOK_STATIC:
4344 g = VT_STATIC;
4345 goto storage;
4346 case TOK_TYPEDEF:
4347 g = VT_TYPEDEF;
4348 goto storage;
4349 storage:
4350 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4351 tcc_error("multiple storage classes");
4352 t |= g;
4353 next();
4354 break;
4355 case TOK_INLINE1:
4356 case TOK_INLINE2:
4357 case TOK_INLINE3:
4358 t |= VT_INLINE;
4359 next();
4360 break;
4361 case TOK_NORETURN3:
4362 /* currently, no need to handle it because tcc does not
4363 track unused objects */
4364 next();
4365 break;
4366 /* GNUC attribute */
4367 case TOK_ATTRIBUTE1:
4368 case TOK_ATTRIBUTE2:
4369 parse_attribute(ad);
4370 if (ad->attr_mode) {
4371 u = ad->attr_mode -1;
4372 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4374 continue;
4375 /* GNUC typeof */
4376 case TOK_TYPEOF1:
4377 case TOK_TYPEOF2:
4378 case TOK_TYPEOF3:
4379 next();
4380 parse_expr_type(&type1);
4381 /* remove all storage modifiers except typedef */
4382 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4383 if (type1.ref)
4384 sym_to_attr(ad, type1.ref);
4385 goto basic_type2;
4386 default:
4387 if (typespec_found)
4388 goto the_end;
4389 s = sym_find(tok);
4390 if (!s || !(s->type.t & VT_TYPEDEF))
4391 goto the_end;
4393 n = tok, next();
4394 if (tok == ':' && !in_generic) {
4395 /* ignore if it's a label */
4396 unget_tok(n);
4397 goto the_end;
4400 t &= ~(VT_BTYPE|VT_LONG);
4401 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4402 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4403 type->ref = s->type.ref;
4404 if (t)
4405 parse_btype_qualify(type, t);
4406 t = type->t;
4407 /* get attributes from typedef */
4408 sym_to_attr(ad, s);
4409 typespec_found = 1;
4410 st = bt = -2;
4411 break;
4413 type_found = 1;
4415 the_end:
4416 if (tcc_state->char_is_unsigned) {
4417 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4418 t |= VT_UNSIGNED;
4420 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4421 bt = t & (VT_BTYPE|VT_LONG);
4422 if (bt == VT_LONG)
4423 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4424 #ifdef TCC_TARGET_PE
4425 if (bt == VT_LDOUBLE)
4426 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4427 #endif
4428 type->t = t;
4429 return type_found;
4432 /* convert a function parameter type (array to pointer and function to
4433 function pointer) */
4434 static inline void convert_parameter_type(CType *pt)
4436 /* remove const and volatile qualifiers (XXX: const could be used
4437 to indicate a const function parameter) */
4438 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4439 /* array must be transformed to pointer according to ANSI C */
4440 pt->t &= ~VT_ARRAY;
4441 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4442 mk_pointer(pt);
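
/* For illustration (assuming the usual C parameter adjustments), a
   prototype written as

       void f(int a[10], int g(void));

   is recorded as if it had been declared

       void f(int *a, int (*g)(void));

   the array decays to a pointer, the function parameter becomes a
   function pointer, and top-level const/volatile are dropped. */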
4446 ST_FUNC void parse_asm_str(CString *astr)
4448 skip('(');
4449 parse_mult_str(astr, "string constant");
4452 /* Parse an asm label and return the token */
4453 static int asm_label_instr(void)
4455 int v;
4456 CString astr;
4458 next();
4459 parse_asm_str(&astr);
4460 skip(')');
4461 #ifdef ASM_DEBUG
4462 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4463 #endif
4464 v = tok_alloc(astr.data, astr.size - 1)->tok;
4465 cstr_free(&astr);
4466 return v;
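
/* Example of the GNU asm-label syntax handled here, which renames a
   declaration at the assembler level (the alias name is illustrative):

       extern int get_errno(void) __asm__("__errno_location_alias");

   The string becomes the token value returned by asm_label_instr(). */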
4469 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4471 int n, l, t1, arg_size, align, unused_align;
4472 Sym **plast, *s, *first;
4473 AttributeDef ad1;
4474 CType pt;
4476 if (tok == '(') {
4477 /* function type, or recursive declarator (return if so) */
4478 next();
4479 if (td && !(td & TYPE_ABSTRACT))
4480 return 0;
4481 if (tok == ')')
4482 l = 0;
4483 else if (parse_btype(&pt, &ad1))
4484 l = FUNC_NEW;
4485 else if (td) {
4486 merge_attr (ad, &ad1);
4487 return 0;
4488 } else
4489 l = FUNC_OLD;
4490 first = NULL;
4491 plast = &first;
4492 arg_size = 0;
4493 if (l) {
4494 for(;;) {
4495 /* read param name and compute offset */
4496 if (l != FUNC_OLD) {
4497 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4498 break;
4499 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4500 if ((pt.t & VT_BTYPE) == VT_VOID)
4501 tcc_error("parameter declared as void");
4502 } else {
4503 n = tok;
4504 if (n < TOK_UIDENT)
4505 expect("identifier");
4506 pt.t = VT_VOID; /* invalid type */
4507 pt.ref = NULL;
4508 next();
4510 convert_parameter_type(&pt);
4511 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4512 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4513 *plast = s;
4514 plast = &s->next;
4515 if (tok == ')')
4516 break;
4517 skip(',');
4518 if (l == FUNC_NEW && tok == TOK_DOTS) {
4519 l = FUNC_ELLIPSIS;
4520 next();
4521 break;
4523 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4524 tcc_error("invalid type");
4526 } else
4527 /* if no parameters, then old type prototype */
4528 l = FUNC_OLD;
4529 skip(')');
4530 /* NOTE: const is ignored in returned type as it has a special
4531 meaning in gcc / C++ */
4532 type->t &= ~VT_CONSTANT;
4533 /* some ancient pre-K&R C allows a function to return an array
4534 and the array brackets to be put after the arguments, such
4535 that "int c()[]" means something like "int[] c()" */
4536 if (tok == '[') {
4537 next();
4538 skip(']'); /* only handle simple "[]" */
4539 mk_pointer(type);
4541 /* we push an anonymous symbol which will contain the function prototype */
4542 ad->f.func_args = arg_size;
4543 ad->f.func_type = l;
4544 s = sym_push(SYM_FIELD, type, 0, 0);
4545 s->a = ad->a;
4546 s->f = ad->f;
4547 s->next = first;
4548 type->t = VT_FUNC;
4549 type->ref = s;
4550 } else if (tok == '[') {
4551 int saved_nocode_wanted = nocode_wanted;
4552 /* array definition */
4553 next();
4554 while (1) {
4555 /* XXX The optional type-quals and static should only be accepted
4556 in parameter decls. The '*' as well, and then even only
4557 in prototypes (not function defs). */
4558 switch (tok) {
4559 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4560 case TOK_CONST1:
4561 case TOK_VOLATILE1:
4562 case TOK_STATIC:
4563 case '*':
4564 next();
4565 continue;
4566 default:
4567 break;
4569 break;
4571 n = -1;
4572 t1 = 0;
4573 if (tok != ']') {
4574 if (!local_stack || (storage & VT_STATIC))
4575 vpushi(expr_const());
4576 else {
4577 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4578 length must always be evaluated, even under nocode_wanted,
4579 so that its size slot is initialized (e.g. under sizeof
4580 or typeof). */
4581 nocode_wanted = 0;
4582 gexpr();
4584 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4585 n = vtop->c.i;
4586 if (n < 0)
4587 tcc_error("invalid array size");
4588 } else {
4589 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4590 tcc_error("size of variable length array should be an integer");
4591 n = 0;
4592 t1 = VT_VLA;
4595 skip(']');
4596 /* parse next post type */
4597 post_type(type, ad, storage, 0);
4599 if ((type->t & VT_BTYPE) == VT_FUNC)
4600 tcc_error("declaration of an array of functions");
4601 if ((type->t & VT_BTYPE) == VT_VOID
4602 || type_size(type, &unused_align) < 0)
4603 tcc_error("declaration of an array of incomplete type elements");
4605 t1 |= type->t & VT_VLA;
4607 if (t1 & VT_VLA) {
4608 if (n < 0)
4609 tcc_error("need explicit inner array size in VLAs");
4610 loc -= type_size(&int_type, &align);
4611 loc &= -align;
4612 n = loc;
4614 vla_runtime_type_size(type, &align);
4615 gen_op('*');
4616 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4617 vswap();
4618 vstore();
4620 if (n != -1)
4621 vpop();
4622 nocode_wanted = saved_nocode_wanted;
4624 /* we push an anonymous symbol which will contain the array
4625 element type */
4626 s = sym_push(SYM_FIELD, type, 0, n);
4627 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4628 type->ref = s;
4630 return 1;
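
/* Illustrative declarators whose suffix parts are parsed by post_type()
   (identifiers chosen only for this example):

       int (*handler)(int sig, void *ctx);    new-style function type
       double matrix[4][4];                   nested array types
       void scale(int n, double v[n]);        VLA parameter, its size
                                              evaluated at run time
*/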
4633 /* Parse a type declarator (except basic type), and return the type
4634 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4635 expected. 'type' should contain the basic type. 'ad' is the
4636 attribute definition of the basic type. It can be modified by
4637 type_decl(). If this (possibly abstract) declarator is a pointer chain
4638 it returns the innermost pointed to type (equals *type, but is a different
4639 pointer), otherwise returns type itself, that's used for recursive calls. */
4640 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4642 CType *post, *ret;
4643 int qualifiers, storage;
4645 /* recursive type, remove storage bits first, apply them later again */
4646 storage = type->t & VT_STORAGE;
4647 type->t &= ~VT_STORAGE;
4648 post = ret = type;
4650 while (tok == '*') {
4651 qualifiers = 0;
4652 redo:
4653 next();
4654 switch(tok) {
4655 case TOK_CONST1:
4656 case TOK_CONST2:
4657 case TOK_CONST3:
4658 qualifiers |= VT_CONSTANT;
4659 goto redo;
4660 case TOK_VOLATILE1:
4661 case TOK_VOLATILE2:
4662 case TOK_VOLATILE3:
4663 qualifiers |= VT_VOLATILE;
4664 goto redo;
4665 case TOK_RESTRICT1:
4666 case TOK_RESTRICT2:
4667 case TOK_RESTRICT3:
4668 goto redo;
4669 /* XXX: clarify attribute handling */
4670 case TOK_ATTRIBUTE1:
4671 case TOK_ATTRIBUTE2:
4672 parse_attribute(ad);
4673 break;
4675 mk_pointer(type);
4676 type->t |= qualifiers;
4677 if (ret == type)
4678 /* innermost pointed to type is the one for the first derivation */
4679 ret = pointed_type(type);
4682 if (tok == '(') {
4683 /* This is possibly a parameter type list for abstract declarators
4684 ('int ()'), use post_type for testing this. */
4685 if (!post_type(type, ad, 0, td)) {
4686 /* It's not, so it's a nested declarator, and the post operations
4687 apply to the innermost pointed to type (if any). */
4688 /* XXX: it is not correct to modify 'ad' at this point, but
4689 the syntax is not clear */
4690 parse_attribute(ad);
4691 post = type_decl(type, ad, v, td);
4692 skip(')');
4693 } else
4694 goto abstract;
4695 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4696 /* type identifier */
4697 *v = tok;
4698 next();
4699 } else {
4700 abstract:
4701 if (!(td & TYPE_ABSTRACT))
4702 expect("identifier");
4703 *v = 0;
4705 post_type(post, ad, storage, 0);
4706 parse_attribute(ad);
4707 type->t |= storage;
4708 return ret;
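
/* Examples of what type_decl() composes around the basic type
   (identifiers purely illustrative):

       const char *const *argvp;        pointer chain with qualifiers
       int (*make_table(void))[16];     nested declarator: a function
                                        returning a pointer to int[16]

   For a pointer chain the returned CType is the innermost pointed-to
   type, as described above. */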
4711 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4712 ST_FUNC int lvalue_type(int t)
4714 int bt, r;
4715 r = VT_LVAL;
4716 bt = t & VT_BTYPE;
4717 if (bt == VT_BYTE || bt == VT_BOOL)
4718 r |= VT_LVAL_BYTE;
4719 else if (bt == VT_SHORT)
4720 r |= VT_LVAL_SHORT;
4721 else
4722 return r;
4723 if (t & VT_UNSIGNED)
4724 r |= VT_LVAL_UNSIGNED;
4725 return r;
4728 /* indirection with full error checking and bound check */
4729 ST_FUNC void indir(void)
4731 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4732 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4733 return;
4734 expect("pointer");
4736 if (vtop->r & VT_LVAL)
4737 gv(RC_INT);
4738 vtop->type = *pointed_type(&vtop->type);
4739 /* Arrays and functions are never lvalues */
4740 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4741 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4742 vtop->r |= lvalue_type(vtop->type.t);
4743 /* if bound checking, the referenced pointer must be checked */
4744 #ifdef CONFIG_TCC_BCHECK
4745 if (tcc_state->do_bounds_check)
4746 vtop->r |= VT_MUSTBOUND;
4747 #endif
4751 /* pass a parameter to a function and do type checking and casting */
4752 static void gfunc_param_typed(Sym *func, Sym *arg)
4754 int func_type;
4755 CType type;
4757 func_type = func->f.func_type;
4758 if (func_type == FUNC_OLD ||
4759 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4760 /* default casting : only need to convert float to double */
4761 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4762 gen_cast_s(VT_DOUBLE);
4763 } else if (vtop->type.t & VT_BITFIELD) {
4764 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4765 type.ref = vtop->type.ref;
4766 gen_cast(&type);
4768 } else if (arg == NULL) {
4769 tcc_error("too many arguments to function");
4770 } else {
4771 type = arg->type;
4772 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4773 gen_assign_cast(&type);
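
/* Example of the default casting applied here when the callee is
   old-style or variadic (the call itself is purely illustrative): in

       printf("%f\n", 1.0f);

   the float argument is widened to double before the call, and a
   bit-field argument would first be converted to its underlying
   integer type. */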
4777 /* parse an expression and return its type without any side effect. */
4778 static void expr_type(CType *type, void (*expr_fn)(void))
4780 nocode_wanted++;
4781 expr_fn();
4782 *type = vtop->type;
4783 vpop();
4784 nocode_wanted--;
4787 /* parse an expression of the form '(type)' or '(expr)' and return its
4788 type */
4789 static void parse_expr_type(CType *type)
4791 int n;
4792 AttributeDef ad;
4794 skip('(');
4795 if (parse_btype(type, &ad)) {
4796 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4797 } else {
4798 expr_type(type, gexpr);
4800 skip(')');
4803 static void parse_type(CType *type)
4805 AttributeDef ad;
4806 int n;
4808 if (!parse_btype(type, &ad)) {
4809 expect("type");
4811 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4814 static void parse_builtin_params(int nc, const char *args)
4816 char c, sep = '(';
4817 CType t;
4818 if (nc)
4819 nocode_wanted++;
4820 next();
4821 while ((c = *args++)) {
4822 skip(sep);
4823 sep = ',';
4824 switch (c) {
4825 case 'e': expr_eq(); continue;
4826 case 't': parse_type(&t); vpush(&t); continue;
4827 default: tcc_error("internal error"); break;
4830 skip(')');
4831 if (nc)
4832 nocode_wanted--;
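
/* The args string is a tiny spec: 'e' parses an assignment expression,
   't' parses a type name, and a non-zero nc suppresses code generation
   while parsing.  E.g. the builtin handled further down parses two
   type names:

       parse_builtin_params(0, "tt");   for __builtin_types_compatible_p
*/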
4835 ST_FUNC void unary(void)
4837 int n, t, align, size, r, sizeof_caller;
4838 CType type;
4839 Sym *s;
4840 AttributeDef ad;
4842 sizeof_caller = in_sizeof;
4843 in_sizeof = 0;
4844 type.ref = NULL;
4845 /* XXX: GCC 2.95.3 does not generate a table although it should be
4846 better here */
4847 tok_next:
4848 switch(tok) {
4849 case TOK_EXTENSION:
4850 next();
4851 goto tok_next;
4852 case TOK_LCHAR:
4853 #ifdef TCC_TARGET_PE
4854 t = VT_SHORT|VT_UNSIGNED;
4855 goto push_tokc;
4856 #endif
4857 case TOK_CINT:
4858 case TOK_CCHAR:
4859 t = VT_INT;
4860 push_tokc:
4861 type.t = t;
4862 vsetc(&type, VT_CONST, &tokc);
4863 next();
4864 break;
4865 case TOK_CUINT:
4866 t = VT_INT | VT_UNSIGNED;
4867 goto push_tokc;
4868 case TOK_CLLONG:
4869 t = VT_LLONG;
4870 goto push_tokc;
4871 case TOK_CULLONG:
4872 t = VT_LLONG | VT_UNSIGNED;
4873 goto push_tokc;
4874 case TOK_CFLOAT:
4875 t = VT_FLOAT;
4876 goto push_tokc;
4877 case TOK_CDOUBLE:
4878 t = VT_DOUBLE;
4879 goto push_tokc;
4880 case TOK_CLDOUBLE:
4881 t = VT_LDOUBLE;
4882 goto push_tokc;
4883 case TOK_CLONG:
4884 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4885 goto push_tokc;
4886 case TOK_CULONG:
4887 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4888 goto push_tokc;
4889 case TOK___FUNCTION__:
4890 if (!gnu_ext)
4891 goto tok_identifier;
4892 /* fall thru */
4893 case TOK___FUNC__:
4895 void *ptr;
4896 int len;
4897 /* special function name identifier */
4898 len = strlen(funcname) + 1;
4899 /* generate char[len] type */
4900 type.t = VT_BYTE;
4901 mk_pointer(&type);
4902 type.t |= VT_ARRAY;
4903 type.ref->c = len;
4904 vpush_ref(&type, data_section, data_section->data_offset, len);
4905 if (!NODATA_WANTED) {
4906 ptr = section_ptr_add(data_section, len);
4907 memcpy(ptr, funcname, len);
4909 next();
4911 break;
4912 case TOK_LSTR:
4913 #ifdef TCC_TARGET_PE
4914 t = VT_SHORT | VT_UNSIGNED;
4915 #else
4916 t = VT_INT;
4917 #endif
4918 goto str_init;
4919 case TOK_STR:
4920 /* string parsing */
4921 t = VT_BYTE;
4922 if (tcc_state->char_is_unsigned)
4923 t = VT_BYTE | VT_UNSIGNED;
4924 str_init:
4925 if (tcc_state->warn_write_strings)
4926 t |= VT_CONSTANT;
4927 type.t = t;
4928 mk_pointer(&type);
4929 type.t |= VT_ARRAY;
4930 memset(&ad, 0, sizeof(AttributeDef));
4931 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4932 break;
4933 case '(':
4934 next();
4935 /* cast ? */
4936 if (parse_btype(&type, &ad)) {
4937 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4938 skip(')');
4939 /* check ISOC99 compound literal */
4940 if (tok == '{') {
4941 /* data is allocated locally by default */
4942 if (global_expr)
4943 r = VT_CONST;
4944 else
4945 r = VT_LOCAL;
4946 /* all except arrays are lvalues */
4947 if (!(type.t & VT_ARRAY))
4948 r |= lvalue_type(type.t);
4949 memset(&ad, 0, sizeof(AttributeDef));
4950 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4951 } else {
4952 if (sizeof_caller) {
4953 vpush(&type);
4954 return;
4956 unary();
4957 gen_cast(&type);
4959 } else if (tok == '{') {
4960 int saved_nocode_wanted = nocode_wanted;
4961 if (const_wanted)
4962 tcc_error("expected constant");
4963 /* save all registers */
4964 save_regs(0);
4965 /* statement expression : we do not accept break/continue
4966 inside as GCC does. We do retain the nocode_wanted state,
4967 as statement expressions can't ever be entered from the
4968 outside, so any reactivation of code emission (from labels
4969 or loop heads) can be disabled again after the end of it. */
4970 block(1);
4971 nocode_wanted = saved_nocode_wanted;
4972 skip(')');
4973 } else {
4974 gexpr();
4975 skip(')');
4977 break;
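
/* The '(' case above covers three distinct forms; illustrative
   examples (the last one needs the GNU extensions):

       y = (unsigned char)x;              a cast
       p = &(struct point){ .x = 1 };     an ISOC99 compound literal
       z = ({ int t = f(); t * 2; });     a statement expression
*/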
4978 case '*':
4979 next();
4980 unary();
4981 indir();
4982 break;
4983 case '&':
4984 next();
4985 unary();
4986 /* function names must be treated as function pointers,
4987 except for unary '&' and sizeof. Since we consider that
4988 functions are not lvalues, we only have to handle it
4989 there and in function calls. */
4990 /* arrays can also be used although they are not lvalues */
4991 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4992 !(vtop->type.t & VT_ARRAY))
4993 test_lvalue();
4994 mk_pointer(&vtop->type);
4995 gaddrof();
4996 break;
4997 case '!':
4998 next();
4999 unary();
5000 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5001 gen_cast_s(VT_BOOL);
5002 vtop->c.i = !vtop->c.i;
5003 } else if (vtop->r == VT_CMP) {
5004 vtop->cmp_op ^= 1;
5005 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5006 } else {
5007 vpushi(0);
5008 gen_op(TOK_EQ);
5010 break;
5011 case '~':
5012 next();
5013 unary();
5014 vpushi(-1);
5015 gen_op('^');
5016 break;
5017 case '+':
5018 next();
5019 unary();
5020 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5021 tcc_error("pointer not accepted for unary plus");
5022 /* In order to force a cast, we add zero, except for floating point
5023 where we really need a no-op (otherwise -0.0 would be transformed
5024 into +0.0). */
5025 if (!is_float(vtop->type.t)) {
5026 vpushi(0);
5027 gen_op('+');
5029 break;
5030 case TOK_SIZEOF:
5031 case TOK_ALIGNOF1:
5032 case TOK_ALIGNOF2:
5033 case TOK_ALIGNOF3:
5034 t = tok;
5035 next();
5036 in_sizeof++;
5037 expr_type(&type, unary); /* unary() resets in_sizeof to 0 */
5038 s = NULL;
5039 if (vtop[1].r & VT_SYM)
5040 s = vtop[1].sym; /* hack: accessing previous vtop */
5041 size = type_size(&type, &align);
5042 if (s && s->a.aligned)
5043 align = 1 << (s->a.aligned - 1);
5044 if (t == TOK_SIZEOF) {
5045 if (!(type.t & VT_VLA)) {
5046 if (size < 0)
5047 tcc_error("sizeof applied to an incomplete type");
5048 vpushs(size);
5049 } else {
5050 vla_runtime_type_size(&type, &align);
5052 } else {
5053 vpushs(align);
5055 vtop->type.t |= VT_UNSIGNED;
5056 break;
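
/* Illustrative uses of the operators handled by this case:

       int n = 10;
       size_t s = sizeof(int [n]);        VLA size, computed at run time
       size_t a = _Alignof(long double);  constant, pushed directly

   For complete non-VLA types a constant is pushed; for a VLA the size
   comes from vla_runtime_type_size() above. */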
5058 case TOK_builtin_expect:
5059 /* __builtin_expect is a no-op for now */
5060 parse_builtin_params(0, "ee");
5061 vpop();
5062 break;
5063 case TOK_builtin_types_compatible_p:
5064 parse_builtin_params(0, "tt");
5065 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5066 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5067 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5068 vtop -= 2;
5069 vpushi(n);
5070 break;
5071 case TOK_builtin_choose_expr:
5073 int64_t c;
5074 next();
5075 skip('(');
5076 c = expr_const64();
5077 skip(',');
5078 if (!c) {
5079 nocode_wanted++;
5081 expr_eq();
5082 if (!c) {
5083 vpop();
5084 nocode_wanted--;
5086 skip(',');
5087 if (c) {
5088 nocode_wanted++;
5090 expr_eq();
5091 if (c) {
5092 vpop();
5093 nocode_wanted--;
5095 skip(')');
5097 break;
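
/* Illustrative use of __builtin_choose_expr: the condition must be an
   integer constant expression and only the selected branch generates
   code, the other one being parsed with nocode_wanted raised:

       #define ABS(x) __builtin_choose_expr( \
           __builtin_types_compatible_p(__typeof__(x), double), \
           fabs(x), abs(x))

   The ABS macro is an example only. */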
5098 case TOK_builtin_constant_p:
5099 parse_builtin_params(1, "e");
5100 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5101 vtop--;
5102 vpushi(n);
5103 break;
5104 case TOK_builtin_frame_address:
5105 case TOK_builtin_return_address:
5107 int tok1 = tok;
5108 int level;
5109 next();
5110 skip('(');
5111 if (tok != TOK_CINT) {
5112 tcc_error("%s only takes positive integers",
5113 tok1 == TOK_builtin_return_address ?
5114 "__builtin_return_address" :
5115 "__builtin_frame_address");
5117 level = (uint32_t)tokc.i;
5118 next();
5119 skip(')');
5120 type.t = VT_VOID;
5121 mk_pointer(&type);
5122 vset(&type, VT_LOCAL, 0); /* local frame */
5123 while (level--) {
5124 mk_pointer(&vtop->type);
5125 indir(); /* -> parent frame */
5127 if (tok1 == TOK_builtin_return_address) {
5128 // assume return address is just above frame pointer on stack
5129 vpushi(PTR_SIZE);
5130 gen_op('+');
5131 mk_pointer(&vtop->type);
5132 indir();
5135 break;
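
/* Illustrative call (the argument must be a plain integer constant
   token):

       void *caller = __builtin_return_address(0);

   Level 0 reads just above the current frame pointer; each higher
   level follows one more saved frame pointer via the indir() loop
   above. */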
5136 #ifdef TCC_TARGET_X86_64
5137 #ifdef TCC_TARGET_PE
5138 case TOK_builtin_va_start:
5139 parse_builtin_params(0, "ee");
5140 r = vtop->r & VT_VALMASK;
5141 if (r == VT_LLOCAL)
5142 r = VT_LOCAL;
5143 if (r != VT_LOCAL)
5144 tcc_error("__builtin_va_start expects a local variable");
5145 vtop->r = r;
5146 vtop->type = char_pointer_type;
5147 vtop->c.i += 8;
5148 vstore();
5149 break;
5150 #else
5151 case TOK_builtin_va_arg_types:
5152 parse_builtin_params(0, "t");
5153 vpushi(classify_x86_64_va_arg(&vtop->type));
5154 vswap();
5155 vpop();
5156 break;
5157 #endif
5158 #endif
5160 #ifdef TCC_TARGET_ARM64
5161 case TOK___va_start: {
5162 parse_builtin_params(0, "ee");
5163 //xx check types
5164 gen_va_start();
5165 vpushi(0);
5166 vtop->type.t = VT_VOID;
5167 break;
5169 case TOK___va_arg: {
5170 parse_builtin_params(0, "et");
5171 type = vtop->type;
5172 vpop();
5173 //xx check types
5174 gen_va_arg(&type);
5175 vtop->type = type;
5176 break;
5178 case TOK___arm64_clear_cache: {
5179 parse_builtin_params(0, "ee");
5180 gen_clear_cache();
5181 vpushi(0);
5182 vtop->type.t = VT_VOID;
5183 break;
5185 #endif
5186 /* pre operations */
5187 case TOK_INC:
5188 case TOK_DEC:
5189 t = tok;
5190 next();
5191 unary();
5192 inc(0, t);
5193 break;
5194 case '-':
5195 next();
5196 unary();
5197 t = vtop->type.t & VT_BTYPE;
5198 if (is_float(t)) {
5199 /* In IEEE negate(x) isn't subtract(0,x), but rather
5200 subtract(-0, x). */
5201 vpush(&vtop->type);
5202 if (t == VT_FLOAT)
5203 vtop->c.f = -1.0 * 0.0;
5204 else if (t == VT_DOUBLE)
5205 vtop->c.d = -1.0 * 0.0;
5206 else
5207 vtop->c.ld = -1.0 * 0.0;
5208 } else
5209 vpushi(0);
5210 vswap();
5211 gen_op('-');
5212 break;
5213 case TOK_LAND:
5214 if (!gnu_ext)
5215 goto tok_identifier;
5216 next();
5217 /* allow taking the address of a label */
5218 if (tok < TOK_UIDENT)
5219 expect("label identifier");
5220 s = label_find(tok);
5221 if (!s) {
5222 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5223 } else {
5224 if (s->r == LABEL_DECLARED)
5225 s->r = LABEL_FORWARD;
5227 if (!s->type.t) {
5228 s->type.t = VT_VOID;
5229 mk_pointer(&s->type);
5230 s->type.t |= VT_STATIC;
5232 vpushsym(&s->type, s);
5233 next();
5234 break;
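
/* GNU label-address example (only accepted with gnu_ext); the value is
   typically consumed later by the computed goto handled in block():

       void skip_work(void)
       {
           void *p = &&done;
           goto *p;
       done:
           return;
       }

   The identifiers are illustrative. */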
5236 case TOK_GENERIC:
5238 CType controlling_type;
5239 int has_default = 0;
5240 int has_match = 0;
5241 int learn = 0;
5242 TokenString *str = NULL;
5243 int saved_const_wanted = const_wanted;
5245 next();
5246 skip('(');
5247 const_wanted = 0;
5248 expr_type(&controlling_type, expr_eq);
5249 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5250 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5251 mk_pointer(&controlling_type);
5252 const_wanted = saved_const_wanted;
5253 for (;;) {
5254 learn = 0;
5255 skip(',');
5256 if (tok == TOK_DEFAULT) {
5257 if (has_default)
5258 tcc_error("too many 'default'");
5259 has_default = 1;
5260 if (!has_match)
5261 learn = 1;
5262 next();
5263 } else {
5264 AttributeDef ad_tmp;
5265 int itmp;
5266 CType cur_type;
5268 in_generic++;
5269 parse_btype(&cur_type, &ad_tmp);
5270 in_generic--;
5272 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5273 if (compare_types(&controlling_type, &cur_type, 0)) {
5274 if (has_match) {
5275 tcc_error("type match twice");
5277 has_match = 1;
5278 learn = 1;
5281 skip(':');
5282 if (learn) {
5283 if (str)
5284 tok_str_free(str);
5285 skip_or_save_block(&str);
5286 } else {
5287 skip_or_save_block(NULL);
5289 if (tok == ')')
5290 break;
5292 if (!str) {
5293 char buf[60];
5294 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5295 tcc_error("type '%s' does not match any association", buf);
5297 begin_macro(str, 1);
5298 next();
5299 expr_eq();
5300 if (tok != TOK_EOF)
5301 expect(",");
5302 end_macro();
5303 next();
5304 break;
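
/* Illustrative _Generic expression as parsed above: the controlling
   type loses qualifiers and array-ness, each association type is
   checked with compare_types(), and only the matching branch is
   re-parsed through begin_macro():

       #define type_id(x) _Generic((x), int: 1, char *: 2, default: 0)

   The macro name is illustrative. */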
5306 // special qnan , snan and infinity values
5307 case TOK___NAN__:
5308 n = 0x7fc00000;
5309 special_math_val:
5310 vpushi(n);
5311 vtop->type.t = VT_FLOAT;
5312 next();
5313 break;
5314 case TOK___SNAN__:
5315 n = 0x7f800001;
5316 goto special_math_val;
5317 case TOK___INF__:
5318 n = 0x7f800000;
5319 goto special_math_val;
5321 default:
5322 tok_identifier:
5323 t = tok;
5324 next();
5325 if (t < TOK_UIDENT)
5326 expect("identifier");
5327 s = sym_find(t);
5328 if (!s || IS_ASM_SYM(s)) {
5329 const char *name = get_tok_str(t, NULL);
5330 if (tok != '(')
5331 tcc_error("'%s' undeclared", name);
5332 /* for simple function calls, we tolerate an undeclared
5333 external reference to an int() function */
5334 if (tcc_state->warn_implicit_function_declaration
5335 #ifdef TCC_TARGET_PE
5336 /* people must be warned about using undeclared WINAPI functions
5337 (which usually start with an uppercase letter) */
5338 || (name[0] >= 'A' && name[0] <= 'Z')
5339 #endif
5341 tcc_warning("implicit declaration of function '%s'", name);
5342 s = external_global_sym(t, &func_old_type);
5345 r = s->r;
5346 /* A symbol that has a register is a local register variable,
5347 which starts out as a VT_LOCAL value. */
5348 if ((r & VT_VALMASK) < VT_CONST)
5349 r = (r & ~VT_VALMASK) | VT_LOCAL;
5351 vset(&s->type, r, s->c);
5352 /* Point to s as backpointer (even without r&VT_SYM).
5353 Will be used by at least the x86 inline asm parser for
5354 regvars. */
5355 vtop->sym = s;
5357 if (r & VT_SYM) {
5358 vtop->c.i = 0;
5359 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5360 vtop->c.i = s->enum_val;
5362 break;
5365 /* post operations */
5366 while (1) {
5367 if (tok == TOK_INC || tok == TOK_DEC) {
5368 inc(1, tok);
5369 next();
5370 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5371 int qualifiers, cumofs = 0;
5372 /* field */
5373 if (tok == TOK_ARROW)
5374 indir();
5375 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5376 test_lvalue();
5377 gaddrof();
5378 /* expect pointer on structure */
5379 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5380 expect("struct or union");
5381 if (tok == TOK_CDOUBLE)
5382 expect("field name");
5383 next();
5384 if (tok == TOK_CINT || tok == TOK_CUINT)
5385 expect("field name");
5386 s = find_field(&vtop->type, tok, &cumofs);
5387 if (!s)
5388 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5389 /* add field offset to pointer */
5390 vtop->type = char_pointer_type; /* change type to 'char *' */
5391 vpushi(cumofs + s->c);
5392 gen_op('+');
5393 /* change type to field type, and set to lvalue */
5394 vtop->type = s->type;
5395 vtop->type.t |= qualifiers;
5396 /* an array is never an lvalue */
5397 if (!(vtop->type.t & VT_ARRAY)) {
5398 vtop->r |= lvalue_type(vtop->type.t);
5399 #ifdef CONFIG_TCC_BCHECK
5400 /* if bound checking, the referenced pointer must be checked */
5401 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5402 vtop->r |= VT_MUSTBOUND;
5403 #endif
5405 next();
5406 } else if (tok == '[') {
5407 next();
5408 gexpr();
5409 gen_op('+');
5410 indir();
5411 skip(']');
5412 } else if (tok == '(') {
5413 SValue ret;
5414 Sym *sa;
5415 int nb_args, ret_nregs, ret_align, regsize, variadic;
5417 /* function call */
5418 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5419 /* pointer test (no array accepted) */
5420 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5421 vtop->type = *pointed_type(&vtop->type);
5422 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5423 goto error_func;
5424 } else {
5425 error_func:
5426 expect("function pointer");
5428 } else {
5429 vtop->r &= ~VT_LVAL; /* no lvalue */
5431 /* get return type */
5432 s = vtop->type.ref;
5433 next();
5434 sa = s->next; /* first parameter */
5435 nb_args = regsize = 0;
5436 ret.r2 = VT_CONST;
5437 /* compute first implicit argument if a structure is returned */
5438 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5439 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5440 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5441 &ret_align, &regsize);
5442 if (!ret_nregs) {
5443 /* get some space for the returned structure */
5444 size = type_size(&s->type, &align);
5445 #ifdef TCC_TARGET_ARM64
5446 /* On arm64, a small struct is returned in registers.
5447 It is much easier to write it to memory if we know
5448 that we are allowed to write some extra bytes, so
5449 round the allocated space up to a power of 2: */
5450 if (size < 16)
5451 while (size & (size - 1))
5452 size = (size | (size - 1)) + 1;
5453 #endif
5454 loc = (loc - size) & -align;
5455 ret.type = s->type;
5456 ret.r = VT_LOCAL | VT_LVAL;
5457 /* pass it as 'int' to avoid structure arg passing
5458 problems */
5459 vseti(VT_LOCAL, loc);
5460 ret.c = vtop->c;
5461 nb_args++;
5463 } else {
5464 ret_nregs = 1;
5465 ret.type = s->type;
5468 if (ret_nregs) {
5469 /* return in register */
5470 if (is_float(ret.type.t)) {
5471 ret.r = reg_fret(ret.type.t);
5472 #ifdef TCC_TARGET_X86_64
5473 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5474 ret.r2 = REG_QRET;
5475 #endif
5476 } else {
5477 #ifndef TCC_TARGET_ARM64
5478 #ifndef TCC_TARGET_RISCV64
5479 #ifdef TCC_TARGET_X86_64
5480 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5481 #else
5482 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5483 #endif
5484 ret.r2 = REG_LRET;
5485 #endif
5486 #endif
5487 ret.r = REG_IRET;
5489 ret.c.i = 0;
5491 if (tok != ')') {
5492 for(;;) {
5493 expr_eq();
5494 gfunc_param_typed(s, sa);
5495 nb_args++;
5496 if (sa)
5497 sa = sa->next;
5498 if (tok == ')')
5499 break;
5500 skip(',');
5503 if (sa)
5504 tcc_error("too few arguments to function");
5505 skip(')');
5506 gfunc_call(nb_args);
5508 /* return value */
5509 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5510 vsetc(&ret.type, r, &ret.c);
5511 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5514 /* handle packed struct return */
5515 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5516 int addr, offset;
5518 size = type_size(&s->type, &align);
5519 /* We're writing whole regs often, make sure there's enough
5520 space. Assume register size is power of 2. */
5521 if (regsize > align)
5522 align = regsize;
5523 loc = (loc - size) & -align;
5524 addr = loc;
5525 offset = 0;
5526 for (;;) {
5527 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5528 vswap();
5529 vstore();
5530 vtop--;
5531 if (--ret_nregs == 0)
5532 break;
5533 offset += regsize;
5535 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5537 if (s->f.func_noreturn)
5538 CODE_OFF();
5539 } else {
5540 break;
5545 ST_FUNC void expr_prod(void)
5547 int t;
5549 unary();
5550 while (tok == '*' || tok == '/' || tok == '%') {
5551 t = tok;
5552 next();
5553 unary();
5554 gen_op(t);
5558 ST_FUNC void expr_sum(void)
5560 int t;
5562 expr_prod();
5563 while (tok == '+' || tok == '-') {
5564 t = tok;
5565 next();
5566 expr_prod();
5567 gen_op(t);
5571 static void expr_shift(void)
5573 int t;
5575 expr_sum();
5576 while (tok == TOK_SHL || tok == TOK_SAR) {
5577 t = tok;
5578 next();
5579 expr_sum();
5580 gen_op(t);
5584 static void expr_cmp(void)
5586 int t;
5588 expr_shift();
5589 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5590 tok == TOK_ULT || tok == TOK_UGE) {
5591 t = tok;
5592 next();
5593 expr_shift();
5594 gen_op(t);
5598 static void expr_cmpeq(void)
5600 int t;
5602 expr_cmp();
5603 while (tok == TOK_EQ || tok == TOK_NE) {
5604 t = tok;
5605 next();
5606 expr_cmp();
5607 gen_op(t);
5611 static void expr_and(void)
5613 expr_cmpeq();
5614 while (tok == '&') {
5615 next();
5616 expr_cmpeq();
5617 gen_op('&');
5621 static void expr_xor(void)
5623 expr_and();
5624 while (tok == '^') {
5625 next();
5626 expr_and();
5627 gen_op('^');
5631 static void expr_or(void)
5633 expr_xor();
5634 while (tok == '|') {
5635 next();
5636 expr_xor();
5637 gen_op('|');
5641 static int condition_3way(void);
5643 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5645 int t = 0, cc = 1, f = 0, c;
5646 for(;;) {
5647 c = f ? i : condition_3way();
5648 if (c < 0) {
5649 save_regs(1), cc = 0;
5650 } else if (c != i) {
5651 nocode_wanted++, f = 1;
5653 if (tok != e_op) {
5654 if (cc || f) {
5655 vpop();
5656 vpushi(i ^ f);
5657 gsym(t);
5658 nocode_wanted -= f;
5659 } else {
5660 gvtst_set(i, t);
5662 break;
5664 if (c < 0)
5665 t = gvtst(i, t);
5666 else
5667 vpop();
5668 next();
5669 e_fn();
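
/* Sketch of the effect: when condition_3way() can evaluate an operand
   statically, the remaining operands are parsed with nocode_wanted
   raised, so e.g. (illustrative)

       if (sizeof(long) == 8 && init_64bit())
           ...

   either emits a plain call or nothing at all, but never a run-time
   test of the constant left-hand side. */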
5673 static void expr_land(void)
5675 expr_or();
5676 if (tok == TOK_LAND)
5677 expr_landor(expr_or, TOK_LAND, 1);
5680 static void expr_lor(void)
5682 expr_land();
5683 if (tok == TOK_LOR)
5684 expr_landor(expr_land, TOK_LOR, 0);
5687 /* Assuming vtop is a value used in a conditional context
5688 (i.e. compared with zero) return 0 if it's false, 1 if
5689 true and -1 if it can't be statically determined. */
5690 static int condition_3way(void)
5692 int c = -1;
5693 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5694 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5695 vdup();
5696 gen_cast_s(VT_BOOL);
5697 c = vtop->c.i;
5698 vpop();
5700 return c;
5703 static int is_cond_bool(SValue *sv)
5705 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5706 && (sv->type.t & VT_BTYPE) == VT_INT)
5707 return (unsigned)sv->c.i < 2;
5708 if (sv->r == VT_CMP)
5709 return 1;
5710 return 0;
5713 static void expr_cond(void)
5715 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5716 SValue sv;
5717 CType type, type1, type2;
5718 int ncw_prev;
5720 expr_lor();
5721 if (tok == '?') {
5722 next();
5723 c = condition_3way();
5724 g = (tok == ':' && gnu_ext);
5725 tt = 0;
5726 if (!g) {
5727 if (c < 0) {
5728 save_regs(1);
5729 tt = gvtst(1, 0);
5730 } else {
5731 vpop();
5733 } else if (c < 0) {
5734 /* needed to avoid having different registers saved in
5735 each branch */
5736 rc = RC_INT;
5737 if (is_float(vtop->type.t)) {
5738 rc = RC_FLOAT;
5739 #ifdef TCC_TARGET_X86_64
5740 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5741 rc = RC_ST0;
5743 #endif
5745 gv(rc);
5746 save_regs(1);
5747 gv_dup();
5748 tt = gvtst(0, 0);
5751 ncw_prev = nocode_wanted;
5752 if (1) {
5753 if (c == 0)
5754 nocode_wanted++;
5755 if (!g)
5756 gexpr();
5758 if (c < 0 && vtop->r == VT_CMP) {
5759 t1 = gvtst(0, 0);
5760 vpushi(0);
5761 gvtst_set(0, t1);
5764 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5765 mk_pointer(&vtop->type);
5766 type1 = vtop->type;
5767 sv = *vtop; /* save value to handle it later */
5768 vtop--; /* no vpop so that FP stack is not flushed */
5770 if (g) {
5771 u = tt;
5772 } else if (c < 0) {
5773 u = gjmp(0);
5774 gsym(tt);
5775 } else
5776 u = 0;
5778 nocode_wanted = ncw_prev;
5779 if (c == 1)
5780 nocode_wanted++;
5781 skip(':');
5782 expr_cond();
5784 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5785 if (sv.r == VT_CMP) {
5786 t1 = sv.jtrue;
5787 t2 = u;
5788 } else {
5789 t1 = gvtst(0, 0);
5790 t2 = gjmp(0);
5791 gsym(u);
5792 vpushv(&sv);
5794 gvtst_set(0, t1);
5795 gvtst_set(1, t2);
5796 nocode_wanted = ncw_prev;
5797 // tcc_warning("two conditions expr_cond");
5798 return;
5801 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5802 mk_pointer(&vtop->type);
5803 type2=vtop->type;
5804 t1 = type1.t;
5805 bt1 = t1 & VT_BTYPE;
5806 t2 = type2.t;
5807 bt2 = t2 & VT_BTYPE;
5808 type.ref = NULL;
5810 /* cast operands to correct type according to ISOC rules */
5811 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5812 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5813 } else if (is_float(bt1) || is_float(bt2)) {
5814 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5815 type.t = VT_LDOUBLE;
5817 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5818 type.t = VT_DOUBLE;
5819 } else {
5820 type.t = VT_FLOAT;
5822 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5823 /* cast to biggest op */
5824 type.t = VT_LLONG | VT_LONG;
5825 if (bt1 == VT_LLONG)
5826 type.t &= t1;
5827 if (bt2 == VT_LLONG)
5828 type.t &= t2;
5829 /* convert to unsigned if it does not fit in a long long */
5830 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5831 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5832 type.t |= VT_UNSIGNED;
5833 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5834 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5835 /* If one is a null ptr constant the result type
5836 is the other. */
5837 if (is_null_pointer (vtop)) type = type1;
5838 else if (is_null_pointer (&sv)) type = type2;
5839 else if (bt1 != bt2)
5840 tcc_error("incompatible types in conditional expressions");
5841 else {
5842 CType *pt1 = pointed_type(&type1);
5843 CType *pt2 = pointed_type(&type2);
5844 int pbt1 = pt1->t & VT_BTYPE;
5845 int pbt2 = pt2->t & VT_BTYPE;
5846 int newquals, copied = 0;
5847 /* pointers to void get preferred, otherwise the
5848 pointed to types minus qualifs should be compatible */
5849 type = (pbt1 == VT_VOID) ? type1 : type2;
5850 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5851 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5852 tcc_warning("pointer type mismatch in conditional expression\n");
5854 /* combine qualifs */
5855 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5856 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5857 & newquals)
5859 /* copy the pointer target symbol */
5860 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5861 0, type.ref->c);
5862 copied = 1;
5863 pointed_type(&type)->t |= newquals;
5865 /* pointers to incomplete arrays get converted to
5866 pointers to completed ones if possible */
5867 if (pt1->t & VT_ARRAY
5868 && pt2->t & VT_ARRAY
5869 && pointed_type(&type)->ref->c < 0
5870 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5872 if (!copied)
5873 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5874 0, type.ref->c);
5875 pointed_type(&type)->ref =
5876 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5877 0, pointed_type(&type)->ref->c);
5878 pointed_type(&type)->ref->c =
5879 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5882 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5883 /* XXX: test structure compatibility */
5884 type = bt1 == VT_STRUCT ? type1 : type2;
5885 } else {
5886 /* integer operations */
5887 type.t = VT_INT | (VT_LONG & (t1 | t2));
5888 /* convert to unsigned if it does not fit in an integer */
5889 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5890 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5891 type.t |= VT_UNSIGNED;
5893 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5894 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5895 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5897 /* now we convert second operand */
5898 if (c != 1) {
5899 gen_cast(&type);
5900 if (islv) {
5901 mk_pointer(&vtop->type);
5902 gaddrof();
5903 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5904 gaddrof();
5907 rc = RC_INT;
5908 if (is_float(type.t)) {
5909 rc = RC_FLOAT;
5910 #ifdef TCC_TARGET_X86_64
5911 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5912 rc = RC_ST0;
5914 #endif
5915 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5916 /* for long longs, we use fixed registers to avoid having
5917 to handle a complicated move */
5918 rc = RC_IRET;
5921 tt = r2 = 0;
5922 if (c < 0) {
5923 r2 = gv(rc);
5924 tt = gjmp(0);
5926 gsym(u);
5927 nocode_wanted = ncw_prev;
5929 /* this is horrible, but we must also convert first
5930 operand */
5931 if (c != 0) {
5932 *vtop = sv;
5933 gen_cast(&type);
5934 if (islv) {
5935 mk_pointer(&vtop->type);
5936 gaddrof();
5937 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5938 gaddrof();
5941 if (c < 0) {
5942 r1 = gv(rc);
5943 move_reg(r2, r1, type.t);
5944 vtop->r = r2;
5945 gsym(tt);
5948 if (islv)
5949 indir();
5954 static void expr_eq(void)
5956 int t;
5958 expr_cond();
5959 if (tok == '=' ||
5960 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5961 tok == TOK_A_XOR || tok == TOK_A_OR ||
5962 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5963 test_lvalue();
5964 t = tok;
5965 next();
5966 if (t == '=') {
5967 expr_eq();
5968 } else {
5969 vdup();
5970 expr_eq();
5971 gen_op(t & 0x7f);
5973 vstore();
5977 ST_FUNC void gexpr(void)
5979 while (1) {
5980 expr_eq();
5981 if (tok != ',')
5982 break;
5983 vpop();
5984 next();
5988 /* parse a constant expression and return value in vtop. */
5989 static void expr_const1(void)
5991 const_wanted++;
5992 nocode_wanted++;
5993 expr_cond();
5994 nocode_wanted--;
5995 const_wanted--;
5998 /* parse an integer constant and return its value. */
5999 static inline int64_t expr_const64(void)
6001 int64_t c;
6002 expr_const1();
6003 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6004 expect("constant expression");
6005 c = vtop->c.i;
6006 vpop();
6007 return c;
6010 /* parse an integer constant and return its value.
6011 Complain if it doesn't fit 32bit (signed or unsigned). */
6012 ST_FUNC int expr_const(void)
6014 int c;
6015 int64_t wc = expr_const64();
6016 c = wc;
6017 if (c != wc && (unsigned)c != wc)
6018 tcc_error("constant exceeds 32 bit");
6019 return c;
6022 /* ------------------------------------------------------------------------- */
6023 /* return from function */
6025 #ifndef TCC_TARGET_ARM64
6026 #ifndef TCC_TARGET_RISCV64
6027 static void gfunc_return(CType *func_type)
6029 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6030 CType type, ret_type;
6031 int ret_align, ret_nregs, regsize;
6032 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6033 &ret_align, &regsize);
6034 if (0 == ret_nregs) {
6035 /* if returning structure, must copy it to implicit
6036 first pointer arg location */
6037 type = *func_type;
6038 mk_pointer(&type);
6039 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6040 indir();
6041 vswap();
6042 /* copy structure value to pointer */
6043 vstore();
6044 } else {
6045 /* returning structure packed into registers */
6046 int r, size, addr, align;
6047 size = type_size(func_type,&align);
6048 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6049 (vtop->c.i & (ret_align-1)))
6050 && (align & (ret_align-1))) {
6051 loc = (loc - size) & -ret_align;
6052 addr = loc;
6053 type = *func_type;
6054 vset(&type, VT_LOCAL | VT_LVAL, addr);
6055 vswap();
6056 vstore();
6057 vpop();
6058 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6060 vtop->type = ret_type;
6061 if (is_float(ret_type.t))
6062 r = rc_fret(ret_type.t);
6063 else
6064 r = RC_IRET;
6066 if (ret_nregs == 1)
6067 gv(r);
6068 else {
6069 for (;;) {
6070 vdup();
6071 gv(r);
6072 vpop();
6073 if (--ret_nregs == 0)
6074 break;
6075 /* We assume that when a structure is returned in multiple
6076 registers, their classes are consecutive values of the
6077 sequence s(n) = 2^n */
6078 r <<= 1;
6079 vtop->c.i += regsize;
6083 } else if (is_float(func_type->t)) {
6084 gv(rc_fret(func_type->t));
6085 } else {
6086 gv(RC_IRET);
6088 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6090 #endif
6091 #endif
6093 static void check_func_return(void)
6095 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6096 return;
6097 if (!strcmp (funcname, "main")
6098 && (func_vt.t & VT_BTYPE) == VT_INT) {
6099 /* main returns 0 by default */
6100 vpushi(0);
6101 gen_assign_cast(&func_vt);
6102 gfunc_return(&func_vt);
6103 } else {
6104 tcc_warning("function might return no value: '%s'", funcname);
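
/* Consequence of the special case above (example):

       int main(void) { }

   compiles without a warning and returns 0, while any other non-void
   function that can fall off its end gets the "might return no value"
   warning. */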
6108 /* ------------------------------------------------------------------------- */
6109 /* switch/case */
6111 static int case_cmp(const void *pa, const void *pb)
6113 int64_t a = (*(struct case_t**) pa)->v1;
6114 int64_t b = (*(struct case_t**) pb)->v1;
6115 return a < b ? -1 : a > b;
6118 static void gtst_addr(int t, int a)
6120 gsym_addr(gvtst(0, t), a);
6123 static void gcase(struct case_t **base, int len, int *bsym)
6125 struct case_t *p;
6126 int e;
6127 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6128 while (len > 8) {
6129 /* binary search */
6130 p = base[len/2];
6131 vdup();
6132 if (ll)
6133 vpushll(p->v2);
6134 else
6135 vpushi(p->v2);
6136 gen_op(TOK_LE);
6137 e = gvtst(1, 0);
6138 vdup();
6139 if (ll)
6140 vpushll(p->v1);
6141 else
6142 vpushi(p->v1);
6143 gen_op(TOK_GE);
6144 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6145 /* x < v1 */
6146 gcase(base, len/2, bsym);
6147 /* x > v2 */
6148 gsym(e);
6149 e = len/2 + 1;
6150 base += e; len -= e;
6152 /* linear scan */
6153 while (len--) {
6154 p = *base++;
6155 vdup();
6156 if (ll)
6157 vpushll(p->v2);
6158 else
6159 vpushi(p->v2);
6160 if (p->v1 == p->v2) {
6161 gen_op(TOK_EQ);
6162 gtst_addr(0, p->sym);
6163 } else {
6164 gen_op(TOK_LE);
6165 e = gvtst(1, 0);
6166 vdup();
6167 if (ll)
6168 vpushll(p->v1);
6169 else
6170 vpushi(p->v1);
6171 gen_op(TOK_GE);
6172 gtst_addr(0, p->sym);
6173 gsym(e);
6176 *bsym = gjmp(*bsym);
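
/* The cases were sorted with case_cmp(), so gcase() can emit a binary
   search, falling back to a linear scan for 8 or fewer entries.  Case
   ranges come from the GNU "case low ... high:" extension, e.g.
   (handlers are illustrative):

       switch (c) {
       case '0' ... '9': digit(c); break;
       case ' ':         break;
       default:          other(c);
       }
*/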
6179 /* ------------------------------------------------------------------------- */
6180 /* __attribute__((cleanup(fn))) */
6182 static void try_call_scope_cleanup(Sym *stop)
6184 Sym *cls = cur_scope->cl.s;
6186 for (; cls != stop; cls = cls->ncl) {
6187 Sym *fs = cls->next;
6188 Sym *vs = cls->prev_tok;
6190 vpushsym(&fs->type, fs);
6191 vset(&vs->type, vs->r, vs->c);
6192 vtop->sym = vs;
6193 mk_pointer(&vtop->type);
6194 gaddrof();
6195 gfunc_call(1);
6199 static void try_call_cleanup_goto(Sym *cleanupstate)
6201 Sym *oc, *cc;
6202 int ocd, ccd;
6204 if (!cur_scope->cl.s)
6205 return;
6207 /* search NCA of both cleanup chains given parents and initial depth */
6208 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6209 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6211 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6213 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6216 try_call_scope_cleanup(cc);
6219 /* call 'func' for each __attribute__((cleanup(func))) */
6220 static void block_cleanup(struct scope *o)
6222 int jmp = 0;
6223 Sym *g, **pg;
6224 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6225 if (g->prev_tok->r & LABEL_FORWARD) {
6226 Sym *pcl = g->next;
6227 if (!jmp)
6228 jmp = gjmp(0);
6229 gsym(pcl->jnext);
6230 try_call_scope_cleanup(o->cl.s);
6231 pcl->jnext = gjmp(0);
6232 if (!o->cl.n)
6233 goto remove_pending;
6234 g->c = o->cl.n;
6235 pg = &g->prev;
6236 } else {
6237 remove_pending:
6238 *pg = g->prev;
6239 sym_free(g);
6242 gsym(jmp);
6243 try_call_scope_cleanup(o->cl.s);
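
/* Illustrative use of the attribute handled by these helpers: when the
   scope of "fd" is left, normally or via break/goto/return, the
   registered function is called with a pointer to the variable:

       void close_it(int *fd);

       void use(void)
       {
           __attribute__((cleanup(close_it))) int fd = open_it();
       }

   close_it() and open_it() are example names only. */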
6246 /* ------------------------------------------------------------------------- */
6247 /* VLA */
6249 static void vla_restore(int loc)
6251 if (loc)
6252 gen_vla_sp_restore(loc);
6255 static void vla_leave(struct scope *o)
6257 if (o->vla.num < cur_scope->vla.num)
6258 vla_restore(o->vla.loc);
6261 /* ------------------------------------------------------------------------- */
6262 /* local scopes */
6264 void new_scope(struct scope *o)
6266 /* copy and link previous scope */
6267 *o = *cur_scope;
6268 o->prev = cur_scope;
6269 cur_scope = o;
6271 /* record local declaration stack position */
6272 o->lstk = local_stack;
6273 o->llstk = local_label_stack;
6275 ++local_scope;
6278 void prev_scope(struct scope *o, int is_expr)
6280 vla_leave(o->prev);
6282 if (o->cl.s != o->prev->cl.s)
6283 block_cleanup(o->prev);
6285 /* pop locally defined labels */
6286 label_pop(&local_label_stack, o->llstk, is_expr);
6288 /* In the is_expr case (a statement expression is finished here),
6289 vtop might refer to symbols on the local_stack. Either via the
6290 type or via vtop->sym. We can't pop those nor any that in turn
6291 might be referred to. To make it easier we don't roll back
6292 any symbols in that case; some upper level call to block() will
6293 do that. We do have to remove such symbols from the lookup
6294 tables, though. sym_pop will do that. */
6296 /* pop locally defined symbols */
6297 sym_pop(&local_stack, o->lstk, is_expr);
6299 cur_scope = o->prev;
6300 --local_scope;
6303 /* leave a scope via break/continue(/goto) */
6304 void leave_scope(struct scope *o)
6306 if (!o)
6307 return;
6308 try_call_scope_cleanup(o->cl.s);
6309 vla_leave(o);
6312 /* ------------------------------------------------------------------------- */
6313 /* call block from 'for do while' loops */
6315 static void lblock(int *bsym, int *csym)
6317 struct scope *lo = loop_scope, *co = cur_scope;
6318 int *b = co->bsym, *c = co->csym;
6319 if (csym) {
6320 co->csym = csym;
6321 loop_scope = co;
6323 co->bsym = bsym;
6324 block(0);
6325 co->bsym = b;
6326 if (csym) {
6327 co->csym = c;
6328 loop_scope = lo;
6332 static void block(int is_expr)
6334 int a, b, c, d, e, t;
6335 Sym *s;
6337 if (is_expr) {
6338 /* default return value is (void) */
6339 vpushi(0);
6340 vtop->type.t = VT_VOID;
6343 again:
6344 t = tok, next();
6346 if (t == TOK_IF) {
6347 skip('(');
6348 gexpr();
6349 skip(')');
6350 a = gvtst(1, 0);
6351 block(0);
6352 if (tok == TOK_ELSE) {
6353 d = gjmp(0);
6354 gsym(a);
6355 next();
6356 block(0);
6357 gsym(d); /* patch else jmp */
6358 } else {
6359 gsym(a);
6362 } else if (t == TOK_WHILE) {
6363 d = gind();
6364 skip('(');
6365 gexpr();
6366 skip(')');
6367 a = gvtst(1, 0);
6368 b = 0;
6369 lblock(&a, &b);
6370 gjmp_addr(d);
6371 gsym_addr(b, d);
6372 gsym(a);
6374 } else if (t == '{') {
6375 struct scope o;
6376 new_scope(&o);
6378 /* handle local labels declarations */
6379 while (tok == TOK_LABEL) {
6380 do {
6381 next();
6382 if (tok < TOK_UIDENT)
6383 expect("label identifier");
6384 label_push(&local_label_stack, tok, LABEL_DECLARED);
6385 next();
6386 } while (tok == ',');
6387 skip(';');
6390 while (tok != '}') {
6391 decl(VT_LOCAL);
6392 if (tok != '}') {
6393 if (is_expr)
6394 vpop();
6395 block(is_expr);
6399 prev_scope(&o, is_expr);
6401 if (0 == local_scope && !nocode_wanted)
6402 check_func_return();
6403 next();
6405 } else if (t == TOK_RETURN) {
6406 a = tok != ';';
6407 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6408 if (a)
6409 gexpr(), gen_assign_cast(&func_vt);
6410 leave_scope(root_scope);
6411 if (a && b)
6412 gfunc_return(&func_vt);
6413 else if (a)
6414 vtop--;
6415 else if (b)
6416 tcc_warning("'return' with no value.");
6417 skip(';');
6418 /* jump unless last stmt in top-level block */
6419 if (tok != '}' || local_scope != 1)
6420 rsym = gjmp(rsym);
6421 CODE_OFF();
6423 } else if (t == TOK_BREAK) {
6424 /* compute jump */
6425 if (!cur_scope->bsym)
6426 tcc_error("cannot break");
6427 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6428 leave_scope(loop_scope);
6429 else
6430 leave_scope(cur_switch->scope);
6431 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6432 skip(';');
6434 } else if (t == TOK_CONTINUE) {
6435 /* compute jump */
6436 if (!cur_scope->csym)
6437 tcc_error("cannot continue");
6438 leave_scope(loop_scope);
6439 *cur_scope->csym = gjmp(*cur_scope->csym);
6440 skip(';');
6442 } else if (t == TOK_FOR) {
6443 struct scope o;
6444 new_scope(&o);
6446 skip('(');
6447 if (tok != ';') {
6448 /* c99 for-loop init decl? */
6449 if (!decl0(VT_LOCAL, 1, NULL)) {
6450 /* no, regular for-loop init expr */
6451 gexpr();
6452 vpop();
6455 skip(';');
6456 a = b = 0;
6457 c = d = gind();
6458 if (tok != ';') {
6459 gexpr();
6460 a = gvtst(1, 0);
6462 skip(';');
6463 if (tok != ')') {
6464 e = gjmp(0);
6465 d = gind();
6466 gexpr();
6467 vpop();
6468 gjmp_addr(c);
6469 gsym(e);
6471 skip(')');
6472 lblock(&a, &b);
6473 gjmp_addr(d);
6474 gsym_addr(b, d);
6475 gsym(a);
6476 prev_scope(&o, 0);
6478 } else if (t == TOK_DO) {
6479 a = b = 0;
6480 d = gind();
6481 lblock(&a, &b);
6482 gsym(b);
6483 skip(TOK_WHILE);
6484 skip('(');
6485 gexpr();
6486 skip(')');
6487 skip(';');
6488 c = gvtst(0, 0);
6489 gsym_addr(c, d);
6490 gsym(a);
6492 } else if (t == TOK_SWITCH) {
6493 struct switch_t *saved, sw;
6494 SValue switchval;
6496 sw.p = NULL;
6497 sw.n = 0;
6498 sw.def_sym = 0;
6499 sw.bsym = &a;
6500 sw.scope = cur_scope;
6502 saved = cur_switch;
6503 cur_switch = &sw;
6505 skip('(');
6506 gexpr();
6507 skip(')');
6508 switchval = *vtop--;
6510 a = 0;
6511 b = gjmp(0); /* jump to first case */
6512 lblock(&a, NULL);
6513 a = gjmp(a); /* add implicit break */
6514 /* case lookup */
6515 gsym(b);
6517 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6518 for (b = 1; b < sw.n; b++)
6519 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6520 tcc_error("duplicate case value");
6522 /* Our switch table sorting is signed, so the compared
6523 value needs to be as well when it's 64bit. */
6524 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6525 switchval.type.t &= ~VT_UNSIGNED;
6526 vpushv(&switchval);
6527 gv(RC_INT);
6528 d = 0, gcase(sw.p, sw.n, &d);
6529 vpop();
6530 if (sw.def_sym)
6531 gsym_addr(d, sw.def_sym);
6532 else
6533 gsym(d);
6534 /* break label */
6535 gsym(a);
6537 dynarray_reset(&sw.p, &sw.n);
6538 cur_switch = saved;
6540 } else if (t == TOK_CASE) {
6541 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6542 if (!cur_switch)
6543 expect("switch");
6544 cr->v1 = cr->v2 = expr_const64();
6545 if (gnu_ext && tok == TOK_DOTS) {
6546 next();
6547 cr->v2 = expr_const64();
6548 if (cr->v2 < cr->v1)
6549 tcc_warning("empty case range");
6551 cr->sym = gind();
6552 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6553 skip(':');
6554 is_expr = 0;
6555 goto block_after_label;
6557 } else if (t == TOK_DEFAULT) {
6558 if (!cur_switch)
6559 expect("switch");
6560 if (cur_switch->def_sym)
6561 tcc_error("too many 'default'");
6562 cur_switch->def_sym = gind();
6563 skip(':');
6564 is_expr = 0;
6565 goto block_after_label;
6567 } else if (t == TOK_GOTO) {
6568 vla_restore(root_scope->vla.loc);
6569 if (tok == '*' && gnu_ext) {
6570 /* computed goto */
6571 next();
6572 gexpr();
6573 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6574 expect("pointer");
6575 ggoto();
6577 } else if (tok >= TOK_UIDENT) {
6578 s = label_find(tok);
6579 /* put forward definition if needed */
6580 if (!s)
6581 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6582 else if (s->r == LABEL_DECLARED)
6583 s->r = LABEL_FORWARD;
6585 if (s->r & LABEL_FORWARD) {
6586 /* start new goto chain for cleanups, linked via label->next */
6587 if (cur_scope->cl.s && !nocode_wanted) {
6588 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6589 pending_gotos->prev_tok = s;
6590 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6591 pending_gotos->next = s;
6593 s->jnext = gjmp(s->jnext);
6594 } else {
6595 try_call_cleanup_goto(s->cleanupstate);
6596 gjmp_addr(s->jnext);
6598 next();
6600 } else {
6601 expect("label identifier");
6603 skip(';');
6605 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6606 asm_instr();
6608 } else {
6609 if (tok == ':' && t >= TOK_UIDENT) {
6610 /* label case */
6611 next();
6612 s = label_find(t);
6613 if (s) {
6614 if (s->r == LABEL_DEFINED)
6615 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6616 s->r = LABEL_DEFINED;
6617 if (s->next) {
6618 Sym *pcl; /* pending cleanup goto */
6619 for (pcl = s->next; pcl; pcl = pcl->prev)
6620 gsym(pcl->jnext);
6621 sym_pop(&s->next, NULL, 0);
6622 } else
6623 gsym(s->jnext);
6624 } else {
6625 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6627 s->jnext = gind();
6628 s->cleanupstate = cur_scope->cl.s;
6630 block_after_label:
6631 vla_restore(cur_scope->vla.loc);
6632 /* we accept this, but it is a mistake */
6633 if (tok == '}') {
6634 tcc_warning("deprecated use of label at end of compound statement");
6635 } else {
6636 goto again;
6639 } else {
6640 /* expression case */
6641 if (t != ';') {
6642 unget_tok(t);
6643 if (is_expr) {
6644 vpop();
6645 gexpr();
6646 } else {
6647 gexpr();
6648 vpop();
6650 skip(';');
6656 /* This skips over a stream of tokens containing balanced {} and ()
6657 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6658 with a '{'). If STR is non-NULL, allocates and stores the skipped tokens
6659 in *STR. This doesn't check if () and {} are nested correctly,
6660 i.e. "({)}" is accepted. */
6661 static void skip_or_save_block(TokenString **str)
6663 int braces = tok == '{';
6664 int level = 0;
6665 if (str)
6666 *str = tok_str_alloc();
6668 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6669 int t;
6670 if (tok == TOK_EOF) {
6671 if (str || level > 0)
6672 tcc_error("unexpected end of file");
6673 else
6674 break;
6676 if (str)
6677 tok_str_add_tok(*str);
6678 t = tok;
6679 next();
6680 if (t == '{' || t == '(') {
6681 level++;
6682 } else if (t == '}' || t == ')') {
6683 level--;
6684 if (level == 0 && braces && t == '}')
6685 break;
6688 if (str) {
6689 tok_str_add(*str, -1);
6690 tok_str_add(*str, 0);
6694 #define EXPR_CONST 1
6695 #define EXPR_ANY 2
6697 static void parse_init_elem(int expr_type)
6699 int saved_global_expr;
6700 switch(expr_type) {
6701 case EXPR_CONST:
6702 /* compound literals must be allocated globally in this case */
6703 saved_global_expr = global_expr;
6704 global_expr = 1;
6705 expr_const1();
6706 global_expr = saved_global_expr;
6707 /* NOTE: symbols are accepted, as well as lvalues on anonymous symbols
6708 (compound literals). */
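/* For instance, 'int x; int *p = &x;' and 'char *s = "abc";' at file scope
   both pass this check: '&x' is a constant symbol reference, and the string
   literal is an lvalue on an anonymous symbol. */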
6709 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6710 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6711 || vtop->sym->v < SYM_FIRST_ANOM))
6712 #ifdef TCC_TARGET_PE
6713 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6714 #endif
6716 tcc_error("initializer element is not constant");
6717 break;
6718 case EXPR_ANY:
6719 expr_eq();
6720 break;
6724 /* put zeros for variable based init */
6725 static void init_putz(Section *sec, unsigned long c, int size)
6727 if (sec) {
6728 /* nothing to do because globals are already set to zero */
6729 } else {
6730 vpush_global_sym(&func_old_type, TOK_memset);
6731 vseti(VT_LOCAL, c);
6732 #ifdef TCC_TARGET_ARM
6733 vpushs(size);
6734 vpushi(0);
6735 #else
6736 vpushi(0);
6737 vpushs(size);
6738 #endif
6739 gfunc_call(3);
6743 #define DIF_FIRST 1
6744 #define DIF_SIZE_ONLY 2
6745 #define DIF_HAVE_ELEM 4
6747 /* t is the array or struct type. c is the array or struct
6748 address. cur_field is the pointer to the current
6749 field; for arrays, its 'c' member contains the current start
6750 index. 'flags' is as in decl_initializer.
6751 'al' contains the already initialized length of the
6752 current container (starting at c). Returns the new length of that container. */
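/* Illustration: for 'int a[10] = { [2 ... 4] = 7 };' (GNU range designator)
   this parses '[2 ... 4]', sets nb_elems to 3 and advances c by
   2 * sizeof(int); for a '.field = x' designator it looks up the struct
   member instead. */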
6753 static int decl_designator(CType *type, Section *sec, unsigned long c,
6754 Sym **cur_field, int flags, int al)
6756 Sym *s, *f;
6757 int index, index_last, align, l, nb_elems, elem_size;
6758 unsigned long corig = c;
6760 elem_size = 0;
6761 nb_elems = 1;
6763 if (flags & DIF_HAVE_ELEM)
6764 goto no_designator;
6766 if (gnu_ext && tok >= TOK_UIDENT) {
6767 l = tok, next();
6768 if (tok == ':')
6769 goto struct_field;
6770 unget_tok(l);
6773 /* NOTE: we only support ranges for the last designator */
6774 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6775 if (tok == '[') {
6776 if (!(type->t & VT_ARRAY))
6777 expect("array type");
6778 next();
6779 index = index_last = expr_const();
6780 if (tok == TOK_DOTS && gnu_ext) {
6781 next();
6782 index_last = expr_const();
6784 skip(']');
6785 s = type->ref;
6786 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6787 index_last < index)
6788 tcc_error("invalid index");
6789 if (cur_field)
6790 (*cur_field)->c = index_last;
6791 type = pointed_type(type);
6792 elem_size = type_size(type, &align);
6793 c += index * elem_size;
6794 nb_elems = index_last - index + 1;
6795 } else {
6796 int cumofs;
6797 next();
6798 l = tok;
6799 struct_field:
6800 next();
6801 if ((type->t & VT_BTYPE) != VT_STRUCT)
6802 expect("struct/union type");
6803 cumofs = 0;
6804 f = find_field(type, l, &cumofs);
6805 if (!f)
6806 expect("field");
6807 if (cur_field)
6808 *cur_field = f;
6809 type = &f->type;
6810 c += cumofs + f->c;
6812 cur_field = NULL;
6814 if (!cur_field) {
6815 if (tok == '=') {
6816 next();
6817 } else if (!gnu_ext) {
6818 expect("=");
6820 } else {
6821 no_designator:
6822 if (type->t & VT_ARRAY) {
6823 index = (*cur_field)->c;
6824 if (type->ref->c >= 0 && index >= type->ref->c)
6825 tcc_error("index too large");
6826 type = pointed_type(type);
6827 c += index * type_size(type, &align);
6828 } else {
6829 f = *cur_field;
6830 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6831 *cur_field = f = f->next;
6832 if (!f)
6833 tcc_error("too many field init");
6834 type = &f->type;
6835 c += f->c;
6838 /* must put zero in holes (note that doing it that way
6839 ensures that it even works with designators) */
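/* e.g. for 'int a[4] = { [3] = 1 };' the elements a[0..2] are zero-filled
   here before the designated element is stored. */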
6840 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6841 init_putz(sec, corig + al, c - corig - al);
6842 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6844 /* XXX: make it more general */
6845 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6846 unsigned long c_end;
6847 uint8_t *src, *dst;
6848 int i;
6850 if (!sec) {
6851 vset(type, VT_LOCAL|VT_LVAL, c);
6852 for (i = 1; i < nb_elems; i++) {
6853 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6854 vswap();
6855 vstore();
6857 vpop();
6858 } else if (!NODATA_WANTED) {
6859 c_end = c + nb_elems * elem_size;
6860 if (c_end > sec->data_allocated)
6861 section_realloc(sec, c_end);
6862 src = sec->data + c;
6863 dst = src;
6864 for(i = 1; i < nb_elems; i++) {
6865 dst += elem_size;
6866 memcpy(dst, src, elem_size);
6870 c += nb_elems * type_size(type, &align);
6871 if (c - corig > al)
6872 al = c - corig;
6873 return al;
6876 /* store a value or an expression directly in global data or in local array */
6877 static void init_putv(CType *type, Section *sec, unsigned long c)
6879 int bt;
6880 void *ptr;
6881 CType dtype;
6883 dtype = *type;
6884 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6886 if (sec) {
6887 int size, align;
6888 /* XXX: not portable */
6889 /* XXX: generate error if incorrect relocation */
6890 gen_assign_cast(&dtype);
6891 bt = type->t & VT_BTYPE;
6893 if ((vtop->r & VT_SYM)
6894 && bt != VT_PTR
6895 && bt != VT_FUNC
6896 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6897 || (type->t & VT_BITFIELD))
6898 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6900 tcc_error("initializer element is not computable at load time");
6902 if (NODATA_WANTED) {
6903 vtop--;
6904 return;
6907 size = type_size(type, &align);
6908 section_reserve(sec, c + size);
6909 ptr = sec->data + c;
6911 /* XXX: make code faster ? */
6912 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6913 vtop->sym->v >= SYM_FIRST_ANOM &&
6914 /* XXX This rejects compound literals like
6915 '(void *){ptr}'. The problem is that '&sym' is
6916 represented the same way, which would be ruled out
6917 by the SYM_FIRST_ANOM check above, but also '"string"'
6918 in 'char *p = "string"' is represented the same
6919 with the type being VT_PTR and the symbol being an
6920 anonymous one. That is, there's no difference in vtop
6921 between '(void *){x}' and '&(void *){x}'. Ignore
6922 pointer-typed entities here. Hopefully no real code
6923 will ever use compound literals with scalar type. */
6924 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6925 /* These come from compound literals, memcpy stuff over. */
6926 Section *ssec;
6927 ElfSym *esym;
6928 ElfW_Rel *rel;
6929 esym = elfsym(vtop->sym);
6930 ssec = tcc_state->sections[esym->st_shndx];
6931 memmove (ptr, ssec->data + esym->st_value, size);
6932 if (ssec->reloc) {
6933 /* We need to copy over all memory contents, and that
6934 includes relocations. Use the fact that relocs are
6935 created in order, so look from the end of relocs
6936 until we hit one before the copied region. */
6937 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6938 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6939 while (num_relocs--) {
6940 rel--;
6941 if (rel->r_offset >= esym->st_value + size)
6942 continue;
6943 if (rel->r_offset < esym->st_value)
6944 break;
6945 /* Note: if the same fields are initialized multiple
6946 times (possible with designators) then we possibly
6947 add multiple relocations for the same offset here.
6948 That would lead to wrong code; the last reloc needs
6949 to win. We clean this up later after the whole
6950 initializer is parsed. */
6951 put_elf_reloca(symtab_section, sec,
6952 c + rel->r_offset - esym->st_value,
6953 ELFW(R_TYPE)(rel->r_info),
6954 ELFW(R_SYM)(rel->r_info),
6955 #if PTR_SIZE == 8
6956 rel->r_addend
6957 #else
6959 #endif
6963 } else {
6964 if (type->t & VT_BITFIELD) {
6965 int bit_pos, bit_size, bits, n;
6966 unsigned char *p, v, m;
6967 bit_pos = BIT_POS(vtop->type.t);
6968 bit_size = BIT_SIZE(vtop->type.t);
6969 p = (unsigned char*)ptr + (bit_pos >> 3);
6970 bit_pos &= 7, bits = 0;
6971 while (bit_size) {
6972 n = 8 - bit_pos;
6973 if (n > bit_size)
6974 n = bit_size;
6975 v = vtop->c.i >> bits << bit_pos;
6976 m = ((1 << n) - 1) << bit_pos;
6977 *p = (*p & ~m) | (v & m);
6978 bits += n, bit_size -= n, bit_pos = 0, ++p;
6980 } else
6981 switch(bt) {
6982 /* XXX: when cross-compiling we assume that each type has the
6983 same representation on host and target, which is likely to
6984 be wrong in the case of long double */
6985 case VT_BOOL:
6986 vtop->c.i = vtop->c.i != 0;
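/* fall through: the normalized 0/1 value is then stored as a byte */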
6987 case VT_BYTE:
6988 *(char *)ptr |= vtop->c.i;
6989 break;
6990 case VT_SHORT:
6991 *(short *)ptr |= vtop->c.i;
6992 break;
6993 case VT_FLOAT:
6994 *(float*)ptr = vtop->c.f;
6995 break;
6996 case VT_DOUBLE:
6997 *(double *)ptr = vtop->c.d;
6998 break;
6999 case VT_LDOUBLE:
7000 #if defined TCC_IS_NATIVE_387
7001 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7002 memcpy(ptr, &vtop->c.ld, 10);
7003 #ifdef __TINYC__
7004 else if (sizeof (long double) == sizeof (double))
7005 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7006 #endif
7007 else if (vtop->c.ld == 0.0)
7008 ;
7009 else
7010 #endif
7011 if (sizeof(long double) == LDOUBLE_SIZE)
7012 *(long double*)ptr = vtop->c.ld;
7013 else if (sizeof(double) == LDOUBLE_SIZE)
7014 *(double *)ptr = (double)vtop->c.ld;
7015 else
7016 tcc_error("can't cross compile long double constants");
7017 break;
7018 #if PTR_SIZE != 8
7019 case VT_LLONG:
7020 *(long long *)ptr |= vtop->c.i;
7021 break;
7022 #else
7023 case VT_LLONG:
7024 #endif
7025 case VT_PTR:
7027 addr_t val = vtop->c.i;
7028 #if PTR_SIZE == 8
7029 if (vtop->r & VT_SYM)
7030 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7031 else
7032 *(addr_t *)ptr |= val;
7033 #else
7034 if (vtop->r & VT_SYM)
7035 greloc(sec, vtop->sym, c, R_DATA_PTR);
7036 *(addr_t *)ptr |= val;
7037 #endif
7038 break;
7040 default:
7042 int val = vtop->c.i;
7043 #if PTR_SIZE == 8
7044 if (vtop->r & VT_SYM)
7045 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7046 else
7047 *(int *)ptr |= val;
7048 #else
7049 if (vtop->r & VT_SYM)
7050 greloc(sec, vtop->sym, c, R_DATA_PTR);
7051 *(int *)ptr |= val;
7052 #endif
7053 break;
7057 vtop--;
7058 } else {
7059 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7060 vswap();
7061 vstore();
7062 vpop();
7066 /* 't' contains the type and storage info. 'c' is the offset of the
7067 object in section 'sec'. If 'sec' is NULL, it means stack-based
7068 allocation. 'flags & DIF_FIRST' is true if the array '{' must be read (multi-
7069 dimensional implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7070 size-only evaluation is wanted (only for arrays). */
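/* A rough example: for 'char s[] = "abc";' a first pass with DIF_SIZE_ONLY
   only computes the element count (4, including the terminating zero) so
   that storage can be allocated before the second, real initializing pass. */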
7071 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7072 int flags)
7074 int len, n, no_oblock, nb, i;
7075 int size1, align1;
7076 Sym *s, *f;
7077 Sym indexsym;
7078 CType *t1;
7080 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7081 /* In case of strings we have special handling for arrays, so
7082 don't consume them as initializer value (which would commit them
7083 to some anonymous symbol). */
7084 tok != TOK_LSTR && tok != TOK_STR &&
7085 !(flags & DIF_SIZE_ONLY)) {
7086 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7087 flags |= DIF_HAVE_ELEM;
7090 if ((flags & DIF_HAVE_ELEM) &&
7091 !(type->t & VT_ARRAY) &&
7092 /* Compare unqualified types here to strip toplevel qualifiers.
7093 The source type might have VT_CONSTANT set, which is
7094 of course assignable to non-const elements. */
7095 is_compatible_unqualified_types(type, &vtop->type)) {
7096 init_putv(type, sec, c);
7097 } else if (type->t & VT_ARRAY) {
7098 s = type->ref;
7099 n = s->c;
7100 t1 = pointed_type(type);
7101 size1 = type_size(t1, &align1);
7103 no_oblock = 1;
7104 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7105 tok == '{') {
7106 if (tok != '{')
7107 tcc_error("character array initializer must be a literal,"
7108 " optionally enclosed in braces");
7109 skip('{');
7110 no_oblock = 0;
7113 /* only parse strings here if the type is correct (otherwise handle
7114 them as ((w)char *) expressions) */
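/* e.g. 'char s[] = "hi";' takes this branch; for 'char *tab[] = { "a" };'
   the element type is a pointer, so the literal is instead parsed as an
   ordinary (char *) expression further below. */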
7115 if ((tok == TOK_LSTR &&
7116 #ifdef TCC_TARGET_PE
7117 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7118 #else
7119 (t1->t & VT_BTYPE) == VT_INT
7120 #endif
7121 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7122 len = 0;
7123 while (tok == TOK_STR || tok == TOK_LSTR) {
7124 int cstr_len, ch;
7126 /* compute maximum number of chars wanted */
7127 if (tok == TOK_STR)
7128 cstr_len = tokc.str.size;
7129 else
7130 cstr_len = tokc.str.size / sizeof(nwchar_t);
7131 cstr_len--;
7132 nb = cstr_len;
7133 if (n >= 0 && nb > (n - len))
7134 nb = n - len;
7135 if (!(flags & DIF_SIZE_ONLY)) {
7136 if (cstr_len > nb)
7137 tcc_warning("initializer-string for array is too long");
7138 /* in order to go faster for the common case (char
7139 string in a global variable), we handle it
7140 specifically */
7141 if (sec && tok == TOK_STR && size1 == 1) {
7142 if (!NODATA_WANTED)
7143 memcpy(sec->data + c + len, tokc.str.data, nb);
7144 } else {
7145 for(i=0;i<nb;i++) {
7146 if (tok == TOK_STR)
7147 ch = ((unsigned char *)tokc.str.data)[i];
7148 else
7149 ch = ((nwchar_t *)tokc.str.data)[i];
7150 vpushi(ch);
7151 init_putv(t1, sec, c + (len + i) * size1);
7155 len += nb;
7156 next();
7158 /* only add trailing zero if enough storage (no
7159 warning in this case since it is standard) */
7160 if (n < 0 || len < n) {
7161 if (!(flags & DIF_SIZE_ONLY)) {
7162 vpushi(0);
7163 init_putv(t1, sec, c + (len * size1));
7165 len++;
7167 len *= size1;
7168 } else {
7169 indexsym.c = 0;
7170 f = &indexsym;
7172 do_init_list:
7173 len = 0;
7174 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7175 len = decl_designator(type, sec, c, &f, flags, len);
7176 flags &= ~DIF_HAVE_ELEM;
7177 if (type->t & VT_ARRAY) {
7178 ++indexsym.c;
7179 /* special test for multi dimensional arrays (may not
7180 be strictly correct if designators are used at the
7181 same time) */
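/* e.g. in 'int m[2][3] = { 1, 2, 3, 4, 5, 6 };' each braceless inner row
   stops after 3 elements (len reaches n*size1) so the next row can start. */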
7182 if (no_oblock && len >= n*size1)
7183 break;
7184 } else {
7185 if (s->type.t == VT_UNION)
7186 f = NULL;
7187 else
7188 f = f->next;
7189 if (no_oblock && f == NULL)
7190 break;
7193 if (tok == '}')
7194 break;
7195 skip(',');
7198 /* put zeros at the end */
7199 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7200 init_putz(sec, c + len, n*size1 - len);
7201 if (!no_oblock)
7202 skip('}');
7203 /* patch type size if needed, which happens only for array types */
7204 if (n < 0)
7205 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7206 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7207 size1 = 1;
7208 no_oblock = 1;
7209 if ((flags & DIF_FIRST) || tok == '{') {
7210 skip('{');
7211 no_oblock = 0;
7213 s = type->ref;
7214 f = s->next;
7215 n = s->c;
7216 goto do_init_list;
7217 } else if (tok == '{') {
7218 if (flags & DIF_HAVE_ELEM)
7219 skip(';');
7220 next();
7221 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7222 skip('}');
7223 } else if ((flags & DIF_SIZE_ONLY)) {
7224 /* If we supported only ISO C we wouldn't have to accept calling
7225 this on anything other than an array if DIF_SIZE_ONLY (and even then
7226 only on the outermost level, so no recursion would be needed),
7227 because initializing a flex array member isn't supported.
7228 But GNU C supports it, so we need to recurse even into
7229 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7230 /* just skip expression */
7231 skip_or_save_block(NULL);
7232 } else {
7233 if (!(flags & DIF_HAVE_ELEM)) {
7234 /* This should happen only when we haven't parsed
7235 the init element above for fear of committing a
7236 string constant to memory too early. */
7237 if (tok != TOK_STR && tok != TOK_LSTR)
7238 expect("string constant");
7239 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7241 init_putv(type, sec, c);
7245 /* parse an initializer for type 't' if 'has_init' is non-zero, and
7246 allocate space in local or global data space ('r' is either
7247 VT_LOCAL or VT_CONST). If 'v' is non-zero, then an associated
7248 variable 'v' of scope 'scope' is declared before the initializers
7249 are parsed. If 'v' is zero, then a reference to the new object
7250 is put on the value stack. If 'has_init' is 2, special parsing
7251 is done to handle string constants. */
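/* For instance, 'static int a[2] = { 1, 2 };' arrives here with r == VT_CONST,
   v set and has_init == 1, whereas an anonymous string constant used in an
   expression is allocated with v == 0 and has_init == 2, leaving a reference
   on the value stack (a sketch of typical call sites, not an exhaustive list). */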
7252 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7253 int has_init, int v, int scope)
7255 int size, align, addr;
7256 TokenString *init_str = NULL;
7258 Section *sec;
7259 Sym *flexible_array;
7260 Sym *sym = NULL;
7261 int saved_nocode_wanted = nocode_wanted;
7262 #ifdef CONFIG_TCC_BCHECK
7263 int bcheck;
7264 #endif
7266 /* Always allocate static or global variables */
7267 if (v && (r & VT_VALMASK) == VT_CONST)
7268 nocode_wanted |= 0x80000000;
7270 #ifdef CONFIG_TCC_BCHECK
7271 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7272 #endif
7274 flexible_array = NULL;
7275 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7276 Sym *field = type->ref->next;
7277 if (field) {
7278 while (field->next)
7279 field = field->next;
7280 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7281 flexible_array = field;
7285 size = type_size(type, &align);
7286 /* If unknown size, we must evaluate it before
7287 evaluating initializers because
7288 initializers can generate global data too
7289 (e.g. string pointers or ISO C99 compound
7290 literals). It also simplifies the handling of
7291 local initializers. */
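/* e.g. 'int a[] = { 1, 2, 3 };' or a struct whose flexible array member is
   initialized both land here and get their real size from the DIF_SIZE_ONLY
   pass below. */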
7292 if (size < 0 || (flexible_array && has_init)) {
7293 if (!has_init)
7294 tcc_error("unknown type size");
7295 /* get all init string */
7296 if (has_init == 2) {
7297 init_str = tok_str_alloc();
7298 /* only get strings */
7299 while (tok == TOK_STR || tok == TOK_LSTR) {
7300 tok_str_add_tok(init_str);
7301 next();
7303 tok_str_add(init_str, -1);
7304 tok_str_add(init_str, 0);
7305 } else {
7306 skip_or_save_block(&init_str);
7308 unget_tok(0);
7310 /* compute size */
7311 begin_macro(init_str, 1);
7312 next();
7313 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7314 /* prepare second initializer parsing */
7315 macro_ptr = init_str->str;
7316 next();
7318 /* if still unknown size, error */
7319 size = type_size(type, &align);
7320 if (size < 0)
7321 tcc_error("unknown type size");
7323 /* If there's a flexible array member and it was used in the
7324 initializer, adjust the size. */
7325 if (flexible_array &&
7326 flexible_array->type.ref->c > 0)
7327 size += flexible_array->type.ref->c
7328 * pointed_size(&flexible_array->type);
7329 /* take into account specified alignment if bigger */
7330 if (ad->a.aligned) {
7331 int speca = 1 << (ad->a.aligned - 1);
7332 if (speca > align)
7333 align = speca;
7334 } else if (ad->a.packed) {
7335 align = 1;
7338 if (!v && NODATA_WANTED)
7339 size = 0, align = 1;
7341 if ((r & VT_VALMASK) == VT_LOCAL) {
7342 sec = NULL;
7343 #ifdef CONFIG_TCC_BCHECK
7344 if (bcheck && (type->t & VT_ARRAY)) {
7345 loc--;
7347 #endif
7348 loc = (loc - size) & -align;
7349 addr = loc;
7350 #ifdef CONFIG_TCC_BCHECK
7351 /* handles bounds */
7352 /* XXX: currently, since we do only one pass, we cannot track
7353 '&' operators, so we add only arrays */
7354 if (bcheck && (type->t & VT_ARRAY)) {
7355 addr_t *bounds_ptr;
7356 /* add padding between regions */
7357 loc--;
7358 /* then add local bound info */
7359 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7360 bounds_ptr[0] = addr;
7361 bounds_ptr[1] = size;
7363 #endif
7364 if (v) {
7365 /* local variable */
7366 #ifdef CONFIG_TCC_ASM
7367 if (ad->asm_label) {
7368 int reg = asm_parse_regvar(ad->asm_label);
7369 if (reg >= 0)
7370 r = (r & ~VT_VALMASK) | reg;
7372 #endif
7373 sym = sym_push(v, type, r, addr);
7374 if (ad->cleanup_func) {
7375 Sym *cls = sym_push2(&all_cleanups,
7376 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7377 cls->prev_tok = sym;
7378 cls->next = ad->cleanup_func;
7379 cls->ncl = cur_scope->cl.s;
7380 cur_scope->cl.s = cls;
7383 sym->a = ad->a;
7384 } else {
7385 /* push local reference */
7386 vset(type, r, addr);
7388 } else {
7389 if (v && scope == VT_CONST) {
7390 /* see if the symbol was already defined */
7391 sym = sym_find(v);
7392 if (sym) {
7393 patch_storage(sym, ad, type);
7394 /* we accept several definitions of the same global variable. */
7395 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7396 goto no_alloc;
7400 /* allocate symbol in corresponding section */
7401 sec = ad->section;
7402 if (!sec) {
7403 if (has_init)
7404 sec = data_section;
7405 else if (tcc_state->nocommon)
7406 sec = bss_section;
7409 if (sec) {
7410 addr = section_add(sec, size, align);
7411 #ifdef CONFIG_TCC_BCHECK
7412 /* add padding if bound check */
7413 if (bcheck)
7414 section_add(sec, 1, 1);
7415 #endif
7416 } else {
7417 addr = align; /* SHN_COMMON is special, symbol value is align */
7418 sec = common_section;
7421 if (v) {
7422 if (!sym) {
7423 sym = sym_push(v, type, r | VT_SYM, 0);
7424 patch_storage(sym, ad, NULL);
7426 /* update symbol definition */
7427 put_extern_sym(sym, sec, addr, size);
7428 } else {
7429 /* push global reference */
7430 vpush_ref(type, sec, addr, size);
7431 sym = vtop->sym;
7432 vtop->r |= r;
7435 #ifdef CONFIG_TCC_BCHECK
7436 /* handle the bounds now because the symbol must be defined
7437 before the relocation that references it */
7438 if (bcheck) {
7439 addr_t *bounds_ptr;
7441 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7442 /* then add global bound info */
7443 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7444 bounds_ptr[0] = 0; /* relocated */
7445 bounds_ptr[1] = size;
7447 #endif
7450 if (type->t & VT_VLA) {
7451 int a;
7453 if (NODATA_WANTED)
7454 goto no_alloc;
7456 /* save current stack pointer */
7457 if (root_scope->vla.loc == 0) {
7458 struct scope *v = cur_scope;
7459 gen_vla_sp_save(loc -= PTR_SIZE);
7460 do v->vla.loc = loc; while ((v = v->prev));
7463 vla_runtime_type_size(type, &a);
7464 gen_vla_alloc(type, a);
7465 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7466 /* on _WIN64, because of the function args scratch area, the
7467 result of alloca differs from RSP and is returned in RAX. */
7468 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7469 #endif
7470 gen_vla_sp_save(addr);
7471 cur_scope->vla.loc = addr;
7472 cur_scope->vla.num++;
7474 } else if (has_init) {
7475 size_t oldreloc_offset = 0;
7476 if (sec && sec->reloc)
7477 oldreloc_offset = sec->reloc->data_offset;
7478 decl_initializer(type, sec, addr, DIF_FIRST);
7479 if (sec && sec->reloc)
7480 squeeze_multi_relocs(sec, oldreloc_offset);
7481 /* patch flexible array member size back to -1, */
7482 /* for possible subsequent similar declarations */
7483 if (flexible_array)
7484 flexible_array->type.ref->c = -1;
7487 no_alloc:
7488 /* restore parse state if needed */
7489 if (init_str) {
7490 end_macro();
7491 next();
7494 nocode_wanted = saved_nocode_wanted;
7497 /* parse a function defined by symbol 'sym' and generate its code in
7498 'cur_text_section' */
7499 static void gen_function(Sym *sym)
7501 /* Initialize VLA state */
7502 struct scope f = { 0 };
7503 cur_scope = root_scope = &f;
7505 nocode_wanted = 0;
7506 ind = cur_text_section->data_offset;
7507 if (sym->a.aligned) {
7508 size_t newoff = section_add(cur_text_section, 0,
7509 1 << (sym->a.aligned - 1));
7510 gen_fill_nops(newoff - ind);
7512 /* NOTE: we patch the symbol size later */
7513 put_extern_sym(sym, cur_text_section, ind, 0);
7515 funcname = get_tok_str(sym->v, NULL);
7516 func_ind = ind;
7518 /* put debug symbol */
7519 tcc_debug_funcstart(tcc_state, sym);
7520 /* push a dummy symbol to enable local sym storage */
7521 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7522 local_scope = 1; /* for function parameters */
7523 gfunc_prolog(&sym->type);
7524 local_scope = 0;
7525 rsym = 0;
7526 clear_temp_local_var_list();
7527 block(0);
7528 gsym(rsym);
7529 nocode_wanted = 0;
7530 gfunc_epilog();
7531 cur_text_section->data_offset = ind;
7532 /* reset local stack */
7533 sym_pop(&local_stack, NULL, 0);
7534 local_scope = 0;
7535 label_pop(&global_label_stack, NULL, 0);
7536 sym_pop(&all_cleanups, NULL, 0);
7537 /* patch symbol size */
7538 elfsym(sym)->st_size = ind - func_ind;
7539 /* end of function */
7540 tcc_debug_funcend(tcc_state, ind - func_ind);
7541 /* It's better to crash than to generate wrong code */
7542 cur_text_section = NULL;
7543 funcname = ""; /* for safety */
7544 func_vt.t = VT_VOID; /* for safety */
7545 func_var = 0; /* for safety */
7546 ind = 0; /* for safety */
7547 nocode_wanted = 0x80000000;
7548 check_vstack();
7551 static void gen_inline_functions(TCCState *s)
7553 Sym *sym;
7554 int inline_generated, i;
7555 struct InlineFunc *fn;
7557 tcc_open_bf(s, ":inline:", 0);
7558 /* iterate while inline functions are referenced */
7559 do {
7560 inline_generated = 0;
7561 for (i = 0; i < s->nb_inline_fns; ++i) {
7562 fn = s->inline_fns[i];
7563 sym = fn->sym;
7564 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7565 /* the function was used or forced (and then not internal):
7566 generate its code and convert it to a normal function */
7567 fn->sym = NULL;
7568 if (file)
7569 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7570 begin_macro(fn->func_str, 1);
7571 next();
7572 cur_text_section = text_section;
7573 gen_function(sym);
7574 end_macro();
7576 inline_generated = 1;
7579 } while (inline_generated);
7580 tcc_close();
7583 ST_FUNC void free_inline_functions(TCCState *s)
7585 int i;
7586 /* free tokens of unused inline functions */
7587 for (i = 0; i < s->nb_inline_fns; ++i) {
7588 struct InlineFunc *fn = s->inline_fns[i];
7589 if (fn->sym)
7590 tok_str_free(fn->func_str);
7592 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7595 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7596 if parsing an old-style parameter decl list (in which case FUNC_SYM is set) */
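/* Illustration: for the old-style definition 'int f(a) int a; { ... }'
   decl0() is re-entered with l == VT_CMP to parse the 'int a;' parameter
   declaration list before the function body. */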
7597 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7599 int v, has_init, r;
7600 CType type, btype;
7601 Sym *sym;
7602 AttributeDef ad, adbase;
7604 while (1) {
7605 if (tok == TOK_STATIC_ASSERT) {
7606 int c;
7608 next();
7609 skip('(');
7610 c = expr_const();
7611 skip(',');
7612 if (c == 0)
7613 tcc_error("%s", get_tok_str(tok, &tokc));
7614 next();
7615 skip(')');
7616 skip(';');
7617 continue;
7619 if (!parse_btype(&btype, &adbase)) {
7620 if (is_for_loop_init)
7621 return 0;
7622 /* skip redundant ';' if not in old parameter decl scope */
7623 if (tok == ';' && l != VT_CMP) {
7624 next();
7625 continue;
7627 if (l != VT_CONST)
7628 break;
7629 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7630 /* global asm block */
7631 asm_global_instr();
7632 continue;
7634 if (tok >= TOK_UIDENT) {
7635 /* special test for old K&R protos without explicit int
7636 type. Only accepted when defining global data */
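/* e.g. 'count;' or 'main(argc, argv) { ... }' at file scope are accepted
   with an implicit 'int' type. */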
7637 btype.t = VT_INT;
7638 } else {
7639 if (tok != TOK_EOF)
7640 expect("declaration");
7641 break;
7644 if (tok == ';') {
7645 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7646 int v = btype.ref->v;
7647 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7648 tcc_warning("unnamed struct/union that defines no instances");
7649 next();
7650 continue;
7652 if (IS_ENUM(btype.t)) {
7653 next();
7654 continue;
7657 while (1) { /* iterate thru each declaration */
7658 type = btype;
7659 /* If the base type itself was an array type of unspecified
7660 size (like in 'typedef int arr[]; arr x = {1};') then
7661 we will overwrite the unknown size by the real one for
7662 this decl. We need to unshare the ref symbol holding
7663 that size. */
7664 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7665 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7667 ad = adbase;
7668 type_decl(&type, &ad, &v, TYPE_DIRECT);
7669 #if 0
7671 char buf[500];
7672 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7673 printf("type = '%s'\n", buf);
7675 #endif
7676 if ((type.t & VT_BTYPE) == VT_FUNC) {
7677 /* if old style function prototype, we accept a
7678 declaration list */
7679 sym = type.ref;
7680 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7681 decl0(VT_CMP, 0, sym);
7682 /* always compile 'extern inline' */
7683 if (type.t & VT_EXTERN)
7684 type.t &= ~VT_INLINE;
7687 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7688 ad.asm_label = asm_label_instr();
7689 /* parse one last attribute list, after asm label */
7690 parse_attribute(&ad);
7691 #if 0
7692 /* gcc does not allow __asm__("label") with function definition,
7693 but why not ... */
7694 if (tok == '{')
7695 expect(";");
7696 #endif
7699 #ifdef TCC_TARGET_PE
7700 if (ad.a.dllimport || ad.a.dllexport) {
7701 if (type.t & VT_STATIC)
7702 tcc_error("cannot have dll linkage with static");
7703 if (type.t & VT_TYPEDEF) {
7704 tcc_warning("'%s' attribute ignored for typedef",
7705 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7706 (ad.a.dllexport = 0, "dllexport"));
7707 } else if (ad.a.dllimport) {
7708 if ((type.t & VT_BTYPE) == VT_FUNC)
7709 ad.a.dllimport = 0;
7710 else
7711 type.t |= VT_EXTERN;
7714 #endif
7715 if (tok == '{') {
7716 if (l != VT_CONST)
7717 tcc_error("cannot use local functions");
7718 if ((type.t & VT_BTYPE) != VT_FUNC)
7719 expect("function definition");
7721 /* reject abstract declarators in a function definition;
7722 make old-style params without a declaration default to int */
7723 sym = type.ref;
7724 while ((sym = sym->next) != NULL) {
7725 if (!(sym->v & ~SYM_FIELD))
7726 expect("identifier");
7727 if (sym->type.t == VT_VOID)
7728 sym->type = int_type;
7731 /* put function symbol */
7732 type.t &= ~VT_EXTERN;
7733 sym = external_sym(v, &type, 0, &ad);
7734 /* static inline functions are just recorded as a kind
7735 of macro. Their code will be emitted at the end of
7736 the compilation unit only if they are used */
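/* e.g. 'static inline int sq(int x) { return x * x; }' is only turned into
   real code by gen_inline_functions() if sq is actually referenced. */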
7737 if (sym->type.t & VT_INLINE) {
7738 struct InlineFunc *fn;
7739 const char *filename;
7741 filename = file ? file->filename : "";
7742 fn = tcc_malloc(sizeof *fn + strlen(filename));
7743 strcpy(fn->filename, filename);
7744 fn->sym = sym;
7745 skip_or_save_block(&fn->func_str);
7746 dynarray_add(&tcc_state->inline_fns,
7747 &tcc_state->nb_inline_fns, fn);
7748 } else {
7749 /* compute text section */
7750 cur_text_section = ad.section;
7751 if (!cur_text_section)
7752 cur_text_section = text_section;
7753 gen_function(sym);
7755 break;
7756 } else {
7757 if (l == VT_CMP) {
7758 /* find parameter in function parameter list */
7759 for (sym = func_sym->next; sym; sym = sym->next)
7760 if ((sym->v & ~SYM_FIELD) == v)
7761 goto found;
7762 tcc_error("declaration for parameter '%s' but no such parameter",
7763 get_tok_str(v, NULL));
7764 found:
7765 if (type.t & VT_STORAGE) /* 'register' is okay */
7766 tcc_error("storage class specified for '%s'",
7767 get_tok_str(v, NULL));
7768 if (sym->type.t != VT_VOID)
7769 tcc_error("redefinition of parameter '%s'",
7770 get_tok_str(v, NULL));
7771 convert_parameter_type(&type);
7772 sym->type = type;
7773 } else if (type.t & VT_TYPEDEF) {
7774 /* save typedefed type */
7775 /* XXX: test storage specifiers ? */
7776 sym = sym_find(v);
7777 if (sym && sym->sym_scope == local_scope) {
7778 if (!is_compatible_types(&sym->type, &type)
7779 || !(sym->type.t & VT_TYPEDEF))
7780 tcc_error("incompatible redefinition of '%s'",
7781 get_tok_str(v, NULL));
7782 sym->type = type;
7783 } else {
7784 sym = sym_push(v, &type, 0, 0);
7786 sym->a = ad.a;
7787 sym->f = ad.f;
7788 } else if ((type.t & VT_BTYPE) == VT_VOID
7789 && !(type.t & VT_EXTERN)) {
7790 tcc_error("declaration of void object");
7791 } else {
7792 r = 0;
7793 if ((type.t & VT_BTYPE) == VT_FUNC) {
7794 /* external function definition */
7795 /* specific case for func_call attribute */
7796 type.ref->f = ad.f;
7797 } else if (!(type.t & VT_ARRAY)) {
7798 /* not lvalue if array */
7799 r |= lvalue_type(type.t);
7801 has_init = (tok == '=');
7802 if (has_init && (type.t & VT_VLA))
7803 tcc_error("variable length array cannot be initialized");
7804 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7805 || (type.t & VT_BTYPE) == VT_FUNC
7806 /* as with GCC, uninitialized global arrays with no size
7807 are considered extern: */
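/* e.g. 'int tab[];' at file scope */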
7808 || ((type.t & VT_ARRAY) && !has_init
7809 && l == VT_CONST && type.ref->c < 0)
7811 /* external variable or function */
7812 type.t |= VT_EXTERN;
7813 sym = external_sym(v, &type, r, &ad);
7814 if (ad.alias_target) {
7815 ElfSym *esym;
7816 Sym *alias_target;
7817 alias_target = sym_find(ad.alias_target);
7818 esym = elfsym(alias_target);
7819 if (!esym)
7820 tcc_error("unsupported forward __alias__ attribute");
7821 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7823 } else {
7824 if (type.t & VT_STATIC)
7825 r |= VT_CONST;
7826 else
7827 r |= l;
7828 if (has_init)
7829 next();
7830 else if (l == VT_CONST)
7831 /* uninitialized global variables may be overridden */
7832 type.t |= VT_EXTERN;
7833 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7836 if (tok != ',') {
7837 if (is_for_loop_init)
7838 return 1;
7839 skip(';');
7840 break;
7842 next();
7846 return 0;
7849 static void decl(int l)
7851 decl0(l, 0, NULL);
7854 /* ------------------------------------------------------------------------- */
7855 #undef gjmp_addr
7856 #undef gjmp
7857 /* ------------------------------------------------------------------------- */