1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
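/* Bits used in 'nocode_wanted' (see tccgen_compile() and the macros below):
   0x80000000 is set while parsing at file scope, 0x20000000 is set by
   CODE_OFF() to suppress code, e.g. after unconditional jumps.  Only the
   latter makes the value positive, so NODATA_WANTED stays false at file
   scope and static data is still emitted there. */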
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
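/* Return the current output code index as a label target and re-enable code generation */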
61 static int gind(void) { CODE_ON(); return ind; }
63 /* Set 'nocode_wanted' after unconditional jumps */
64 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
65 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
69 #define gjmp gjmp_acs
70 /* <---- */
72 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /* list of temporary local variables on the stack in the current function */
94 ST_DATA struct temp_local_variable {
95 int location; // offset on stack (SValue.c.i)
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
144 /* we use our own 'finite' function to avoid potential problems with
145 non-standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC int ieee_finite(double d)
149 int p[4];
150 memcpy(p, &d, sizeof(double));
151 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
158 #endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", (int)(vtop - pvtop));
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
175 #if 0
176 void pv (const char *lbl, int a, int b)
178 int i;
179 for (i = a; i < a + b; ++i) {
180 SValue *p = &vtop[-i];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
185 #endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
240 /* put function symbol */
241 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
243 char buf[512];
245 if (!s1->do_debug)
246 return;
248 /* stabs info */
249 /* XXX: we put here a dummy type */
250 snprintf(buf, sizeof(buf), "%s:%c1",
251 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
252 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
253 cur_text_section, sym->c);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE, 0, file->line_num, 0);
257 last_ind = 0;
258 last_line_num = 0;
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC int tccgen_compile(TCCState *s1)
272 cur_text_section = NULL;
273 funcname = "";
274 anon_sym = SYM_FIRST_ANOM;
275 section_sym = 0;
276 const_wanted = 0;
277 nocode_wanted = 0x80000000;
278 local_scope = 0;
280 /* define some often used types */
281 int_type.t = VT_INT;
282 char_pointer_type.t = VT_BYTE;
283 mk_pointer(&char_pointer_type);
284 #if PTR_SIZE == 4
285 size_type.t = VT_INT | VT_UNSIGNED;
286 ptrdiff_type.t = VT_INT;
287 #elif LONG_SIZE == 4
288 size_type.t = VT_LLONG | VT_UNSIGNED;
289 ptrdiff_type.t = VT_LLONG;
290 #else
291 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
292 ptrdiff_type.t = VT_LONG | VT_LLONG;
293 #endif
294 func_old_type.t = VT_FUNC;
295 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
296 func_old_type.ref->f.func_call = FUNC_CDECL;
297 func_old_type.ref->f.func_type = FUNC_OLD;
299 tcc_debug_start(s1);
301 #ifdef TCC_TARGET_ARM
302 arm_init(s1);
303 #endif
305 #ifdef INC_DEBUG
306 printf("%s: **** new file\n", file->filename);
307 #endif
309 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
310 next();
311 decl(VT_CONST);
312 gen_inline_functions(s1);
313 check_vstack();
314 /* end of translation unit info */
315 tcc_debug_end(s1);
316 return 0;
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC void update_storage(Sym *sym)
330 ElfSym *esym;
331 int sym_bind, old_sym_bind;
333 esym = elfsym(sym);
334 if (!esym)
335 return;
337 if (sym->a.visibility)
338 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
339 | sym->a.visibility;
341 if (sym->type.t & (VT_STATIC | VT_INLINE))
342 sym_bind = STB_LOCAL;
343 else if (sym->a.weak)
344 sym_bind = STB_WEAK;
345 else
346 sym_bind = STB_GLOBAL;
347 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
348 if (sym_bind != old_sym_bind) {
349 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
352 #ifdef TCC_TARGET_PE
353 if (sym->a.dllimport)
354 esym->st_other |= ST_PE_IMPORT;
355 if (sym->a.dllexport)
356 esym->st_other |= ST_PE_EXPORT;
357 #endif
359 #if 0
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym->v, NULL),
362 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
363 sym->a.visibility,
364 sym->a.dllexport,
365 sym->a.dllimport
367 #endif
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
375 addr_t value, unsigned long size,
376 int can_add_underscore)
378 int sym_type, sym_bind, info, other, t;
379 ElfSym *esym;
380 const char *name;
381 char buf1[256];
382 #ifdef CONFIG_TCC_BCHECK
383 char buf[32];
384 #endif
386 if (!sym->c) {
387 name = get_tok_str(sym->v, NULL);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state->do_bounds_check) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
393 switch(sym->v) {
394 #ifdef TCC_TARGET_PE
395 /* XXX: we rely only on malloc hooks */
396 case TOK_malloc:
397 case TOK_free:
398 case TOK_realloc:
399 case TOK_memalign:
400 case TOK_calloc:
401 #endif
402 case TOK_memcpy:
403 case TOK_memmove:
404 case TOK_memset:
405 case TOK_strlen:
406 case TOK_strcpy:
407 case TOK_alloca:
408 strcpy(buf, "__bound_");
409 strcat(buf, name);
410 name = buf;
411 break;
414 #endif
415 t = sym->type.t;
416 if ((t & VT_BTYPE) == VT_FUNC) {
417 sym_type = STT_FUNC;
418 } else if ((t & VT_BTYPE) == VT_VOID) {
419 sym_type = STT_NOTYPE;
420 } else {
421 sym_type = STT_OBJECT;
423 if (t & (VT_STATIC | VT_INLINE))
424 sym_bind = STB_LOCAL;
425 else
426 sym_bind = STB_GLOBAL;
427 other = 0;
428 #ifdef TCC_TARGET_PE
429 if (sym_type == STT_FUNC && sym->type.ref) {
430 Sym *ref = sym->type.ref;
431 if (ref->a.nodecorate) {
432 can_add_underscore = 0;
434 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
435 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
436 name = buf1;
437 other |= ST_PE_STDCALL;
438 can_add_underscore = 0;
441 #endif
442 if (tcc_state->leading_underscore && can_add_underscore) {
443 buf1[0] = '_';
444 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
445 name = buf1;
447 if (sym->asm_label)
448 name = get_tok_str(sym->asm_label, NULL);
449 info = ELFW(ST_INFO)(sym_bind, sym_type);
450 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
451 } else {
452 esym = elfsym(sym);
453 esym->st_value = value;
454 esym->st_size = size;
455 esym->st_shndx = sh_num;
457 update_storage(sym);
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
486 #if PTR_SIZE == 4
487 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
489 greloca(s, sym, offset, type, 0);
491 #endif
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym *__sym_malloc(void)
497 Sym *sym_pool, *sym, *last_sym;
498 int i;
500 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
501 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
503 last_sym = sym_free_first;
504 sym = sym_pool;
505 for(i = 0; i < SYM_POOL_NB; i++) {
506 sym->next = last_sym;
507 last_sym = sym;
508 sym++;
510 sym_free_first = last_sym;
511 return last_sym;
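/* allocate a symbol, from the pool free list or directly via tcc_malloc() when SYM_DEBUG is defined */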
514 static inline Sym *sym_malloc(void)
516 Sym *sym;
517 #ifndef SYM_DEBUG
518 sym = sym_free_first;
519 if (!sym)
520 sym = __sym_malloc();
521 sym_free_first = sym->next;
522 return sym;
523 #else
524 sym = tcc_malloc(sizeof(Sym));
525 return sym;
526 #endif
529 ST_INLN void sym_free(Sym *sym)
531 #ifndef SYM_DEBUG
532 sym->next = sym_free_first;
533 sym_free_first = sym;
534 #else
535 tcc_free(sym);
536 #endif
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
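/* return the scope in which symbol 's' was declared; enum values use the scope of their enum type */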
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
598 Sym *s, **ps;
599 TokenSym *ts;
601 if (local_stack)
602 ps = &local_stack;
603 else
604 ps = &global_stack;
605 s = sym_push2(ps, v, type->t, c);
606 s->type.ref = type->ref;
607 s->r = r;
608 /* don't record fields or anonymous symbols */
609 /* XXX: simplify */
610 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
611 /* record symbol in token array */
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 s->prev_tok = *ps;
618 *ps = s;
619 s->sym_scope = local_scope;
620 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v & ~SYM_STRUCT, NULL));
624 return s;
627 /* push a global identifier */
628 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
630 Sym *s, **ps;
631 s = sym_push2(&global_stack, v, t, c);
632 s->r = VT_CONST | VT_SYM;
633 /* don't record anonymous symbol */
634 if (v < SYM_FIRST_ANOM) {
635 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps != NULL && (*ps)->sym_scope)
639 ps = &(*ps)->prev_tok;
640 s->prev_tok = *ps;
641 *ps = s;
643 return s;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
650 Sym *s, *ss, **ps;
651 TokenSym *ts;
652 int v;
654 s = *ptop;
655 while(s != b) {
656 ss = s->prev;
657 v = s->v;
658 /* remove symbol in token array */
659 /* XXX: simplify */
660 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
661 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
662 if (v & SYM_STRUCT)
663 ps = &ts->sym_struct;
664 else
665 ps = &ts->sym_identifier;
666 *ps = s->prev_tok;
668 if (!keep)
669 sym_free(s);
670 s = ss;
672 if (!keep)
673 *ptop = b;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot leave CPU flags set if other instructions are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as its value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop->r == VT_CMP && !nocode_wanted)
692 gv(RC_INT);
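/* push a new value (type, register/flags and constant) on the value stack */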
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
718 /* pop stack value */
719 ST_FUNC void vpop(void)
721 int v;
722 v = vtop->r & VT_VALMASK;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
725 if (v == TREG_ST0) {
726 o(0xd8dd); /* fstp %st(0) */
727 } else
728 #endif
729 if (v == VT_CMP) {
730 /* need to put correct jump if && or || without test */
731 gsym(vtop->jtrue);
732 gsym(vtop->jfalse);
734 vtop--;
737 /* push constant of type "type" with useless value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC void vrotb(int n)
810 int i;
811 SValue tmp;
813 vcheck_cmp();
814 tmp = vtop[-n + 1];
815 for(i=-n+1;i!=0;i++)
816 vtop[i] = vtop[i+1];
817 vtop[0] = tmp;
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC void vrote(SValue *e, int n)
825 int i;
826 SValue tmp;
828 vcheck_cmp();
829 tmp = *e;
830 for(i = 0;i < n - 1; i++)
831 e[-i] = e[-i - 1];
832 e[-n + 1] = tmp;
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC void vrott(int n)
840 vrote(vtop, n);
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC void vset_VT_CMP(int op)
849 vtop->r = VT_CMP;
850 vtop->cmp_op = op;
851 vtop->jfalse = 0;
852 vtop->jtrue = 0;
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op = vtop->cmp_op;
859 if (vtop->jtrue || vtop->jfalse) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv = op & (op < 2); /* small optimization */
862 vseti(VT_JMP+inv, gvtst(inv, 0));
863 } else {
864 /* otherwise convert flags (resp. 0/1) to a register */
865 vtop->c.i = op;
866 if (op < 2) /* doesn't seem to happen */
867 vtop->r = VT_CONST;
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv, int t)
874 int *p;
875 if (vtop->r != VT_CMP) {
876 vpushi(0);
877 gen_op(TOK_NE);
878 if (vtop->r != VT_CMP) /* must be VT_CONST then */
879 vset_VT_CMP(vtop->c.i != 0);
881 p = inv ? &vtop->jfalse : &vtop->jtrue;
882 *p = gjmp_append(*p, t);
885 /* Generate value test
887 * Generate a test for any value (jump, comparison and integers) */
888 static int gvtst(int inv, int t)
890 int op, u, x;
892 gvtst_set(inv, t);
894 t = vtop->jtrue, u = vtop->jfalse;
895 if (inv)
896 x = u, u = t, t = x;
897 op = vtop->cmp_op;
899 /* jump to the wanted target */
900 if (op > 1)
901 t = gjmp_cond(op ^ inv, t);
902 else if (op != inv)
903 t = gjmp(t);
904 /* resolve complementary jumps to here */
905 gsym(u);
907 vtop--;
908 return t;
911 /* ------------------------------------------------------------------------- */
912 /* push a symbol value of TYPE */
913 static inline void vpushsym(CType *type, Sym *sym)
915 CValue cval;
916 cval.i = 0;
917 vsetc(type, VT_CONST | VT_SYM, &cval);
918 vtop->sym = sym;
921 /* Return a static symbol pointing to a section */
922 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
924 int v;
925 Sym *sym;
927 v = anon_sym++;
928 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
929 sym->type.t |= VT_STATIC;
930 put_extern_sym(sym, sec, offset, size);
931 return sym;
934 /* push a reference to a section offset by adding a dummy symbol */
935 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
937 vpushsym(type, get_sym_ref(type, sec, offset, size));
940 /* define a new external reference to a symbol 'v' of type 'u' */
941 ST_FUNC Sym *external_global_sym(int v, CType *type)
943 Sym *s;
945 s = sym_find(v);
946 if (!s) {
947 /* push forward reference */
948 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
949 s->type.ref = type->ref;
950 } else if (IS_ASM_SYM(s)) {
951 s->type.t = type->t | (s->type.t & VT_EXTERN);
952 s->type.ref = type->ref;
953 update_storage(s);
955 return s;
958 /* Merge symbol attributes. */
959 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
961 if (sa1->aligned && !sa->aligned)
962 sa->aligned = sa1->aligned;
963 sa->packed |= sa1->packed;
964 sa->weak |= sa1->weak;
965 if (sa1->visibility != STV_DEFAULT) {
966 int vis = sa->visibility;
967 if (vis == STV_DEFAULT
968 || vis > sa1->visibility)
969 vis = sa1->visibility;
970 sa->visibility = vis;
972 sa->dllexport |= sa1->dllexport;
973 sa->nodecorate |= sa1->nodecorate;
974 sa->dllimport |= sa1->dllimport;
977 /* Merge function attributes. */
978 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
980 if (fa1->func_call && !fa->func_call)
981 fa->func_call = fa1->func_call;
982 if (fa1->func_type && !fa->func_type)
983 fa->func_type = fa1->func_type;
984 if (fa1->func_args && !fa->func_args)
985 fa->func_args = fa1->func_args;
988 /* Merge attributes. */
989 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
991 merge_symattr(&ad->a, &ad1->a);
992 merge_funcattr(&ad->f, &ad1->f);
994 if (ad1->section)
995 ad->section = ad1->section;
996 if (ad1->alias_target)
997 ad->alias_target = ad1->alias_target;
998 if (ad1->asm_label)
999 ad->asm_label = ad1->asm_label;
1000 if (ad1->attr_mode)
1001 ad->attr_mode = ad1->attr_mode;
1004 /* Merge some type attributes. */
1005 static void patch_type(Sym *sym, CType *type)
1007 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1008 if (!(sym->type.t & VT_EXTERN))
1009 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1010 sym->type.t &= ~VT_EXTERN;
1013 if (IS_ASM_SYM(sym)) {
1014 /* stay static if both are static */
1015 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1016 sym->type.ref = type->ref;
1019 if (!is_compatible_types(&sym->type, type)) {
1020 tcc_error("incompatible types for redefinition of '%s'",
1021 get_tok_str(sym->v, NULL));
1023 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1024 int static_proto = sym->type.t & VT_STATIC;
1025 /* warn if static follows non-static function declaration */
1026 if ((type->t & VT_STATIC) && !static_proto
1027 /* XXX this test for inline shouldn't be here. Until we
1028 implement gnu-inline mode again it silences a warning for
1029 mingw caused by our workarounds. */
1030 && !((type->t | sym->type.t) & VT_INLINE))
1031 tcc_warning("static storage ignored for redefinition of '%s'",
1032 get_tok_str(sym->v, NULL));
1034 /* set 'inline' if both agree or if one has static */
1035 if ((type->t | sym->type.t) & VT_INLINE) {
1036 if (!((type->t ^ sym->type.t) & VT_INLINE)
1037 || ((type->t | sym->type.t) & VT_STATIC))
1038 static_proto |= VT_INLINE;
1041 if (0 == (type->t & VT_EXTERN)) {
1042 /* put complete type, use static from prototype */
1043 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1044 sym->type.ref = type->ref;
1045 } else {
1046 sym->type.t &= ~VT_INLINE | static_proto;
1049 if (sym->type.ref->f.func_type == FUNC_OLD
1050 && type->ref->f.func_type != FUNC_OLD) {
1051 sym->type.ref = type->ref;
1054 } else {
1055 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1056 /* set array size if it was omitted in extern declaration */
1057 sym->type.ref->c = type->ref->c;
1059 if ((type->t ^ sym->type.t) & VT_STATIC)
1060 tcc_warning("storage mismatch for redefinition of '%s'",
1061 get_tok_str(sym->v, NULL));
1065 /* Merge some storage attributes. */
1066 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1068 if (type)
1069 patch_type(sym, type);
1071 #ifdef TCC_TARGET_PE
1072 if (sym->a.dllimport != ad->a.dllimport)
1073 tcc_error("incompatible dll linkage for redefinition of '%s'",
1074 get_tok_str(sym->v, NULL));
1075 #endif
1076 merge_symattr(&sym->a, &ad->a);
1077 if (ad->asm_label)
1078 sym->asm_label = ad->asm_label;
1079 update_storage(sym);
1082 /* copy sym to other stack */
1083 static Sym *sym_copy(Sym *s0, Sym **ps)
1085 Sym *s;
1086 s = sym_malloc(), *s = *s0;
1087 s->prev = *ps, *ps = s;
1088 if (s->v < SYM_FIRST_ANOM) {
1089 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1090 s->prev_tok = *ps, *ps = s;
1092 return s;
1095 /* copy a list of syms */
1096 static void sym_copy_ref(Sym *s0, Sym **ps)
1098 Sym *s, **sp = &s0->type.ref;
1099 for (s = *sp, *sp = NULL; s; s = s->next)
1100 sp = &(*sp = sym_copy(s, ps))->next;
1103 /* define a new external reference to a symbol 'v' */
1104 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1106 Sym *s; int bt;
1108 /* look for global symbol */
1109 s = sym_find(v);
1110 while (s && s->sym_scope)
1111 s = s->prev_tok;
1113 if (!s) {
1114 /* push forward reference */
1115 s = global_identifier_push(v, type->t, 0);
1116 s->r |= r;
1117 s->a = ad->a;
1118 s->asm_label = ad->asm_label;
1119 s->type.ref = type->ref;
1120 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1121 /* copy type to the global stack also */
1122 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1123 sym_copy_ref(s, &global_stack);
1124 } else {
1125 patch_storage(s, ad, type);
1126 bt = s->type.t & VT_BTYPE;
1128 /* push variables to local scope if any */
1129 if (local_stack && bt != VT_FUNC)
1130 s = sym_copy(s, &local_stack);
1131 return s;
1134 /* push a reference to global symbol v */
1135 ST_FUNC void vpush_global_sym(CType *type, int v)
1137 vpushsym(type, external_global_sym(v, type));
1140 /* save registers up to (vtop - n) stack entry */
1141 ST_FUNC void save_regs(int n)
1143 SValue *p, *p1;
1144 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1145 save_reg(p->r);
1148 /* save r to the memory stack, and mark it as being free */
1149 ST_FUNC void save_reg(int r)
1151 save_reg_upstack(r, 0);
1154 /* save r to the memory stack, and mark it as being free,
1155 if seen up to (vtop - n) stack entry */
1156 ST_FUNC void save_reg_upstack(int r, int n)
1158 int l, saved, size, align;
1159 SValue *p, *p1, sv;
1160 CType *type;
1162 if ((r &= VT_VALMASK) >= VT_CONST)
1163 return;
1164 if (nocode_wanted)
1165 return;
1167 /* modify all stack values */
1168 saved = 0;
1169 l = 0;
1170 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1171 if ((p->r & VT_VALMASK) == r ||
1172 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1173 /* must save value on stack if not already done */
1174 if (!saved) {
1175 /* NOTE: must reload 'r' because r might be equal to r2 */
1176 r = p->r & VT_VALMASK;
1177 /* store register in the stack */
1178 type = &p->type;
1179 if ((p->r & VT_LVAL) ||
1180 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1181 #if PTR_SIZE == 8
1182 type = &char_pointer_type;
1183 #else
1184 type = &int_type;
1185 #endif
1186 size = type_size(type, &align);
1187 l=get_temp_local_var(size,align);
1188 sv.type.t = type->t;
1189 sv.r = VT_LOCAL | VT_LVAL;
1190 sv.c.i = l;
1191 store(r, &sv);
1192 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1193 /* x86 specific: need to pop fp register ST0 if saved */
1194 if (r == TREG_ST0) {
1195 o(0xd8dd); /* fstp %st(0) */
1197 #endif
1198 #if PTR_SIZE == 4
1199 /* special long long case */
1200 if ((type->t & VT_BTYPE) == VT_LLONG) {
1201 sv.c.i += 4;
1202 store(p->r2, &sv);
1204 #endif
1205 saved = 1;
1207 /* mark that stack entry as being saved on the stack */
1208 if (p->r & VT_LVAL) {
1209 /* also clear the bounded flag because the
1210 relocation address of the function was stored in
1211 p->c.i */
1212 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1213 } else {
1214 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1216 p->r2 = VT_CONST;
1217 p->c.i = l;
1222 #ifdef TCC_TARGET_ARM
1223 /* find a register of class 'rc2' with at most one reference on stack.
1224 * If none, call get_reg(rc) */
1225 ST_FUNC int get_reg_ex(int rc, int rc2)
1227 int r;
1228 SValue *p;
1230 for(r=0;r<NB_REGS;r++) {
1231 if (reg_classes[r] & rc2) {
1232 int n;
1233 n=0;
1234 for(p = vstack; p <= vtop; p++) {
1235 if ((p->r & VT_VALMASK) == r ||
1236 (p->r2 & VT_VALMASK) == r)
1237 n++;
1239 if (n <= 1)
1240 return r;
1243 return get_reg(rc);
1245 #endif
1247 /* find a free register of class 'rc'. If none, save one register */
1248 ST_FUNC int get_reg(int rc)
1250 int r;
1251 SValue *p;
1253 /* find a free register */
1254 for(r=0;r<NB_REGS;r++) {
1255 if (reg_classes[r] & rc) {
1256 if (nocode_wanted)
1257 return r;
1258 for(p=vstack;p<=vtop;p++) {
1259 if ((p->r & VT_VALMASK) == r ||
1260 (p->r2 & VT_VALMASK) == r)
1261 goto notfound;
1263 return r;
1265 notfound: ;
1268 /* no register left : free the first one on the stack (VERY
1269 IMPORTANT to start from the bottom to ensure that we don't
1270 spill registers used in gen_opi()) */
1271 for(p=vstack;p<=vtop;p++) {
1272 /* look at second register (if long long) */
1273 r = p->r2 & VT_VALMASK;
1274 if (r < VT_CONST && (reg_classes[r] & rc))
1275 goto save_found;
1276 r = p->r & VT_VALMASK;
1277 if (r < VT_CONST && (reg_classes[r] & rc)) {
1278 save_found:
1279 save_reg(r);
1280 return r;
1283 /* Should never come here */
1284 return -1;
1287 /* find a free temporary local variable (return its offset on the stack) matching the given size and alignment. If none is free, allocate a new temporary stack variable. */
1288 static int get_temp_local_var(int size,int align){
1289 int i;
1290 struct temp_local_variable *temp_var;
1291 int found_var;
1292 SValue *p;
1293 int r;
1294 char free;
1295 char found;
1296 found=0;
1297 for(i=0;i<nb_temp_local_vars;i++){
1298 temp_var=&arr_temp_local_vars[i];
1299 if(temp_var->size<size||align!=temp_var->align){
1300 continue;
1302 /*check if temp_var is free*/
1303 free=1;
1304 for(p=vstack;p<=vtop;p++) {
1305 r=p->r&VT_VALMASK;
1306 if(r==VT_LOCAL||r==VT_LLOCAL){
1307 if(p->c.i==temp_var->location){
1308 free=0;
1309 break;
1313 if(free){
1314 found_var=temp_var->location;
1315 found=1;
1316 break;
1319 if(!found){
1320 loc = (loc - size) & -align;
1321 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1322 temp_var=&arr_temp_local_vars[i];
1323 temp_var->location=loc;
1324 temp_var->size=size;
1325 temp_var->align=align;
1326 nb_temp_local_vars++;
1328 found_var=loc;
1330 return found_var;
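/* forget all recorded temporary local variables */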
1333 static void clear_temp_local_var_list(){
1334 nb_temp_local_vars=0;
1337 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1338 if needed */
1339 static void move_reg(int r, int s, int t)
1341 SValue sv;
1343 if (r != s) {
1344 save_reg(r);
1345 sv.type.t = t;
1346 sv.type.ref = NULL;
1347 sv.r = s;
1348 sv.c.i = 0;
1349 load(r, &sv);
1353 /* get address of vtop (vtop MUST BE an lvalue) */
1354 ST_FUNC void gaddrof(void)
1356 vtop->r &= ~VT_LVAL;
1357 /* tricky: if saved lvalue, then we can go back to lvalue */
1358 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1359 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1364 #ifdef CONFIG_TCC_BCHECK
1365 /* generate lvalue bound code */
1366 static void gbound(void)
1368 int lval_type;
1369 CType type1;
1371 vtop->r &= ~VT_MUSTBOUND;
1372 /* if lvalue, then use checking code before dereferencing */
1373 if (vtop->r & VT_LVAL) {
1374 /* if not VT_BOUNDED value, then make one */
1375 if (!(vtop->r & VT_BOUNDED)) {
1376 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1377 /* must save type because we must set it to int to get pointer */
1378 type1 = vtop->type;
1379 vtop->type.t = VT_PTR;
1380 gaddrof();
1381 vpushi(0);
1382 gen_bounded_ptr_add();
1383 vtop->r |= lval_type;
1384 vtop->type = type1;
1386 /* then check for dereferencing */
1387 gen_bounded_ptr_deref();
1390 #endif
1392 static void incr_bf_adr(int o)
1394 vtop->type = char_pointer_type;
1395 gaddrof();
1396 vpushi(o);
1397 gen_op('+');
1398 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1399 | (VT_BYTE|VT_UNSIGNED);
1400 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1401 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1404 /* single-byte load mode for packed or otherwise unaligned bitfields */
1405 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1407 int n, o, bits;
1408 save_reg_upstack(vtop->r, 1);
1409 vpush64(type->t & VT_BTYPE, 0); // B X
1410 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1411 do {
1412 vswap(); // X B
1413 incr_bf_adr(o);
1414 vdup(); // X B B
1415 n = 8 - bit_pos;
1416 if (n > bit_size)
1417 n = bit_size;
1418 if (bit_pos)
1419 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1420 if (n < 8)
1421 vpushi((1 << n) - 1), gen_op('&');
1422 gen_cast(type);
1423 if (bits)
1424 vpushi(bits), gen_op(TOK_SHL);
1425 vrotb(3); // B Y X
1426 gen_op('|'); // B X
1427 bits += n, bit_size -= n, o = 1;
1428 } while (bit_size);
1429 vswap(), vpop();
1430 if (!(type->t & VT_UNSIGNED)) {
1431 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1432 vpushi(n), gen_op(TOK_SHL);
1433 vpushi(n), gen_op(TOK_SAR);
1437 /* single-byte store mode for packed or otherwise unaligned bitfields */
1438 static void store_packed_bf(int bit_pos, int bit_size)
1440 int bits, n, o, m, c;
1442 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1443 vswap(); // X B
1444 save_reg_upstack(vtop->r, 1);
1445 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1446 do {
1447 incr_bf_adr(o); // X B
1448 vswap(); //B X
1449 c ? vdup() : gv_dup(); // B V X
1450 vrott(3); // X B V
1451 if (bits)
1452 vpushi(bits), gen_op(TOK_SHR);
1453 if (bit_pos)
1454 vpushi(bit_pos), gen_op(TOK_SHL);
1455 n = 8 - bit_pos;
1456 if (n > bit_size)
1457 n = bit_size;
1458 if (n < 8) {
1459 m = ((1 << n) - 1) << bit_pos;
1460 vpushi(m), gen_op('&'); // X B V1
1461 vpushv(vtop-1); // X B V1 B
1462 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1463 gen_op('&'); // X B V1 B1
1464 gen_op('|'); // X B V2
1466 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1467 vstore(), vpop(); // X B
1468 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1469 } while (bit_size);
1470 vpop(), vpop();
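/* adjust an SValue for bit-field access: if struct layout recorded a separate
   storage type (auxtype), switch the value to it; a return value of VT_STRUCT
   selects the byte-wise packed access path in gv() */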
1473 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1475 int t;
1476 if (0 == sv->type.ref)
1477 return 0;
1478 t = sv->type.ref->auxtype;
1479 if (t != -1 && t != VT_STRUCT) {
1480 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1481 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1483 return t;
1486 /* store vtop in a register belonging to class 'rc'. lvalues are
1487 converted to values. Cannot be used if the value cannot be
1488 converted to a register value (such as structures). */
1489 ST_FUNC int gv(int rc)
1491 int r, bit_pos, bit_size, size, align, rc2;
1493 /* NOTE: get_reg can modify vstack[] */
1494 if (vtop->type.t & VT_BITFIELD) {
1495 CType type;
1497 bit_pos = BIT_POS(vtop->type.t);
1498 bit_size = BIT_SIZE(vtop->type.t);
1499 /* remove bit field info to avoid loops */
1500 vtop->type.t &= ~VT_STRUCT_MASK;
1502 type.ref = NULL;
1503 type.t = vtop->type.t & VT_UNSIGNED;
1504 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1505 type.t |= VT_UNSIGNED;
1507 r = adjust_bf(vtop, bit_pos, bit_size);
1509 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1510 type.t |= VT_LLONG;
1511 else
1512 type.t |= VT_INT;
1514 if (r == VT_STRUCT) {
1515 load_packed_bf(&type, bit_pos, bit_size);
1516 } else {
1517 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1518 /* cast to int to propagate signedness in following ops */
1519 gen_cast(&type);
1520 /* generate shifts */
1521 vpushi(bits - (bit_pos + bit_size));
1522 gen_op(TOK_SHL);
1523 vpushi(bits - bit_size);
1524 /* NOTE: transformed to SHR if unsigned */
1525 gen_op(TOK_SAR);
1527 r = gv(rc);
1528 } else {
1529 if (is_float(vtop->type.t) &&
1530 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1531 unsigned long offset;
1532 /* CPUs usually cannot use float constants, so we store them
1533 generically in the data segment */
1534 size = type_size(&vtop->type, &align);
1535 if (NODATA_WANTED)
1536 size = 0, align = 1;
1537 offset = section_add(data_section, size, align);
1538 vpush_ref(&vtop->type, data_section, offset, size);
1539 vswap();
1540 init_putv(&vtop->type, data_section, offset);
1541 vtop->r |= VT_LVAL;
1543 #ifdef CONFIG_TCC_BCHECK
1544 if (vtop->r & VT_MUSTBOUND)
1545 gbound();
1546 #endif
1548 r = vtop->r & VT_VALMASK;
1549 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1550 #ifndef TCC_TARGET_ARM64
1551 #ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
1552 if (rc == RC_IRET)
1553 rc2 = RC_LRET;
1554 #ifdef TCC_TARGET_X86_64
1555 else if (rc == RC_FRET)
1556 rc2 = RC_QRET;
1557 #endif
1558 #endif
1559 #endif
1560 /* need to reload if:
1561 - constant
1562 - lvalue (need to dereference pointer)
1563 - already a register, but not in the right class */
1564 if (r >= VT_CONST
1565 || (vtop->r & VT_LVAL)
1566 || !(reg_classes[r] & rc)
1567 #if PTR_SIZE == 8
1568 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1569 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1570 #else
1571 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1572 #endif
1575 r = get_reg(rc);
1576 #if PTR_SIZE == 8
1577 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1578 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1579 #else
1580 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1581 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1582 unsigned long long ll;
1583 #endif
1584 int r2, original_type;
1585 original_type = vtop->type.t;
1586 /* two register type load : expand to two words
1587 temporarily */
1588 #if PTR_SIZE == 4
1589 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1590 /* load constant */
1591 ll = vtop->c.i;
1592 vtop->c.i = ll; /* first word */
1593 load(r, vtop);
1594 vtop->r = r; /* save register value */
1595 vpushi(ll >> 32); /* second word */
1596 } else
1597 #endif
1598 if (vtop->r & VT_LVAL) {
1599 /* We do not want to modify the long long
1600 pointer here, so the safest (and least
1601 efficient) approach is to save all the other
1602 registers on the stack. XXX: totally inefficient. */
1603 #if 0
1604 save_regs(1);
1605 #else
1606 /* lvalue_save: save only if used further down the stack */
1607 save_reg_upstack(vtop->r, 1);
1608 #endif
1609 /* load from memory */
1610 vtop->type.t = load_type;
1611 load(r, vtop);
1612 vdup();
1613 vtop[-1].r = r; /* save register value */
1614 /* increment pointer to get second word */
1615 vtop->type.t = addr_type;
1616 gaddrof();
1617 vpushi(load_size);
1618 gen_op('+');
1619 vtop->r |= VT_LVAL;
1620 vtop->type.t = load_type;
1621 } else {
1622 /* move registers */
1623 load(r, vtop);
1624 vdup();
1625 vtop[-1].r = r; /* save register value */
1626 vtop->r = vtop[-1].r2;
1628 /* Allocate second register. Here we rely on the fact that
1629 get_reg() tries first to free r2 of an SValue. */
1630 r2 = get_reg(rc2);
1631 load(r2, vtop);
1632 vpop();
1633 /* write second register */
1634 vtop->r2 = r2;
1635 vtop->type.t = original_type;
1636 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1637 int t1, t;
1638 /* lvalue of scalar type : need to use lvalue type
1639 because of possible cast */
1640 t = vtop->type.t;
1641 t1 = t;
1642 /* compute memory access type */
1643 if (vtop->r & VT_LVAL_BYTE)
1644 t = VT_BYTE;
1645 else if (vtop->r & VT_LVAL_SHORT)
1646 t = VT_SHORT;
1647 if (vtop->r & VT_LVAL_UNSIGNED)
1648 t |= VT_UNSIGNED;
1649 vtop->type.t = t;
1650 load(r, vtop);
1651 /* restore wanted type */
1652 vtop->type.t = t1;
1653 } else {
1654 if (vtop->r == VT_CMP)
1655 vset_VT_JMP();
1656 /* one register type load */
1657 load(r, vtop);
1660 vtop->r = r;
1661 #ifdef TCC_TARGET_C67
1662 /* uses register pairs for doubles */
1663 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1664 vtop->r2 = r+1;
1665 #endif
1667 return r;
1670 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1671 ST_FUNC void gv2(int rc1, int rc2)
1673 /* generate the more generic register class first. But VT_JMP or VT_CMP
1674 values must be generated first in all cases to avoid possible
1675 reload errors */
1676 if (vtop->r != VT_CMP && rc1 <= rc2) {
1677 vswap();
1678 gv(rc1);
1679 vswap();
1680 gv(rc2);
1681 /* test if reload is needed for first register */
1682 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1683 vswap();
1684 gv(rc1);
1685 vswap();
1687 } else {
1688 gv(rc2);
1689 vswap();
1690 gv(rc1);
1691 vswap();
1692 /* test if reload is needed for first register */
1693 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1694 gv(rc2);
1699 #ifndef TCC_TARGET_ARM64
1700 /* wrapper around RC_FRET to return a register by type */
1701 static int rc_fret(int t)
1703 #ifdef TCC_TARGET_X86_64
1704 if (t == VT_LDOUBLE) {
1705 return RC_ST0;
1707 #endif
1708 return RC_FRET;
1710 #endif
1712 /* wrapper around REG_FRET to return a register by type */
1713 static int reg_fret(int t)
1715 #ifdef TCC_TARGET_X86_64
1716 if (t == VT_LDOUBLE) {
1717 return TREG_ST0;
1719 #endif
1720 return REG_FRET;
1723 #if PTR_SIZE == 4
1724 /* expand 64bit on stack in two ints */
1725 ST_FUNC void lexpand(void)
1727 int u, v;
1728 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1729 v = vtop->r & (VT_VALMASK | VT_LVAL);
1730 if (v == VT_CONST) {
1731 vdup();
1732 vtop[0].c.i >>= 32;
1733 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1734 vdup();
1735 vtop[0].c.i += 4;
1736 } else {
1737 gv(RC_INT);
1738 vdup();
1739 vtop[0].r = vtop[-1].r2;
1740 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1742 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1744 #endif
1746 #if PTR_SIZE == 4
1747 /* build a long long from two ints */
1748 static void lbuild(int t)
1750 gv2(RC_INT, RC_INT);
1751 vtop[-1].r2 = vtop[0].r;
1752 vtop[-1].type.t = t;
1753 vpop();
1755 #endif
1757 /* convert stack entry to register and duplicate its value in another
1758 register */
1759 static void gv_dup(void)
1761 int rc, t, r, r1;
1762 SValue sv;
1764 t = vtop->type.t;
1765 #if PTR_SIZE == 4
1766 if ((t & VT_BTYPE) == VT_LLONG) {
1767 if (t & VT_BITFIELD) {
1768 gv(RC_INT);
1769 t = vtop->type.t;
1771 lexpand();
1772 gv_dup();
1773 vswap();
1774 vrotb(3);
1775 gv_dup();
1776 vrotb(4);
1777 /* stack: H L L1 H1 */
1778 lbuild(t);
1779 vrotb(3);
1780 vrotb(3);
1781 vswap();
1782 lbuild(t);
1783 vswap();
1784 } else
1785 #endif
1787 /* duplicate value */
1788 rc = RC_INT;
1789 sv.type.t = VT_INT;
1790 if (is_float(t)) {
1791 rc = RC_FLOAT;
1792 #ifdef TCC_TARGET_X86_64
1793 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1794 rc = RC_ST0;
1796 #endif
1797 sv.type.t = t;
1799 r = gv(rc);
1800 r1 = get_reg(rc);
1801 sv.r = r;
1802 sv.c.i = 0;
1803 load(r1, &sv); /* move r to r1 */
1804 vdup();
1805 /* duplicates value */
1806 if (r != r1)
1807 vtop->r = r1;
1811 #if PTR_SIZE == 4
1812 /* generate CPU independent (unsigned) long long operations */
1813 static void gen_opl(int op)
1815 int t, a, b, op1, c, i;
1816 int func;
1817 unsigned short reg_iret = REG_IRET;
1818 unsigned short reg_lret = REG_LRET;
1819 SValue tmp;
1821 switch(op) {
1822 case '/':
1823 case TOK_PDIV:
1824 func = TOK___divdi3;
1825 goto gen_func;
1826 case TOK_UDIV:
1827 func = TOK___udivdi3;
1828 goto gen_func;
1829 case '%':
1830 func = TOK___moddi3;
1831 goto gen_mod_func;
1832 case TOK_UMOD:
1833 func = TOK___umoddi3;
1834 gen_mod_func:
1835 #ifdef TCC_ARM_EABI
1836 reg_iret = TREG_R2;
1837 reg_lret = TREG_R3;
1838 #endif
1839 gen_func:
1840 /* call generic long long function */
1841 vpush_global_sym(&func_old_type, func);
1842 vrott(3);
1843 gfunc_call(2);
1844 vpushi(0);
1845 vtop->r = reg_iret;
1846 vtop->r2 = reg_lret;
1847 break;
1848 case '^':
1849 case '&':
1850 case '|':
1851 case '*':
1852 case '+':
1853 case '-':
1854 //pv("gen_opl A",0,2);
1855 t = vtop->type.t;
1856 vswap();
1857 lexpand();
1858 vrotb(3);
1859 lexpand();
1860 /* stack: L1 H1 L2 H2 */
1861 tmp = vtop[0];
1862 vtop[0] = vtop[-3];
1863 vtop[-3] = tmp;
1864 tmp = vtop[-2];
1865 vtop[-2] = vtop[-3];
1866 vtop[-3] = tmp;
1867 vswap();
1868 /* stack: H1 H2 L1 L2 */
1869 //pv("gen_opl B",0,4);
1870 if (op == '*') {
1871 vpushv(vtop - 1);
1872 vpushv(vtop - 1);
1873 gen_op(TOK_UMULL);
1874 lexpand();
1875 /* stack: H1 H2 L1 L2 ML MH */
1876 for(i=0;i<4;i++)
1877 vrotb(6);
1878 /* stack: ML MH H1 H2 L1 L2 */
1879 tmp = vtop[0];
1880 vtop[0] = vtop[-2];
1881 vtop[-2] = tmp;
1882 /* stack: ML MH H1 L2 H2 L1 */
1883 gen_op('*');
1884 vrotb(3);
1885 vrotb(3);
1886 gen_op('*');
1887 /* stack: ML MH M1 M2 */
1888 gen_op('+');
1889 gen_op('+');
1890 } else if (op == '+' || op == '-') {
1891 /* XXX: add non carry method too (for MIPS or alpha) */
1892 if (op == '+')
1893 op1 = TOK_ADDC1;
1894 else
1895 op1 = TOK_SUBC1;
1896 gen_op(op1);
1897 /* stack: H1 H2 (L1 op L2) */
1898 vrotb(3);
1899 vrotb(3);
1900 gen_op(op1 + 1); /* TOK_xxxC2 */
1901 } else {
1902 gen_op(op);
1903 /* stack: H1 H2 (L1 op L2) */
1904 vrotb(3);
1905 vrotb(3);
1906 /* stack: (L1 op L2) H1 H2 */
1907 gen_op(op);
1908 /* stack: (L1 op L2) (H1 op H2) */
1910 /* stack: L H */
1911 lbuild(t);
1912 break;
1913 case TOK_SAR:
1914 case TOK_SHR:
1915 case TOK_SHL:
1916 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1917 t = vtop[-1].type.t;
1918 vswap();
1919 lexpand();
1920 vrotb(3);
1921 /* stack: L H shift */
1922 c = (int)vtop->c.i;
1923 /* constant: simpler */
1924 /* NOTE: all comments are for SHL. the other cases are
1925 done by swapping words */
1926 vpop();
1927 if (op != TOK_SHL)
1928 vswap();
1929 if (c >= 32) {
1930 /* stack: L H */
1931 vpop();
1932 if (c > 32) {
1933 vpushi(c - 32);
1934 gen_op(op);
1936 if (op != TOK_SAR) {
1937 vpushi(0);
1938 } else {
1939 gv_dup();
1940 vpushi(31);
1941 gen_op(TOK_SAR);
1943 vswap();
1944 } else {
1945 vswap();
1946 gv_dup();
1947 /* stack: H L L */
1948 vpushi(c);
1949 gen_op(op);
1950 vswap();
1951 vpushi(32 - c);
1952 if (op == TOK_SHL)
1953 gen_op(TOK_SHR);
1954 else
1955 gen_op(TOK_SHL);
1956 vrotb(3);
1957 /* stack: L L H */
1958 vpushi(c);
1959 if (op == TOK_SHL)
1960 gen_op(TOK_SHL);
1961 else
1962 gen_op(TOK_SHR);
1963 gen_op('|');
1965 if (op != TOK_SHL)
1966 vswap();
1967 lbuild(t);
1968 } else {
1969 /* XXX: should provide a faster fallback on x86 ? */
1970 switch(op) {
1971 case TOK_SAR:
1972 func = TOK___ashrdi3;
1973 goto gen_func;
1974 case TOK_SHR:
1975 func = TOK___lshrdi3;
1976 goto gen_func;
1977 case TOK_SHL:
1978 func = TOK___ashldi3;
1979 goto gen_func;
1982 break;
1983 default:
1984 /* compare operations */
1985 t = vtop->type.t;
1986 vswap();
1987 lexpand();
1988 vrotb(3);
1989 lexpand();
1990 /* stack: L1 H1 L2 H2 */
1991 tmp = vtop[-1];
1992 vtop[-1] = vtop[-2];
1993 vtop[-2] = tmp;
1994 /* stack: L1 L2 H1 H2 */
1995 save_regs(4);
1996 /* compare high */
1997 op1 = op;
1998 /* when values are equal, we need to compare low words. since
1999 the jump is inverted, we invert the test too. */
2000 if (op1 == TOK_LT)
2001 op1 = TOK_LE;
2002 else if (op1 == TOK_GT)
2003 op1 = TOK_GE;
2004 else if (op1 == TOK_ULT)
2005 op1 = TOK_ULE;
2006 else if (op1 == TOK_UGT)
2007 op1 = TOK_UGE;
2008 a = 0;
2009 b = 0;
2010 gen_op(op1);
2011 if (op == TOK_NE) {
2012 b = gvtst(0, 0);
2013 } else {
2014 a = gvtst(1, 0);
2015 if (op != TOK_EQ) {
2016 /* generate non equal test */
2017 vpushi(0);
2018 vset_VT_CMP(TOK_NE);
2019 b = gvtst(0, 0);
2022 /* compare low. Always unsigned */
2023 op1 = op;
2024 if (op1 == TOK_LT)
2025 op1 = TOK_ULT;
2026 else if (op1 == TOK_LE)
2027 op1 = TOK_ULE;
2028 else if (op1 == TOK_GT)
2029 op1 = TOK_UGT;
2030 else if (op1 == TOK_GE)
2031 op1 = TOK_UGE;
2032 gen_op(op1);
2033 #if 0//def TCC_TARGET_I386
2034 if (op == TOK_NE) { gsym(b); break; }
2035 if (op == TOK_EQ) { gsym(a); break; }
2036 #endif
2037 gvtst_set(1, a);
2038 gvtst_set(0, b);
2039 break;
2042 #endif
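/* constant-folding helpers: signed 64-bit division and signed '<', computed
   on the unsigned representation used by the constant evaluator */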
2044 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2046 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2047 return (a ^ b) >> 63 ? -x : x;
2050 static int gen_opic_lt(uint64_t a, uint64_t b)
2052 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
2055 /* handle integer constant optimizations and various machine
2056 independent optimizations */
2057 static void gen_opic(int op)
2059 SValue *v1 = vtop - 1;
2060 SValue *v2 = vtop;
2061 int t1 = v1->type.t & VT_BTYPE;
2062 int t2 = v2->type.t & VT_BTYPE;
2063 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2064 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2065 uint64_t l1 = c1 ? v1->c.i : 0;
2066 uint64_t l2 = c2 ? v2->c.i : 0;
2067 int shm = (t1 == VT_LLONG) ? 63 : 31;
2069 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2070 l1 = ((uint32_t)l1 |
2071 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2072 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2073 l2 = ((uint32_t)l2 |
2074 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2076 if (c1 && c2) {
2077 switch(op) {
2078 case '+': l1 += l2; break;
2079 case '-': l1 -= l2; break;
2080 case '&': l1 &= l2; break;
2081 case '^': l1 ^= l2; break;
2082 case '|': l1 |= l2; break;
2083 case '*': l1 *= l2; break;
2085 case TOK_PDIV:
2086 case '/':
2087 case '%':
2088 case TOK_UDIV:
2089 case TOK_UMOD:
2090 /* if division by zero, generate explicit division */
2091 if (l2 == 0) {
2092 if (const_wanted)
2093 tcc_error("division by zero in constant");
2094 goto general_case;
2096 switch(op) {
2097 default: l1 = gen_opic_sdiv(l1, l2); break;
2098 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2099 case TOK_UDIV: l1 = l1 / l2; break;
2100 case TOK_UMOD: l1 = l1 % l2; break;
2102 break;
2103 case TOK_SHL: l1 <<= (l2 & shm); break;
2104 case TOK_SHR: l1 >>= (l2 & shm); break;
2105 case TOK_SAR:
2106 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2107 break;
2108 /* tests */
2109 case TOK_ULT: l1 = l1 < l2; break;
2110 case TOK_UGE: l1 = l1 >= l2; break;
2111 case TOK_EQ: l1 = l1 == l2; break;
2112 case TOK_NE: l1 = l1 != l2; break;
2113 case TOK_ULE: l1 = l1 <= l2; break;
2114 case TOK_UGT: l1 = l1 > l2; break;
2115 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2116 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2117 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2118 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2119 /* logical */
2120 case TOK_LAND: l1 = l1 && l2; break;
2121 case TOK_LOR: l1 = l1 || l2; break;
2122 default:
2123 goto general_case;
2125 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2126 l1 = ((uint32_t)l1 |
2127 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2128 v1->c.i = l1;
2129 vtop--;
2130 } else {
2131 /* if commutative ops, put c2 as constant */
2132 if (c1 && (op == '+' || op == '&' || op == '^' ||
2133 op == '|' || op == '*')) {
2134 vswap();
2135 c2 = c1; //c = c1, c1 = c2, c2 = c;
2136 l2 = l1; //l = l1, l1 = l2, l2 = l;
2138 if (!const_wanted &&
2139 c1 && ((l1 == 0 &&
2140 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2141 (l1 == -1 && op == TOK_SAR))) {
2142 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2143 vtop--;
2144 } else if (!const_wanted &&
2145 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2146 (op == '|' &&
2147 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2148 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2149 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2150 if (l2 == 1)
2151 vtop->c.i = 0;
2152 vswap();
2153 vtop--;
2154 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2155 op == TOK_PDIV) &&
2156 l2 == 1) ||
2157 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2158 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2159 l2 == 0) ||
2160 (op == '&' &&
2161 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2162 /* filter out NOP operations like x*1, x-0, x&-1... */
2163 vtop--;
2164 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2165 /* try to use shifts instead of muls or divs */
2166 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2167 int n = -1;
2168 while (l2) {
2169 l2 >>= 1;
2170 n++;
2172 vtop->c.i = n;
2173 if (op == '*')
2174 op = TOK_SHL;
2175 else if (op == TOK_PDIV)
2176 op = TOK_SAR;
2177 else
2178 op = TOK_SHR;
2180 goto general_case;
2181 } else if (c2 && (op == '+' || op == '-') &&
2182 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2183 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2184 /* symbol + constant case */
2185 if (op == '-')
2186 l2 = -l2;
2187 l2 += vtop[-1].c.i;
2188 /* The backends can't always deal with addends to symbols
2189 larger than +-1<<31. Don't construct such. */
2190 if ((int)l2 != l2)
2191 goto general_case;
2192 vtop--;
2193 vtop->c.i = l2;
2194 } else {
2195 general_case:
2196 /* call low level op generator */
2197 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2198 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2199 gen_opl(op);
2200 else
2201 gen_opi(op);
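/* Editor's note (illustrative sketch, not part of tcc): the block above
   folds operations on two integer constants at compile time and
   strength-reduces multiplication/division by a power of two into shifts.
   A standalone model of the exponent computation used for that rewrite
   (the helper name is hypothetical):

       #include <stdint.h>

       static int pow2_exponent(uint64_t l2)   // precondition: l2 is a power of two
       {
           int n = -1;
           while (l2) {
               l2 >>= 1;
               n++;
           }
           return n;    // pow2_exponent(8) == 3, so "x * 8" becomes "x << 3"
       }
*/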
2206 /* generate a floating point operation with constant propagation */
2207 static void gen_opif(int op)
2209 int c1, c2;
2210 SValue *v1, *v2;
2211 #if defined _MSC_VER && defined __x86_64__
2212 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2213 volatile
2214 #endif
2215 long double f1, f2;
2217 v1 = vtop - 1;
2218 v2 = vtop;
2219 /* currently, we cannot do computations with forward symbols */
2220 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2221 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2222 if (c1 && c2) {
2223 if (v1->type.t == VT_FLOAT) {
2224 f1 = v1->c.f;
2225 f2 = v2->c.f;
2226 } else if (v1->type.t == VT_DOUBLE) {
2227 f1 = v1->c.d;
2228 f2 = v2->c.d;
2229 } else {
2230 f1 = v1->c.ld;
2231 f2 = v2->c.ld;
2234 /* NOTE: we only do constant propagation if finite number (not
2235 NaN or infinity) (ANSI spec) */
2236 if (!ieee_finite(f1) || !ieee_finite(f2))
2237 goto general_case;
2239 switch(op) {
2240 case '+': f1 += f2; break;
2241 case '-': f1 -= f2; break;
2242 case '*': f1 *= f2; break;
2243 case '/':
2244 if (f2 == 0.0) {
2245 /* If not in initializer we need to potentially generate
2246 FP exceptions at runtime, otherwise we want to fold. */
2247 if (!const_wanted)
2248 goto general_case;
2250 f1 /= f2;
2251 break;
2252 /* XXX: also handles tests ? */
2253 default:
2254 goto general_case;
2256 /* XXX: overflow test ? */
2257 if (v1->type.t == VT_FLOAT) {
2258 v1->c.f = f1;
2259 } else if (v1->type.t == VT_DOUBLE) {
2260 v1->c.d = f1;
2261 } else {
2262 v1->c.ld = f1;
2264 vtop--;
2265 } else {
2266 general_case:
2267 gen_opf(op);
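/* Editor's note (illustrative sketch, not part of tcc): gen_opif() folds
   only when both operands are finite constants; NaN/Inf inputs, and
   division by zero outside constant expressions, are left to run time so
   FP exceptions can still be raised.  A standalone model of the
   finiteness guard (hypothetical helper name):

       #include <math.h>

       static int can_fold_fp(long double f1, long double f2)
       {
           return isfinite(f1) && isfinite(f2);   // e.g. an Inf operand is never folded
       }
*/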
2271 static int pointed_size(CType *type)
2273 int align;
2274 return type_size(pointed_type(type), &align);
2277 static void vla_runtime_pointed_size(CType *type)
2279 int align;
2280 vla_runtime_type_size(pointed_type(type), &align);
2283 static inline int is_null_pointer(SValue *p)
2285 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2286 return 0;
2287 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2288 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2289 ((p->type.t & VT_BTYPE) == VT_PTR &&
2290 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2291 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2292 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2295 static inline int is_integer_btype(int bt)
2297 return (bt == VT_BYTE || bt == VT_SHORT ||
2298 bt == VT_INT || bt == VT_LLONG);
2301 /* check types for comparison or subtraction of pointers */
2302 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2304 CType *type1, *type2, tmp_type1, tmp_type2;
2305 int bt1, bt2;
2307 /* null pointers are accepted for all comparisons, as in gcc */
2308 if (is_null_pointer(p1) || is_null_pointer(p2))
2309 return;
2310 type1 = &p1->type;
2311 type2 = &p2->type;
2312 bt1 = type1->t & VT_BTYPE;
2313 bt2 = type2->t & VT_BTYPE;
2314 /* accept comparison between pointer and integer with a warning */
2315 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2316 if (op != TOK_LOR && op != TOK_LAND )
2317 tcc_warning("comparison between pointer and integer");
2318 return;
2321 /* both must be pointers or implicit function pointers */
2322 if (bt1 == VT_PTR) {
2323 type1 = pointed_type(type1);
2324 } else if (bt1 != VT_FUNC)
2325 goto invalid_operands;
2327 if (bt2 == VT_PTR) {
2328 type2 = pointed_type(type2);
2329 } else if (bt2 != VT_FUNC) {
2330 invalid_operands:
2331 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2333 if ((type1->t & VT_BTYPE) == VT_VOID ||
2334 (type2->t & VT_BTYPE) == VT_VOID)
2335 return;
2336 tmp_type1 = *type1;
2337 tmp_type2 = *type2;
2338 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2339 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2340 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2341 /* gcc-like error if '-' is used */
2342 if (op == '-')
2343 goto invalid_operands;
2344 else
2345 tcc_warning("comparison of distinct pointer types lacks a cast");
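/* Editor's note (illustrative examples, not part of tcc): the checks above
   roughly give the following behaviour for user code:

       int *ip;  void *vp;  double *dp;

       ip == 0;      // ok: null pointer constants compare with any pointer
       ip < 5;       // warning: comparison between pointer and integer
       ip == vp;     // ok: void * is accepted against any object pointer
       ip == dp;     // warning: comparison of distinct pointer types lacks a cast
       ip - dp;      // error: invalid operands to binary -
*/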
2349 /* generic gen_op: handles types problems */
2350 ST_FUNC void gen_op(int op)
2352 int u, t1, t2, bt1, bt2, t;
2353 CType type1;
2355 redo:
2356 t1 = vtop[-1].type.t;
2357 t2 = vtop[0].type.t;
2358 bt1 = t1 & VT_BTYPE;
2359 bt2 = t2 & VT_BTYPE;
2361 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2362 tcc_error("operation on a struct");
2363 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2364 if (bt2 == VT_FUNC) {
2365 mk_pointer(&vtop->type);
2366 gaddrof();
2368 if (bt1 == VT_FUNC) {
2369 vswap();
2370 mk_pointer(&vtop->type);
2371 gaddrof();
2372 vswap();
2374 goto redo;
2375 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2376 /* at least one operand is a pointer */
2377 /* relational op: both operands must be pointers */
2378 if (op >= TOK_ULT && op <= TOK_LOR) {
2379 check_comparison_pointer_types(vtop - 1, vtop, op);
2380 /* pointers are handled as unsigned */
2381 #if PTR_SIZE == 8
2382 t = VT_LLONG | VT_UNSIGNED;
2383 #else
2384 t = VT_INT | VT_UNSIGNED;
2385 #endif
2386 goto std_op;
2388 /* if both pointers, then it must be the '-' op */
2389 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2390 if (op != '-')
2391 tcc_error("cannot use pointers here");
2392 check_comparison_pointer_types(vtop - 1, vtop, op);
2393 /* XXX: check that types are compatible */
2394 if (vtop[-1].type.t & VT_VLA) {
2395 vla_runtime_pointed_size(&vtop[-1].type);
2396 } else {
2397 vpushi(pointed_size(&vtop[-1].type));
2399 vrott(3);
2400 gen_opic(op);
2401 vtop->type.t = ptrdiff_type.t;
2402 vswap();
2403 gen_op(TOK_PDIV);
2404 } else {
2405 /* exactly one pointer : must be '+' or '-'. */
2406 if (op != '-' && op != '+')
2407 tcc_error("cannot use pointers here");
2408 /* Put pointer as first operand */
2409 if (bt2 == VT_PTR) {
2410 vswap();
2411 t = t1, t1 = t2, t2 = t;
2413 #if PTR_SIZE == 4
2414 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2415 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2416 gen_cast_s(VT_INT);
2417 #endif
2418 type1 = vtop[-1].type;
2419 type1.t &= ~VT_ARRAY;
2420 if (vtop[-1].type.t & VT_VLA)
2421 vla_runtime_pointed_size(&vtop[-1].type);
2422 else {
2423 u = pointed_size(&vtop[-1].type);
2424 if (u < 0)
2425 tcc_error("unknown array element size");
2426 #if PTR_SIZE == 8
2427 vpushll(u);
2428 #else
2429 /* XXX: cast to int ? (long long case) */
2430 vpushi(u);
2431 #endif
2433 gen_op('*');
2434 #if 0
2435 /* #ifdef CONFIG_TCC_BCHECK
2436 The main reason for removing this code:
2437 #include <stdio.h>
2438 int main ()
2440 int v[10];
2441 int i = 10;
2442 int j = 9;
2443 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2444 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2446 When this code is on, the output looks like
2447 v+i-j = 0xfffffffe
2448 v+(i-j) = 0xbff84000
2450 /* if evaluating constant expression, no code should be
2451 generated, so no bound check */
2452 if (tcc_state->do_bounds_check && !const_wanted) {
2453 /* if bounded pointers, we generate a special code to
2454 test bounds */
2455 if (op == '-') {
2456 vpushi(0);
2457 vswap();
2458 gen_op('-');
2460 gen_bounded_ptr_add();
2461 } else
2462 #endif
2464 gen_opic(op);
2466 /* restore the type in case gen_opic() swapped the operands */
2467 vtop->type = type1;
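/* Editor's note (illustrative, not part of tcc): the two pointer branches
   above implement C pointer arithmetic by scaling with the element size.
   With "int *p, *q;" and a 4-byte int:

       p + 3        // compiled as the address p + 3 * 4
       p - q        // compiled as (p - q) / 4; the division uses TOK_PDIV,
                    // so a power-of-two size becomes an arithmetic shift

   For VLA element types the size is not a compile-time constant, so it is
   pushed at run time by vla_runtime_pointed_size() instead. */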
2469 } else if (is_float(bt1) || is_float(bt2)) {
2470 /* compute bigger type and do implicit casts */
2471 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2472 t = VT_LDOUBLE;
2473 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2474 t = VT_DOUBLE;
2475 } else {
2476 t = VT_FLOAT;
2478 /* floats can only be used for a few operations */
2479 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2480 (op < TOK_ULT || op > TOK_GT))
2481 tcc_error("invalid operands for binary operation");
2482 goto std_op;
2483 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2484 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2485 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2486 t |= VT_UNSIGNED;
2487 t |= (VT_LONG & t1);
2488 goto std_op;
2489 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2490 /* cast to biggest op */
2491 t = VT_LLONG | VT_LONG;
2492 if (bt1 == VT_LLONG)
2493 t &= t1;
2494 if (bt2 == VT_LLONG)
2495 t &= t2;
2496 /* convert to unsigned if it does not fit in a long long */
2497 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2498 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2499 t |= VT_UNSIGNED;
2500 goto std_op;
2501 } else {
2502 /* integer operations */
2503 t = VT_INT | (VT_LONG & (t1 | t2));
2504 /* convert to unsigned if it does not fit in an integer */
2505 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2506 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2507 t |= VT_UNSIGNED;
2508 std_op:
2509 /* XXX: currently, some unsigned operations are explicit, so
2510 we modify them here */
2511 if (t & VT_UNSIGNED) {
2512 if (op == TOK_SAR)
2513 op = TOK_SHR;
2514 else if (op == '/')
2515 op = TOK_UDIV;
2516 else if (op == '%')
2517 op = TOK_UMOD;
2518 else if (op == TOK_LT)
2519 op = TOK_ULT;
2520 else if (op == TOK_GT)
2521 op = TOK_UGT;
2522 else if (op == TOK_LE)
2523 op = TOK_ULE;
2524 else if (op == TOK_GE)
2525 op = TOK_UGE;
2527 vswap();
2528 type1.t = t;
2529 type1.ref = NULL;
2530 gen_cast(&type1);
2531 vswap();
2532 /* special case for shifts and long long: we keep the shift as
2533 an integer */
2534 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2535 type1.t = VT_INT;
2536 gen_cast(&type1);
2537 if (is_float(t))
2538 gen_opif(op);
2539 else
2540 gen_opic(op);
2541 if (op >= TOK_ULT && op <= TOK_GT) {
2542 /* relational op: the result is an int */
2543 vtop->type.t = VT_INT;
2544 } else {
2545 vtop->type.t = t;
2548 // Make sure that we have converted to an rvalue:
2549 if (vtop->r & VT_LVAL)
2550 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
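/* Editor's note (illustrative examples, not part of tcc): the conversion
   rules above reproduce C's "usual arithmetic conversions".  For instance,
   with "unsigned u = 1; int i = -1; long long ll = 0;":

       i < u      // both converted to unsigned int and op becomes TOK_ULT,
                  // so (-1 < 1u) evaluates to 0
       i + ll     // i is promoted to long long before the addition
       u >> 1     // left operand is unsigned, so TOK_SHR (logical shift) is
                  // used instead of TOK_SAR; the shift count stays an int
*/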
2553 #ifndef TCC_TARGET_ARM
2554 /* generic itof for unsigned long long case */
2555 static void gen_cvt_itof1(int t)
2557 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2558 gen_cvt_itof(t);
2559 #else
2560 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2561 (VT_LLONG | VT_UNSIGNED)) {
2563 if (t == VT_FLOAT)
2564 vpush_global_sym(&func_old_type, TOK___floatundisf);
2565 #if LDOUBLE_SIZE != 8
2566 else if (t == VT_LDOUBLE)
2567 vpush_global_sym(&func_old_type, TOK___floatundixf);
2568 #endif
2569 else
2570 vpush_global_sym(&func_old_type, TOK___floatundidf);
2571 vrott(2);
2572 gfunc_call(1);
2573 vpushi(0);
2574 vtop->r = reg_fret(t);
2575 } else {
2576 gen_cvt_itof(t);
2578 #endif
2580 #endif
2582 /* generic ftoi for unsigned long long case */
2583 static void gen_cvt_ftoi1(int t)
2585 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2586 gen_cvt_ftoi(t);
2587 #else
2588 int st;
2590 if (t == (VT_LLONG | VT_UNSIGNED)) {
2591 /* not handled natively */
2592 st = vtop->type.t & VT_BTYPE;
2593 if (st == VT_FLOAT)
2594 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2595 #if LDOUBLE_SIZE != 8
2596 else if (st == VT_LDOUBLE)
2597 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2598 #endif
2599 else
2600 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2601 vrott(2);
2602 gfunc_call(1);
2603 vpushi(0);
2604 vtop->r = REG_IRET;
2605 vtop->r2 = REG_LRET;
2606 } else {
2607 gen_cvt_ftoi(t);
2609 #endif
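/* Editor's note (illustrative sketch, not part of tcc): when the target has
   no native float -> unsigned long long conversion, the code above calls a
   runtime helper chosen purely from the source floating-point type, just as
   gen_cvt_itof1() does with the __floatundi* helpers for the opposite
   direction.  A hypothetical model of that selection:

       static const char *fixuns_helper(int src_btype)   // VT_* as in the code above
       {
           if (src_btype == VT_FLOAT)
               return "__fixunssfdi";
           if (src_btype == VT_LDOUBLE)
               return "__fixunsxfdi";    // only when LDOUBLE_SIZE != 8
           return "__fixunsdfdi";
       }
*/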
2612 /* force char or short cast */
2613 static void force_charshort_cast(int t)
2615 int bits, dbt;
2617 /* cannot cast static initializers */
2618 if (STATIC_DATA_WANTED)
2619 return;
2621 dbt = t & VT_BTYPE;
2622 /* XXX: add optimization if lvalue : just change type and offset */
2623 if (dbt == VT_BYTE)
2624 bits = 8;
2625 else
2626 bits = 16;
2627 if (t & VT_UNSIGNED) {
2628 vpushi((1 << bits) - 1);
2629 gen_op('&');
2630 } else {
2631 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2632 bits = 64 - bits;
2633 else
2634 bits = 32 - bits;
2635 vpushi(bits);
2636 gen_op(TOK_SHL);
2637 /* result must be signed or the SAR is converted to an SHL.
2638 This was not the case when "t" was a signed short
2639 and the last value on the stack was an unsigned int */
2640 vtop->type.t &= ~VT_UNSIGNED;
2641 vpushi(bits);
2642 gen_op(TOK_SAR);
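/* Editor's note (illustrative sketch, not part of tcc): the function above
   narrows a value to char/short width using only generic operations:
   unsigned targets are masked, signed targets use a shift-left /
   arithmetic-shift-right pair.  Equivalent standalone C, assuming the usual
   two's-complement behaviour of >> on negative ints (which is what the
   generated code relies on):

       static int to_signed_char(int x)          // models the signed VT_BYTE case
       {
           return (int)((unsigned)x << 24) >> 24;   // to_signed_char(0x1FF) == -1
       }

       static unsigned to_unsigned_char(unsigned x)  // models the unsigned case
       {
           return x & 0xFF;                      // to_unsigned_char(0x1FF) == 0xFF
       }
*/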
2646 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2647 static void gen_cast_s(int t)
2649 CType type;
2650 type.t = t;
2651 type.ref = NULL;
2652 gen_cast(&type);
2655 static void gen_cast(CType *type)
2657 int sbt, dbt, sf, df, c, p;
2659 /* special delayed cast for char/short */
2660 /* XXX: in some cases (multiple cascaded casts), it may still
2661 be incorrect */
2662 if (vtop->r & VT_MUSTCAST) {
2663 vtop->r &= ~VT_MUSTCAST;
2664 force_charshort_cast(vtop->type.t);
2667 /* bitfields first get cast to ints */
2668 if (vtop->type.t & VT_BITFIELD) {
2669 gv(RC_INT);
2672 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2673 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2675 if (sbt != dbt) {
2676 sf = is_float(sbt);
2677 df = is_float(dbt);
2678 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2679 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2680 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2681 c &= dbt != VT_LDOUBLE;
2682 #endif
2683 if (c) {
2684 /* constant case: we can do it now */
2685 /* XXX: in ISOC, cannot do it if error in convert */
2686 if (sbt == VT_FLOAT)
2687 vtop->c.ld = vtop->c.f;
2688 else if (sbt == VT_DOUBLE)
2689 vtop->c.ld = vtop->c.d;
2691 if (df) {
2692 if ((sbt & VT_BTYPE) == VT_LLONG) {
2693 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2694 vtop->c.ld = vtop->c.i;
2695 else
2696 vtop->c.ld = -(long double)-vtop->c.i;
2697 } else if(!sf) {
2698 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2699 vtop->c.ld = (uint32_t)vtop->c.i;
2700 else
2701 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2704 if (dbt == VT_FLOAT)
2705 vtop->c.f = (float)vtop->c.ld;
2706 else if (dbt == VT_DOUBLE)
2707 vtop->c.d = (double)vtop->c.ld;
2708 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2709 vtop->c.i = vtop->c.ld;
2710 } else if (sf && dbt == VT_BOOL) {
2711 vtop->c.i = (vtop->c.ld != 0);
2712 } else {
2713 if(sf)
2714 vtop->c.i = vtop->c.ld;
2715 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2717 else if (sbt & VT_UNSIGNED)
2718 vtop->c.i = (uint32_t)vtop->c.i;
2719 #if PTR_SIZE == 8
2720 else if (sbt == VT_PTR)
2722 #endif
2723 else if (sbt != VT_LLONG)
2724 vtop->c.i = ((uint32_t)vtop->c.i |
2725 -(vtop->c.i & 0x80000000));
2727 if (dbt == (VT_LLONG|VT_UNSIGNED))
2729 else if (dbt == VT_BOOL)
2730 vtop->c.i = (vtop->c.i != 0);
2731 #if PTR_SIZE == 8
2732 else if (dbt == VT_PTR)
2734 #endif
2735 else if (dbt != VT_LLONG) {
2736 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2737 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2738 0xffffffff);
2739 vtop->c.i &= m;
2740 if (!(dbt & VT_UNSIGNED))
2741 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2744 } else if (p && dbt == VT_BOOL) {
2745 vtop->r = VT_CONST;
2746 vtop->c.i = 1;
2747 } else {
2748 /* non constant case: generate code */
2749 if (sf && df) {
2750 /* convert from fp to fp */
2751 gen_cvt_ftof(dbt);
2752 } else if (df) {
2753 /* convert int to fp */
2754 gen_cvt_itof1(dbt);
2755 } else if (sf) {
2756 /* convert fp to int */
2757 if (dbt == VT_BOOL) {
2758 vpushi(0);
2759 gen_op(TOK_NE);
2760 } else {
2761 /* we handle char/short/etc... with generic code */
2762 if (dbt != (VT_INT | VT_UNSIGNED) &&
2763 dbt != (VT_LLONG | VT_UNSIGNED) &&
2764 dbt != VT_LLONG)
2765 dbt = VT_INT;
2766 gen_cvt_ftoi1(dbt);
2767 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2768 /* additional cast for char/short... */
2769 vtop->type.t = dbt;
2770 gen_cast(type);
2773 #if PTR_SIZE == 4
2774 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2775 if ((sbt & VT_BTYPE) != VT_LLONG) {
2776 /* scalar to long long */
2777 /* machine independent conversion */
2778 gv(RC_INT);
2779 /* generate high word */
2780 if (sbt == (VT_INT | VT_UNSIGNED)) {
2781 vpushi(0);
2782 gv(RC_INT);
2783 } else {
2784 if (sbt == VT_PTR) {
2785 /* cast from pointer to int before we apply
2786 shift operation, which pointers don't support */
2787 gen_cast_s(VT_INT);
2789 gv_dup();
2790 vpushi(31);
2791 gen_op(TOK_SAR);
2793 /* patch second register */
2794 vtop[-1].r2 = vtop->r;
2795 vpop();
2797 #else
2798 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2799 (dbt & VT_BTYPE) == VT_PTR ||
2800 (dbt & VT_BTYPE) == VT_FUNC) {
2801 if ((sbt & VT_BTYPE) != VT_LLONG &&
2802 (sbt & VT_BTYPE) != VT_PTR &&
2803 (sbt & VT_BTYPE) != VT_FUNC) {
2804 /* need to convert from 32bit to 64bit */
2805 gv(RC_INT);
2806 if (sbt != (VT_INT | VT_UNSIGNED)) {
2807 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2808 gen_cvt_sxtw();
2809 #elif defined(TCC_TARGET_X86_64)
2810 int r = gv(RC_INT);
2811 /* x86_64 specific: movslq */
2812 o(0x6348);
2813 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2814 #else
2815 #error
2816 #endif
2819 #endif
2820 } else if (dbt == VT_BOOL) {
2821 /* scalar to bool */
2822 vpushi(0);
2823 gen_op(TOK_NE);
2824 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2825 (dbt & VT_BTYPE) == VT_SHORT) {
2826 if (sbt == VT_PTR) {
2827 vtop->type.t = VT_INT;
2828 tcc_warning("nonportable conversion from pointer to char/short");
2830 force_charshort_cast(dbt);
2831 } else if ((dbt & VT_BTYPE) == VT_INT) {
2832 /* scalar to int */
2833 if ((sbt & VT_BTYPE) == VT_LLONG) {
2834 #if PTR_SIZE == 4
2835 /* from long long: just take low order word */
2836 lexpand();
2837 vpop();
2838 #else
2839 vpushi(0xffffffff);
2840 vtop->type.t |= VT_UNSIGNED;
2841 gen_op('&');
2842 #endif
2844 /* if lvalue and single word type, nothing to do because
2845 the lvalue already contains the real type size (see
2846 VT_LVAL_xxx constants) */
2849 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2850 /* if we are casting between pointer types,
2851 we must update the VT_LVAL_xxx size */
2852 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2853 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2855 vtop->type = *type;
2856 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
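/* Editor's note (illustrative sketch, not part of tcc): in the constant
   case above, narrowing integer casts are folded with a mask plus a
   sign-extension step.  The same computation, standalone:

       #include <stdint.h>

       static int64_t fold_narrow(int64_t v, uint32_t m, int is_unsigned)
       {
           v &= m;                              // m is 0xff, 0xffff or 0xffffffff
           if (!is_unsigned)
               v |= -(v & ((m >> 1) + 1));      // propagate the new sign bit
           return v;                            // fold_narrow(0xFF, 0xff, 0) == -1
       }
*/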
2859 /* return type size as known at compile time. Put alignment at 'a' */
2860 ST_FUNC int type_size(CType *type, int *a)
2862 Sym *s;
2863 int bt;
2865 bt = type->t & VT_BTYPE;
2866 if (bt == VT_STRUCT) {
2867 /* struct/union */
2868 s = type->ref;
2869 *a = s->r;
2870 return s->c;
2871 } else if (bt == VT_PTR) {
2872 if (type->t & VT_ARRAY) {
2873 int ts;
2875 s = type->ref;
2876 ts = type_size(&s->type, a);
2878 if (ts < 0 && s->c < 0)
2879 ts = -ts;
2881 return ts * s->c;
2882 } else {
2883 *a = PTR_SIZE;
2884 return PTR_SIZE;
2886 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2887 return -1; /* incomplete enum */
2888 } else if (bt == VT_LDOUBLE) {
2889 *a = LDOUBLE_ALIGN;
2890 return LDOUBLE_SIZE;
2891 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2892 #ifdef TCC_TARGET_I386
2893 #ifdef TCC_TARGET_PE
2894 *a = 8;
2895 #else
2896 *a = 4;
2897 #endif
2898 #elif defined(TCC_TARGET_ARM)
2899 #ifdef TCC_ARM_EABI
2900 *a = 8;
2901 #else
2902 *a = 4;
2903 #endif
2904 #else
2905 *a = 8;
2906 #endif
2907 return 8;
2908 } else if (bt == VT_INT || bt == VT_FLOAT) {
2909 *a = 4;
2910 return 4;
2911 } else if (bt == VT_SHORT) {
2912 *a = 2;
2913 return 2;
2914 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2915 *a = 8;
2916 return 16;
2917 } else {
2918 /* char, void, function, _Bool */
2919 *a = 1;
2920 return 1;
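/* Editor's note (illustrative, not part of tcc): type_size() returns the
   size and stores the alignment through 'a'.  For an array the element size
   is simply multiplied by the element count, e.g. for

       struct pair { int x; char c; };   // size 8, alignment 4 (tail padding)
       struct pair arr[10];              // type_size -> 10 * 8 == 80, alignment 4

   Note the target-specific cases above: on i386 (non-PE) and non-EABI ARM,
   double and long long are 8 bytes but only 4-byte aligned. */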
2924 /* push type size as known at run time on top of value stack. Put
2925 alignment at 'a' */
2926 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2928 if (type->t & VT_VLA) {
2929 type_size(&type->ref->type, a);
2930 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2931 } else {
2932 vpushi(type_size(type, a));
2936 /* return the pointed type of t */
2937 static inline CType *pointed_type(CType *type)
2939 return &type->ref->type;
2942 /* modify type so that it is a pointer to the given type. */
2943 ST_FUNC void mk_pointer(CType *type)
2945 Sym *s;
2946 s = sym_push(SYM_FIELD, type, 0, -1);
2947 type->t = VT_PTR | (type->t & VT_STORAGE);
2948 type->ref = s;
2951 /* compare function types. OLD functions match any new functions */
2952 static int is_compatible_func(CType *type1, CType *type2)
2954 Sym *s1, *s2;
2956 s1 = type1->ref;
2957 s2 = type2->ref;
2958 if (s1->f.func_call != s2->f.func_call)
2959 return 0;
2960 if (s1->f.func_type != s2->f.func_type
2961 && s1->f.func_type != FUNC_OLD
2962 && s2->f.func_type != FUNC_OLD)
2963 return 0;
2964 /* we should check the function return type for FUNC_OLD too
2965 but that causes problems with the internally used support
2966 functions such as TOK_memmove */
2967 if (s1->f.func_type == FUNC_OLD && !s1->next)
2968 return 1;
2969 if (s2->f.func_type == FUNC_OLD && !s2->next)
2970 return 1;
2971 for (;;) {
2972 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2973 return 0;
2974 s1 = s1->next;
2975 s2 = s2->next;
2976 if (!s1)
2977 return !s2;
2978 if (!s2)
2979 return 0;
2983 /* return true if type1 and type2 are the same. If unqualified is
2984 true, qualifiers on the types are ignored.
2986 static int compare_types(CType *type1, CType *type2, int unqualified)
2988 int bt1, t1, t2;
2990 t1 = type1->t & VT_TYPE;
2991 t2 = type2->t & VT_TYPE;
2992 if (unqualified) {
2993 /* strip qualifiers before comparing */
2994 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2995 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2998 /* Default vs. explicit signedness only matters for char */
2999 if ((t1 & VT_BTYPE) != VT_BYTE) {
3000 t1 &= ~VT_DEFSIGN;
3001 t2 &= ~VT_DEFSIGN;
3003 /* XXX: bitfields ? */
3004 if (t1 != t2)
3005 return 0;
3007 if ((t1 & VT_ARRAY)
3008 && !(type1->ref->c < 0
3009 || type2->ref->c < 0
3010 || type1->ref->c == type2->ref->c))
3011 return 0;
3013 /* test more complicated cases */
3014 bt1 = t1 & VT_BTYPE;
3015 if (bt1 == VT_PTR) {
3016 type1 = pointed_type(type1);
3017 type2 = pointed_type(type2);
3018 return is_compatible_types(type1, type2);
3019 } else if (bt1 == VT_STRUCT) {
3020 return (type1->ref == type2->ref);
3021 } else if (bt1 == VT_FUNC) {
3022 return is_compatible_func(type1, type2);
3023 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3024 return type1->ref == type2->ref;
3025 } else {
3026 return 1;
3030 /* return true if type1 and type2 are exactly the same (including
3031 qualifiers).
3033 static int is_compatible_types(CType *type1, CType *type2)
3035 return compare_types(type1,type2,0);
3038 /* return true if type1 and type2 are the same (ignoring qualifiers).
3040 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3042 return compare_types(type1,type2,1);
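/* Editor's note (illustrative examples, not part of tcc): with the rules
   above, these pairs of types compare as follows:

       int f(int);   vs   int g();       // compatible: old-style matches any prototype
       int a[10];    vs   int b[];       // compatible: unspecified size matches any size
       int a[10];    vs   int c[9];      // incompatible: different array sizes
       const int     vs   int            // differ for is_compatible_types(),
                                         // equal for the unqualified variant
       char          vs   unsigned char  // incompatible: signedness is significant for char
*/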
3045 /* print a type. If 'varstr' is not NULL, then the variable is also
3046 printed in the type */
3047 /* XXX: union */
3048 /* XXX: add array and function pointers */
3049 static void type_to_str(char *buf, int buf_size,
3050 CType *type, const char *varstr)
3052 int bt, v, t;
3053 Sym *s, *sa;
3054 char buf1[256];
3055 const char *tstr;
3057 t = type->t;
3058 bt = t & VT_BTYPE;
3059 buf[0] = '\0';
3061 if (t & VT_EXTERN)
3062 pstrcat(buf, buf_size, "extern ");
3063 if (t & VT_STATIC)
3064 pstrcat(buf, buf_size, "static ");
3065 if (t & VT_TYPEDEF)
3066 pstrcat(buf, buf_size, "typedef ");
3067 if (t & VT_INLINE)
3068 pstrcat(buf, buf_size, "inline ");
3069 if (t & VT_VOLATILE)
3070 pstrcat(buf, buf_size, "volatile ");
3071 if (t & VT_CONSTANT)
3072 pstrcat(buf, buf_size, "const ");
3074 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3075 || ((t & VT_UNSIGNED)
3076 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3077 && !IS_ENUM(t)
3079 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3081 buf_size -= strlen(buf);
3082 buf += strlen(buf);
3084 switch(bt) {
3085 case VT_VOID:
3086 tstr = "void";
3087 goto add_tstr;
3088 case VT_BOOL:
3089 tstr = "_Bool";
3090 goto add_tstr;
3091 case VT_BYTE:
3092 tstr = "char";
3093 goto add_tstr;
3094 case VT_SHORT:
3095 tstr = "short";
3096 goto add_tstr;
3097 case VT_INT:
3098 tstr = "int";
3099 goto maybe_long;
3100 case VT_LLONG:
3101 tstr = "long long";
3102 maybe_long:
3103 if (t & VT_LONG)
3104 tstr = "long";
3105 if (!IS_ENUM(t))
3106 goto add_tstr;
3107 tstr = "enum ";
3108 goto tstruct;
3109 case VT_FLOAT:
3110 tstr = "float";
3111 goto add_tstr;
3112 case VT_DOUBLE:
3113 tstr = "double";
3114 goto add_tstr;
3115 case VT_LDOUBLE:
3116 tstr = "long double";
3117 add_tstr:
3118 pstrcat(buf, buf_size, tstr);
3119 break;
3120 case VT_STRUCT:
3121 tstr = "struct ";
3122 if (IS_UNION(t))
3123 tstr = "union ";
3124 tstruct:
3125 pstrcat(buf, buf_size, tstr);
3126 v = type->ref->v & ~SYM_STRUCT;
3127 if (v >= SYM_FIRST_ANOM)
3128 pstrcat(buf, buf_size, "<anonymous>");
3129 else
3130 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3131 break;
3132 case VT_FUNC:
3133 s = type->ref;
3134 buf1[0]=0;
3135 if (varstr && '*' == *varstr) {
3136 pstrcat(buf1, sizeof(buf1), "(");
3137 pstrcat(buf1, sizeof(buf1), varstr);
3138 pstrcat(buf1, sizeof(buf1), ")");
3140 pstrcat(buf1, sizeof(buf1), "(");
3141 sa = s->next;
3142 while (sa != NULL) {
3143 char buf2[256];
3144 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3145 pstrcat(buf1, sizeof(buf1), buf2);
3146 sa = sa->next;
3147 if (sa)
3148 pstrcat(buf1, sizeof(buf1), ", ");
3150 if (s->f.func_type == FUNC_ELLIPSIS)
3151 pstrcat(buf1, sizeof(buf1), ", ...");
3152 pstrcat(buf1, sizeof(buf1), ")");
3153 type_to_str(buf, buf_size, &s->type, buf1);
3154 goto no_var;
3155 case VT_PTR:
3156 s = type->ref;
3157 if (t & VT_ARRAY) {
3158 if (varstr && '*' == *varstr)
3159 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3160 else
3161 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3162 type_to_str(buf, buf_size, &s->type, buf1);
3163 goto no_var;
3165 pstrcpy(buf1, sizeof(buf1), "*");
3166 if (t & VT_CONSTANT)
3167 pstrcat(buf1, sizeof(buf1), "const ");
3168 if (t & VT_VOLATILE)
3169 pstrcat(buf1, sizeof(buf1), "volatile ");
3170 if (varstr)
3171 pstrcat(buf1, sizeof(buf1), varstr);
3172 type_to_str(buf, buf_size, &s->type, buf1);
3173 goto no_var;
3175 if (varstr) {
3176 pstrcat(buf, buf_size, " ");
3177 pstrcat(buf, buf_size, varstr);
3179 no_var: ;
3182 /* verify type compatibility to store vtop in 'dt' type, and generate
3183 casts if needed. */
3184 static void gen_assign_cast(CType *dt)
3186 CType *st, *type1, *type2;
3187 char buf1[256], buf2[256];
3188 int dbt, sbt, qualwarn, lvl;
3190 st = &vtop->type; /* source type */
3191 dbt = dt->t & VT_BTYPE;
3192 sbt = st->t & VT_BTYPE;
3193 if (sbt == VT_VOID || dbt == VT_VOID) {
3194 if (sbt == VT_VOID && dbt == VT_VOID)
3195 ; /* It is Ok if both are void */
3196 else
3197 tcc_error("cannot cast from/to void");
3199 if (dt->t & VT_CONSTANT)
3200 tcc_warning("assignment of read-only location");
3201 switch(dbt) {
3202 case VT_PTR:
3203 /* special cases for pointers */
3204 /* '0' can also be a pointer */
3205 if (is_null_pointer(vtop))
3206 break;
3207 /* accept implicit pointer to integer cast with warning */
3208 if (is_integer_btype(sbt)) {
3209 tcc_warning("assignment makes pointer from integer without a cast");
3210 break;
3212 type1 = pointed_type(dt);
3213 if (sbt == VT_PTR)
3214 type2 = pointed_type(st);
3215 else if (sbt == VT_FUNC)
3216 type2 = st; /* a function is implicitly a function pointer */
3217 else
3218 goto error;
3219 if (is_compatible_types(type1, type2))
3220 break;
3221 for (qualwarn = lvl = 0;; ++lvl) {
3222 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3223 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3224 qualwarn = 1;
3225 dbt = type1->t & (VT_BTYPE|VT_LONG);
3226 sbt = type2->t & (VT_BTYPE|VT_LONG);
3227 if (dbt != VT_PTR || sbt != VT_PTR)
3228 break;
3229 type1 = pointed_type(type1);
3230 type2 = pointed_type(type2);
3232 if (!is_compatible_unqualified_types(type1, type2)) {
3233 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3234 /* void * can match anything */
3235 } else if (dbt == sbt
3236 && is_integer_btype(sbt & VT_BTYPE)
3237 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3238 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3239 /* Like GCC, don't warn by default for mere changes
3240 in pointer target signedness. Do warn for different
3241 base types, though, in particular for unsigned enums
3242 and signed int targets. */
3243 } else {
3244 tcc_warning("assignment from incompatible pointer type");
3245 break;
3248 if (qualwarn)
3249 tcc_warning("assignment discards qualifiers from pointer target type");
3250 break;
3251 case VT_BYTE:
3252 case VT_SHORT:
3253 case VT_INT:
3254 case VT_LLONG:
3255 if (sbt == VT_PTR || sbt == VT_FUNC) {
3256 tcc_warning("assignment makes integer from pointer without a cast");
3257 } else if (sbt == VT_STRUCT) {
3258 goto case_VT_STRUCT;
3260 /* XXX: more tests */
3261 break;
3262 case VT_STRUCT:
3263 case_VT_STRUCT:
3264 if (!is_compatible_unqualified_types(dt, st)) {
3265 error:
3266 type_to_str(buf1, sizeof(buf1), st, NULL);
3267 type_to_str(buf2, sizeof(buf2), dt, NULL);
3268 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3270 break;
3272 gen_cast(dt);
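/* Editor's note (illustrative examples, not part of tcc): typical
   diagnostics produced by the checks above:

       int *p = 3;                      // warning: makes pointer from integer without a cast
       int i = p;                       // warning: makes integer from pointer without a cast
       const char *cp;  char *q = cp;   // warning: discards qualifiers from pointer target type
       unsigned *u = p;                 // accepted silently: only the target signedness differs
       void *v = p;                     // ok: void * matches any object pointer
*/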
3275 /* store vtop in lvalue pushed on stack */
3276 ST_FUNC void vstore(void)
3278 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3280 ft = vtop[-1].type.t;
3281 sbt = vtop->type.t & VT_BTYPE;
3282 dbt = ft & VT_BTYPE;
3283 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3284 (sbt == VT_INT && dbt == VT_SHORT))
3285 && !(vtop->type.t & VT_BITFIELD)) {
3286 /* optimize char/short casts */
3287 delayed_cast = VT_MUSTCAST;
3288 vtop->type.t = ft & VT_TYPE;
3289 /* XXX: factorize */
3290 if (ft & VT_CONSTANT)
3291 tcc_warning("assignment of read-only location");
3292 } else {
3293 delayed_cast = 0;
3294 if (!(ft & VT_BITFIELD))
3295 gen_assign_cast(&vtop[-1].type);
3298 if (sbt == VT_STRUCT) {
3299 /* if structure, only generate pointer */
3300 /* structure assignment : generate memcpy */
3301 /* XXX: optimize if small size */
3302 size = type_size(&vtop->type, &align);
3304 /* destination */
3305 vswap();
3306 vtop->type.t = VT_PTR;
3307 gaddrof();
3309 /* address of memcpy() */
3310 #ifdef TCC_ARM_EABI
3311 if(!(align & 7))
3312 vpush_global_sym(&func_old_type, TOK_memcpy8);
3313 else if(!(align & 3))
3314 vpush_global_sym(&func_old_type, TOK_memcpy4);
3315 else
3316 #endif
3317 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3318 vpush_global_sym(&func_old_type, TOK_memmove);
3320 vswap();
3321 /* source */
3322 vpushv(vtop - 2);
3323 vtop->type.t = VT_PTR;
3324 gaddrof();
3325 /* type size */
3326 vpushi(size);
3327 gfunc_call(3);
3329 /* leave source on stack */
3330 } else if (ft & VT_BITFIELD) {
3331 /* bitfield store handling */
3333 /* save lvalue as expression result (example: s.b = s.a = n;) */
3334 vdup(), vtop[-1] = vtop[-2];
3336 bit_pos = BIT_POS(ft);
3337 bit_size = BIT_SIZE(ft);
3338 /* remove bit field info to avoid loops */
3339 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3341 if ((ft & VT_BTYPE) == VT_BOOL) {
3342 gen_cast(&vtop[-1].type);
3343 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3346 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3347 if (r == VT_STRUCT) {
3348 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3349 store_packed_bf(bit_pos, bit_size);
3350 } else {
3351 unsigned long long mask = (1ULL << bit_size) - 1;
3352 if ((ft & VT_BTYPE) != VT_BOOL) {
3353 /* mask source */
3354 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3355 vpushll(mask);
3356 else
3357 vpushi((unsigned)mask);
3358 gen_op('&');
3360 /* shift source */
3361 vpushi(bit_pos);
3362 gen_op(TOK_SHL);
3363 vswap();
3364 /* duplicate destination */
3365 vdup();
3366 vrott(3);
3367 /* load destination, mask and or with source */
3368 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3369 vpushll(~(mask << bit_pos));
3370 else
3371 vpushi(~((unsigned)mask << bit_pos));
3372 gen_op('&');
3373 gen_op('|');
3374 /* store result */
3375 vstore();
3376 /* ... and discard */
3377 vpop();
3379 } else if (dbt == VT_VOID) {
3380 --vtop;
3381 } else {
3382 #ifdef CONFIG_TCC_BCHECK
3383 /* bound check case */
3384 if (vtop[-1].r & VT_MUSTBOUND) {
3385 vswap();
3386 gbound();
3387 vswap();
3389 #endif
3390 rc = RC_INT;
3391 if (is_float(ft)) {
3392 rc = RC_FLOAT;
3393 #ifdef TCC_TARGET_X86_64
3394 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3395 rc = RC_ST0;
3396 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3397 rc = RC_FRET;
3399 #endif
3401 r = gv(rc); /* generate value */
3402 /* if lvalue was saved on stack, must read it */
3403 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3404 SValue sv;
3405 t = get_reg(RC_INT);
3406 #if PTR_SIZE == 8
3407 sv.type.t = VT_PTR;
3408 #else
3409 sv.type.t = VT_INT;
3410 #endif
3411 sv.r = VT_LOCAL | VT_LVAL;
3412 sv.c.i = vtop[-1].c.i;
3413 load(t, &sv);
3414 vtop[-1].r = t | VT_LVAL;
3416 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3417 #if PTR_SIZE == 8
3418 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3419 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3420 #else
3421 if ((ft & VT_BTYPE) == VT_LLONG) {
3422 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3423 #endif
3424 vtop[-1].type.t = load_type;
3425 store(r, vtop - 1);
3426 vswap();
3427 /* convert to int to increment easily */
3428 vtop->type.t = addr_type;
3429 gaddrof();
3430 vpushi(load_size);
3431 gen_op('+');
3432 vtop->r |= VT_LVAL;
3433 vswap();
3434 vtop[-1].type.t = load_type;
3435 /* XXX: it works because r2 is spilled last ! */
3436 store(vtop->r2, vtop - 1);
3437 } else {
3438 store(r, vtop - 1);
3441 vswap();
3442 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3443 vtop->r |= delayed_cast;
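/* Editor's note (illustrative sketch, not part of tcc): for a non-packed
   bit-field the store above compiles to a read-modify-write of the
   containing word.  Standalone model of the same arithmetic:

       #include <stdint.h>

       static uint32_t store_bitfield(uint32_t word, uint32_t value,
                                      int bit_pos, int bit_size)
       {
           uint32_t mask = (1u << bit_size) - 1;    // assumes bit_size < 32
           word &= ~(mask << bit_pos);              // clear the old field
           word |= (value & mask) << bit_pos;       // insert the new value
           return word;
       }
*/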
3447 /* 'post' selects post- vs. pre-increment/decrement. c is the token ++ or -- */
3448 ST_FUNC void inc(int post, int c)
3450 test_lvalue();
3451 vdup(); /* save lvalue */
3452 if (post) {
3453 gv_dup(); /* duplicate value */
3454 vrotb(3);
3455 vrotb(3);
3457 /* add constant */
3458 vpushi(c - TOK_MID);
3459 gen_op('+');
3460 vstore(); /* store value */
3461 if (post)
3462 vpop(); /* if post op, return saved value */
3465 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3467 /* read the string */
3468 if (tok != TOK_STR)
3469 expect(msg);
3470 cstr_new(astr);
3471 while (tok == TOK_STR) {
3472 /* XXX: add \0 handling too ? */
3473 cstr_cat(astr, tokc.str.data, -1);
3474 next();
3476 cstr_ccat(astr, '\0');
3479 /* If i is >= 1 and a power of two, returns log2(i)+1.
3480 If i is 0, returns 0. */
3481 static int exact_log2p1(int i)
3483 int ret;
3484 if (!i)
3485 return 0;
3486 for (ret = 1; i >= 1 << 8; ret += 8)
3487 i >>= 8;
3488 if (i >= 1 << 4)
3489 ret += 4, i >>= 4;
3490 if (i >= 1 << 2)
3491 ret += 2, i >>= 2;
3492 if (i >= 1 << 1)
3493 ret++;
3494 return ret;
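/* Editor's note (not part of tcc): exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(0) == 0.  Attribute alignment is stored in this log2+1 form
   (see the uses of ad->a.aligned below), so the original value is recovered
   with 1 << (aligned - 1) and 0 still means "no explicit alignment". */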
3497 /* Parse __attribute__((...)) GNUC extension. */
3498 static void parse_attribute(AttributeDef *ad)
3500 int t, n;
3501 CString astr;
3503 redo:
3504 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3505 return;
3506 next();
3507 skip('(');
3508 skip('(');
3509 while (tok != ')') {
3510 if (tok < TOK_IDENT)
3511 expect("attribute name");
3512 t = tok;
3513 next();
3514 switch(t) {
3515 case TOK_CLEANUP1:
3516 case TOK_CLEANUP2:
3518 Sym *s;
3520 skip('(');
3521 s = sym_find(tok);
3522 if (!s) {
3523 tcc_warning("implicit declaration of function '%s'",
3524 get_tok_str(tok, &tokc));
3525 s = external_global_sym(tok, &func_old_type);
3527 ad->cleanup_func = s;
3528 next();
3529 skip(')');
3530 break;
3532 case TOK_SECTION1:
3533 case TOK_SECTION2:
3534 skip('(');
3535 parse_mult_str(&astr, "section name");
3536 ad->section = find_section(tcc_state, (char *)astr.data);
3537 skip(')');
3538 cstr_free(&astr);
3539 break;
3540 case TOK_ALIAS1:
3541 case TOK_ALIAS2:
3542 skip('(');
3543 parse_mult_str(&astr, "alias(\"target\")");
3544 ad->alias_target = /* save string as token, for later */
3545 tok_alloc((char*)astr.data, astr.size-1)->tok;
3546 skip(')');
3547 cstr_free(&astr);
3548 break;
3549 case TOK_VISIBILITY1:
3550 case TOK_VISIBILITY2:
3551 skip('(');
3552 parse_mult_str(&astr,
3553 "visibility(\"default|hidden|internal|protected\")");
3554 if (!strcmp (astr.data, "default"))
3555 ad->a.visibility = STV_DEFAULT;
3556 else if (!strcmp (astr.data, "hidden"))
3557 ad->a.visibility = STV_HIDDEN;
3558 else if (!strcmp (astr.data, "internal"))
3559 ad->a.visibility = STV_INTERNAL;
3560 else if (!strcmp (astr.data, "protected"))
3561 ad->a.visibility = STV_PROTECTED;
3562 else
3563 expect("visibility(\"default|hidden|internal|protected\")");
3564 skip(')');
3565 cstr_free(&astr);
3566 break;
3567 case TOK_ALIGNED1:
3568 case TOK_ALIGNED2:
3569 if (tok == '(') {
3570 next();
3571 n = expr_const();
3572 if (n <= 0 || (n & (n - 1)) != 0)
3573 tcc_error("alignment must be a positive power of two");
3574 skip(')');
3575 } else {
3576 n = MAX_ALIGN;
3578 ad->a.aligned = exact_log2p1(n);
3579 if (n != 1 << (ad->a.aligned - 1))
3580 tcc_error("alignment of %d is larger than implemented", n);
3581 break;
3582 case TOK_PACKED1:
3583 case TOK_PACKED2:
3584 ad->a.packed = 1;
3585 break;
3586 case TOK_WEAK1:
3587 case TOK_WEAK2:
3588 ad->a.weak = 1;
3589 break;
3590 case TOK_UNUSED1:
3591 case TOK_UNUSED2:
3592 /* currently, no need to handle it because tcc does not
3593 track unused objects */
3594 break;
3595 case TOK_NORETURN1:
3596 case TOK_NORETURN2:
3597 ad->f.func_noreturn = 1;
3598 break;
3599 case TOK_CDECL1:
3600 case TOK_CDECL2:
3601 case TOK_CDECL3:
3602 ad->f.func_call = FUNC_CDECL;
3603 break;
3604 case TOK_STDCALL1:
3605 case TOK_STDCALL2:
3606 case TOK_STDCALL3:
3607 ad->f.func_call = FUNC_STDCALL;
3608 break;
3609 #ifdef TCC_TARGET_I386
3610 case TOK_REGPARM1:
3611 case TOK_REGPARM2:
3612 skip('(');
3613 n = expr_const();
3614 if (n > 3)
3615 n = 3;
3616 else if (n < 0)
3617 n = 0;
3618 if (n > 0)
3619 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3620 skip(')');
3621 break;
3622 case TOK_FASTCALL1:
3623 case TOK_FASTCALL2:
3624 case TOK_FASTCALL3:
3625 ad->f.func_call = FUNC_FASTCALLW;
3626 break;
3627 #endif
3628 case TOK_MODE:
3629 skip('(');
3630 switch(tok) {
3631 case TOK_MODE_DI:
3632 ad->attr_mode = VT_LLONG + 1;
3633 break;
3634 case TOK_MODE_QI:
3635 ad->attr_mode = VT_BYTE + 1;
3636 break;
3637 case TOK_MODE_HI:
3638 ad->attr_mode = VT_SHORT + 1;
3639 break;
3640 case TOK_MODE_SI:
3641 case TOK_MODE_word:
3642 ad->attr_mode = VT_INT + 1;
3643 break;
3644 default:
3645 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3646 break;
3648 next();
3649 skip(')');
3650 break;
3651 case TOK_DLLEXPORT:
3652 ad->a.dllexport = 1;
3653 break;
3654 case TOK_NODECORATE:
3655 ad->a.nodecorate = 1;
3656 break;
3657 case TOK_DLLIMPORT:
3658 ad->a.dllimport = 1;
3659 break;
3660 default:
3661 if (tcc_state->warn_unsupported)
3662 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3663 /* skip parameters */
3664 if (tok == '(') {
3665 int parenthesis = 0;
3666 do {
3667 if (tok == '(')
3668 parenthesis++;
3669 else if (tok == ')')
3670 parenthesis--;
3671 next();
3672 } while (parenthesis && tok != -1);
3674 break;
3676 if (tok != ',')
3677 break;
3678 next();
3680 skip(')');
3681 skip(')');
3682 goto redo;
3685 static Sym * find_field (CType *type, int v, int *cumofs)
3687 Sym *s = type->ref;
3688 v |= SYM_FIELD;
3689 while ((s = s->next) != NULL) {
3690 if ((s->v & SYM_FIELD) &&
3691 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3692 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3693 Sym *ret = find_field (&s->type, v, cumofs);
3694 if (ret) {
3695 *cumofs += s->c;
3696 return ret;
3699 if (s->v == v)
3700 break;
3702 return s;
3705 static void struct_layout(CType *type, AttributeDef *ad)
3707 int size, align, maxalign, offset, c, bit_pos, bit_size;
3708 int packed, a, bt, prevbt, prev_bit_size;
3709 int pcc = !tcc_state->ms_bitfields;
3710 int pragma_pack = *tcc_state->pack_stack_ptr;
3711 Sym *f;
3713 maxalign = 1;
3714 offset = 0;
3715 c = 0;
3716 bit_pos = 0;
3717 prevbt = VT_STRUCT; /* make it never match */
3718 prev_bit_size = 0;
3720 //#define BF_DEBUG
3722 for (f = type->ref->next; f; f = f->next) {
3723 if (f->type.t & VT_BITFIELD)
3724 bit_size = BIT_SIZE(f->type.t);
3725 else
3726 bit_size = -1;
3727 size = type_size(&f->type, &align);
3728 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3729 packed = 0;
3731 if (pcc && bit_size == 0) {
3732 /* in pcc mode, packing does not affect zero-width bitfields */
3734 } else {
3735 /* in pcc mode, attribute packed overrides if set. */
3736 if (pcc && (f->a.packed || ad->a.packed))
3737 align = packed = 1;
3739 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3740 if (pragma_pack) {
3741 packed = 1;
3742 if (pragma_pack < align)
3743 align = pragma_pack;
3744 /* in pcc mode pragma pack also overrides individual align */
3745 if (pcc && pragma_pack < a)
3746 a = 0;
3749 /* some individual align was specified */
3750 if (a)
3751 align = a;
3753 if (type->ref->type.t == VT_UNION) {
3754 if (pcc && bit_size >= 0)
3755 size = (bit_size + 7) >> 3;
3756 offset = 0;
3757 if (size > c)
3758 c = size;
3760 } else if (bit_size < 0) {
3761 if (pcc)
3762 c += (bit_pos + 7) >> 3;
3763 c = (c + align - 1) & -align;
3764 offset = c;
3765 if (size > 0)
3766 c += size;
3767 bit_pos = 0;
3768 prevbt = VT_STRUCT;
3769 prev_bit_size = 0;
3771 } else {
3772 /* A bit-field. Layout is more complicated. There are two
3773 options: PCC (GCC) compatible and MS compatible */
3774 if (pcc) {
3775 /* In PCC layout a bit-field is placed adjacent to the
3776 preceding bit-fields, except if:
3777 - it has zero-width
3778 - an individual alignment was given
3779 - it would overflow its base type container and
3780 there is no packing */
3781 if (bit_size == 0) {
3782 new_field:
3783 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3784 bit_pos = 0;
3785 } else if (f->a.aligned) {
3786 goto new_field;
3787 } else if (!packed) {
3788 int a8 = align * 8;
3789 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3790 if (ofs > size / align)
3791 goto new_field;
3794 /* in pcc mode, long long bitfields have type int if they fit */
3795 if (size == 8 && bit_size <= 32)
3796 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3798 while (bit_pos >= align * 8)
3799 c += align, bit_pos -= align * 8;
3800 offset = c;
3802 /* In PCC layout named bit-fields influence the alignment
3803 of the containing struct using the base type's alignment,
3804 except for packed fields (which here have correct align). */
3805 if (f->v & SYM_FIRST_ANOM
3806 // && bit_size // ??? gcc on ARM/rpi does that
3808 align = 1;
3810 } else {
3811 bt = f->type.t & VT_BTYPE;
3812 if ((bit_pos + bit_size > size * 8)
3813 || (bit_size > 0) == (bt != prevbt)
3815 c = (c + align - 1) & -align;
3816 offset = c;
3817 bit_pos = 0;
3818 /* In MS bitfield mode a bit-field run always uses
3819 at least as many bits as the underlying type.
3820 To start a new run it's also required that this
3821 or the last bit-field had non-zero width. */
3822 if (bit_size || prev_bit_size)
3823 c += size;
3825 /* In MS layout the record's alignment is normally
3826 influenced by the field, except for a zero-width
3827 field at the start of a run (further zero-width
3828 fields do influence it again). */
3829 if (bit_size == 0 && prevbt != bt)
3830 align = 1;
3831 prevbt = bt;
3832 prev_bit_size = bit_size;
3835 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3836 | (bit_pos << VT_STRUCT_SHIFT);
3837 bit_pos += bit_size;
3839 if (align > maxalign)
3840 maxalign = align;
3842 #ifdef BF_DEBUG
3843 printf("set field %s offset %-2d size %-2d align %-2d",
3844 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3845 if (f->type.t & VT_BITFIELD) {
3846 printf(" pos %-2d bits %-2d",
3847 BIT_POS(f->type.t),
3848 BIT_SIZE(f->type.t)
3851 printf("\n");
3852 #endif
3854 f->c = offset;
3855 f->r = 0;
3858 if (pcc)
3859 c += (bit_pos + 7) >> 3;
3861 /* store size and alignment */
3862 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3863 if (a < maxalign)
3864 a = maxalign;
3865 type->ref->r = a;
3866 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3867 /* can happen if individual align for some member was given. In
3868 this case MSVC ignores maxalign when aligning the size */
3869 a = pragma_pack;
3870 if (a < bt)
3871 a = bt;
3873 c = (c + a - 1) & -a;
3874 type->ref->c = c;
3876 #ifdef BF_DEBUG
3877 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3878 #endif
3880 /* check whether we can access bitfields by their type */
3881 for (f = type->ref->next; f; f = f->next) {
3882 int s, px, cx, c0;
3883 CType t;
3885 if (0 == (f->type.t & VT_BITFIELD))
3886 continue;
3887 f->type.ref = f;
3888 f->auxtype = -1;
3889 bit_size = BIT_SIZE(f->type.t);
3890 if (bit_size == 0)
3891 continue;
3892 bit_pos = BIT_POS(f->type.t);
3893 size = type_size(&f->type, &align);
3894 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3895 continue;
3897 /* try to access the field using a different type */
3898 c0 = -1, s = align = 1;
3899 for (;;) {
3900 px = f->c * 8 + bit_pos;
3901 cx = (px >> 3) & -align;
3902 px = px - (cx << 3);
3903 if (c0 == cx)
3904 break;
3905 s = (px + bit_size + 7) >> 3;
3906 if (s > 4) {
3907 t.t = VT_LLONG;
3908 } else if (s > 2) {
3909 t.t = VT_INT;
3910 } else if (s > 1) {
3911 t.t = VT_SHORT;
3912 } else {
3913 t.t = VT_BYTE;
3915 s = type_size(&t, &align);
3916 c0 = cx;
3919 if (px + bit_size <= s * 8 && cx + s <= c) {
3920 /* update offset and bit position */
3921 f->c = cx;
3922 bit_pos = px;
3923 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3924 | (bit_pos << VT_STRUCT_SHIFT);
3925 if (s != size)
3926 f->auxtype = t.t;
3927 #ifdef BF_DEBUG
3928 printf("FIX field %s offset %-2d size %-2d align %-2d "
3929 "pos %-2d bits %-2d\n",
3930 get_tok_str(f->v & ~SYM_FIELD, NULL),
3931 cx, s, align, px, bit_size);
3932 #endif
3933 } else {
3934 /* fall back to load/store single-byte wise */
3935 f->auxtype = VT_STRUCT;
3936 #ifdef BF_DEBUG
3937 printf("FIX field %s : load byte-wise\n",
3938 get_tok_str(f->v & ~SYM_FIELD, NULL));
3939 #endif
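/* Editor's note (illustrative example, not part of tcc): in PCC/GCC mode
   the rules above give, for instance:

       struct s { int a : 3; int b : 30; char c; };

   'a' occupies bits 0..2 of the first int; 'b' does not fit in the
   remaining 29 bits of that container, so it starts a new int at offset 4;
   'c' follows at offset 8 and the struct is padded to sizeof == 12 with
   alignment 4.  With __attribute__((packed)) the overflow check is skipped
   and 'b' is placed immediately after 'a'. */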
3944 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3945 static void struct_decl(CType *type, int u)
3947 int v, c, size, align, flexible;
3948 int bit_size, bsize, bt;
3949 Sym *s, *ss, **ps;
3950 AttributeDef ad, ad1;
3951 CType type1, btype;
3953 memset(&ad, 0, sizeof ad);
3954 next();
3955 parse_attribute(&ad);
3956 if (tok != '{') {
3957 v = tok;
3958 next();
3959 /* struct already defined ? return it */
3960 if (v < TOK_IDENT)
3961 expect("struct/union/enum name");
3962 s = struct_find(v);
3963 if (s && (s->sym_scope == local_scope || tok != '{')) {
3964 if (u == s->type.t)
3965 goto do_decl;
3966 if (u == VT_ENUM && IS_ENUM(s->type.t))
3967 goto do_decl;
3968 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3970 } else {
3971 v = anon_sym++;
3973 /* Record the original enum/struct/union token. */
3974 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3975 type1.ref = NULL;
3976 /* we put an undefined size for struct/union */
3977 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3978 s->r = 0; /* default alignment is zero as gcc */
3979 do_decl:
3980 type->t = s->type.t;
3981 type->ref = s;
3983 if (tok == '{') {
3984 next();
3985 if (s->c != -1)
3986 tcc_error("struct/union/enum already defined");
3987 s->c = -2;
3988 /* cannot be empty */
3989 /* empty enums are not allowed */
3990 ps = &s->next;
3991 if (u == VT_ENUM) {
3992 long long ll = 0, pl = 0, nl = 0;
3993 CType t;
3994 t.ref = s;
3995 /* enum symbols have static storage */
3996 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3997 for(;;) {
3998 v = tok;
3999 if (v < TOK_UIDENT)
4000 expect("identifier");
4001 ss = sym_find(v);
4002 if (ss && !local_stack)
4003 tcc_error("redefinition of enumerator '%s'",
4004 get_tok_str(v, NULL));
4005 next();
4006 if (tok == '=') {
4007 next();
4008 ll = expr_const64();
4010 ss = sym_push(v, &t, VT_CONST, 0);
4011 ss->enum_val = ll;
4012 *ps = ss, ps = &ss->next;
4013 if (ll < nl)
4014 nl = ll;
4015 if (ll > pl)
4016 pl = ll;
4017 if (tok != ',')
4018 break;
4019 next();
4020 ll++;
4021 /* NOTE: we accept a trailing comma */
4022 if (tok == '}')
4023 break;
4025 skip('}');
4026 /* set integral type of the enum */
4027 t.t = VT_INT;
4028 if (nl >= 0) {
4029 if (pl != (unsigned)pl)
4030 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4031 t.t |= VT_UNSIGNED;
4032 } else if (pl != (int)pl || nl != (int)nl)
4033 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4034 s->type.t = type->t = t.t | VT_ENUM;
4035 s->c = 0;
4036 /* set type for enum members */
4037 for (ss = s->next; ss; ss = ss->next) {
4038 ll = ss->enum_val;
4039 if (ll == (int)ll) /* default is int if it fits */
4040 continue;
4041 if (t.t & VT_UNSIGNED) {
4042 ss->type.t |= VT_UNSIGNED;
4043 if (ll == (unsigned)ll)
4044 continue;
4046 ss->type.t = (ss->type.t & ~VT_BTYPE)
4047 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4049 } else {
4050 c = 0;
4051 flexible = 0;
4052 while (tok != '}') {
4053 if (!parse_btype(&btype, &ad1)) {
4054 skip(';');
4055 continue;
4057 while (1) {
4058 if (flexible)
4059 tcc_error("flexible array member '%s' not at the end of struct",
4060 get_tok_str(v, NULL));
4061 bit_size = -1;
4062 v = 0;
4063 type1 = btype;
4064 if (tok != ':') {
4065 if (tok != ';')
4066 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4067 if (v == 0) {
4068 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4069 expect("identifier");
4070 else {
4071 int v = btype.ref->v;
4072 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4073 if (tcc_state->ms_extensions == 0)
4074 expect("identifier");
4078 if (type_size(&type1, &align) < 0) {
4079 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4080 flexible = 1;
4081 else
4082 tcc_error("field '%s' has incomplete type",
4083 get_tok_str(v, NULL));
4085 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4086 (type1.t & VT_BTYPE) == VT_VOID ||
4087 (type1.t & VT_STORAGE))
4088 tcc_error("invalid type for '%s'",
4089 get_tok_str(v, NULL));
4091 if (tok == ':') {
4092 next();
4093 bit_size = expr_const();
4094 /* XXX: handle v = 0 case for messages */
4095 if (bit_size < 0)
4096 tcc_error("negative width in bit-field '%s'",
4097 get_tok_str(v, NULL));
4098 if (v && bit_size == 0)
4099 tcc_error("zero width for bit-field '%s'",
4100 get_tok_str(v, NULL));
4101 parse_attribute(&ad1);
4103 size = type_size(&type1, &align);
4104 if (bit_size >= 0) {
4105 bt = type1.t & VT_BTYPE;
4106 if (bt != VT_INT &&
4107 bt != VT_BYTE &&
4108 bt != VT_SHORT &&
4109 bt != VT_BOOL &&
4110 bt != VT_LLONG)
4111 tcc_error("bitfields must have scalar type");
4112 bsize = size * 8;
4113 if (bit_size > bsize) {
4114 tcc_error("width of '%s' exceeds its type",
4115 get_tok_str(v, NULL));
4116 } else if (bit_size == bsize
4117 && !ad.a.packed && !ad1.a.packed) {
4118 /* no need for bit fields */
4120 } else if (bit_size == 64) {
4121 tcc_error("field width 64 not implemented");
4122 } else {
4123 type1.t = (type1.t & ~VT_STRUCT_MASK)
4124 | VT_BITFIELD
4125 | (bit_size << (VT_STRUCT_SHIFT + 6));
4128 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4129 /* Remember we've seen a real field to check
4130 for placement of flexible array member. */
4131 c = 1;
4133 /* If member is a struct or bit-field, enforce
4134 placing into the struct (as anonymous). */
4135 if (v == 0 &&
4136 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4137 bit_size >= 0)) {
4138 v = anon_sym++;
4140 if (v) {
4141 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4142 ss->a = ad1.a;
4143 *ps = ss;
4144 ps = &ss->next;
4146 if (tok == ';' || tok == TOK_EOF)
4147 break;
4148 skip(',');
4150 skip(';');
4152 skip('}');
4153 parse_attribute(&ad);
4154 struct_layout(type, &ad);
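/* Editor's note (illustrative examples, not part of tcc): the enum code
   above picks the integral type from the range of the enumerator values:

       enum a { A = 1 };                  // all values >= 0: base type unsigned int
       enum b { B = -1 };                 // fits in int: base type int
       enum c { C = -1, D = 0x80000000 }; // does not fit in 32 bits: long long
       enum d { E = 3, };                 // a trailing comma is accepted

   Individual enumerators keep type int when their value fits in int and
   are widened the same way otherwise. */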
4159 static void sym_to_attr(AttributeDef *ad, Sym *s)
4161 merge_symattr(&ad->a, &s->a);
4162 merge_funcattr(&ad->f, &s->f);
4165 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4166 are added to the element type, copied because it could be a typedef. */
4167 static void parse_btype_qualify(CType *type, int qualifiers)
4169 while (type->t & VT_ARRAY) {
4170 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4171 type = &type->ref->type;
4173 type->t |= qualifiers;
4176 /* return 0 if no type declaration. otherwise, return the basic type
4177 and skip it.
4179 static int parse_btype(CType *type, AttributeDef *ad)
4181 int t, u, bt, st, type_found, typespec_found, g, n;
4182 Sym *s;
4183 CType type1;
4185 memset(ad, 0, sizeof(AttributeDef));
4186 type_found = 0;
4187 typespec_found = 0;
4188 t = VT_INT;
4189 bt = st = -1;
4190 type->ref = NULL;
4192 while(1) {
4193 switch(tok) {
4194 case TOK_EXTENSION:
4195 /* currently, we really ignore extension */
4196 next();
4197 continue;
4199 /* basic types */
4200 case TOK_CHAR:
4201 u = VT_BYTE;
4202 basic_type:
4203 next();
4204 basic_type1:
4205 if (u == VT_SHORT || u == VT_LONG) {
4206 if (st != -1 || (bt != -1 && bt != VT_INT))
4207 tmbt: tcc_error("too many basic types");
4208 st = u;
4209 } else {
4210 if (bt != -1 || (st != -1 && u != VT_INT))
4211 goto tmbt;
4212 bt = u;
4214 if (u != VT_INT)
4215 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4216 typespec_found = 1;
4217 break;
4218 case TOK_VOID:
4219 u = VT_VOID;
4220 goto basic_type;
4221 case TOK_SHORT:
4222 u = VT_SHORT;
4223 goto basic_type;
4224 case TOK_INT:
4225 u = VT_INT;
4226 goto basic_type;
4227 case TOK_ALIGNAS:
4228 { int n;
4229 AttributeDef ad1;
4230 next();
4231 skip('(');
4232 memset(&ad1, 0, sizeof(AttributeDef));
4233 if (parse_btype(&type1, &ad1)) {
4234 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4235 if (ad1.a.aligned)
4236 n = 1 << (ad1.a.aligned - 1);
4237 else
4238 type_size(&type1, &n);
4239 } else {
4240 n = expr_const();
4241 if (n <= 0 || (n & (n - 1)) != 0)
4242 tcc_error("alignment must be a positive power of two");
4244 skip(')');
4245 ad->a.aligned = exact_log2p1(n);
4247 continue;
4248 case TOK_LONG:
4249 if ((t & VT_BTYPE) == VT_DOUBLE) {
4250 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4251 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4252 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4253 } else {
4254 u = VT_LONG;
4255 goto basic_type;
4257 next();
4258 break;
4259 #ifdef TCC_TARGET_ARM64
4260 case TOK_UINT128:
4261 /* GCC's __uint128_t appears in some Linux header files. Make it a
4262 synonym for long double to get the size and alignment right. */
4263 u = VT_LDOUBLE;
4264 goto basic_type;
4265 #endif
4266 case TOK_BOOL:
4267 u = VT_BOOL;
4268 goto basic_type;
4269 case TOK_FLOAT:
4270 u = VT_FLOAT;
4271 goto basic_type;
4272 case TOK_DOUBLE:
4273 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4274 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4275 } else {
4276 u = VT_DOUBLE;
4277 goto basic_type;
4279 next();
4280 break;
4281 case TOK_ENUM:
4282 struct_decl(&type1, VT_ENUM);
4283 basic_type2:
4284 u = type1.t;
4285 type->ref = type1.ref;
4286 goto basic_type1;
4287 case TOK_STRUCT:
4288 struct_decl(&type1, VT_STRUCT);
4289 goto basic_type2;
4290 case TOK_UNION:
4291 struct_decl(&type1, VT_UNION);
4292 goto basic_type2;
4294 /* type modifiers */
4295 case TOK_CONST1:
4296 case TOK_CONST2:
4297 case TOK_CONST3:
4298 type->t = t;
4299 parse_btype_qualify(type, VT_CONSTANT);
4300 t = type->t;
4301 next();
4302 break;
4303 case TOK_VOLATILE1:
4304 case TOK_VOLATILE2:
4305 case TOK_VOLATILE3:
4306 type->t = t;
4307 parse_btype_qualify(type, VT_VOLATILE);
4308 t = type->t;
4309 next();
4310 break;
4311 case TOK_SIGNED1:
4312 case TOK_SIGNED2:
4313 case TOK_SIGNED3:
4314 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4315 tcc_error("signed and unsigned modifier");
4316 t |= VT_DEFSIGN;
4317 next();
4318 typespec_found = 1;
4319 break;
4320 case TOK_REGISTER:
4321 case TOK_AUTO:
4322 case TOK_RESTRICT1:
4323 case TOK_RESTRICT2:
4324 case TOK_RESTRICT3:
4325 next();
4326 break;
4327 case TOK_UNSIGNED:
4328 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4329 tcc_error("signed and unsigned modifier");
4330 t |= VT_DEFSIGN | VT_UNSIGNED;
4331 next();
4332 typespec_found = 1;
4333 break;
4335 /* storage */
4336 case TOK_EXTERN:
4337 g = VT_EXTERN;
4338 goto storage;
4339 case TOK_STATIC:
4340 g = VT_STATIC;
4341 goto storage;
4342 case TOK_TYPEDEF:
4343 g = VT_TYPEDEF;
4344 goto storage;
4345 storage:
4346 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4347 tcc_error("multiple storage classes");
4348 t |= g;
4349 next();
4350 break;
4351 case TOK_INLINE1:
4352 case TOK_INLINE2:
4353 case TOK_INLINE3:
4354 t |= VT_INLINE;
4355 next();
4356 break;
4357 case TOK_NORETURN3:
4358 /* currently, no need to handle it because tcc does not
4359 track unused objects */
4360 next();
4361 break;
4362 /* GNUC attribute */
4363 case TOK_ATTRIBUTE1:
4364 case TOK_ATTRIBUTE2:
4365 parse_attribute(ad);
4366 if (ad->attr_mode) {
4367 u = ad->attr_mode -1;
4368 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4370 continue;
4371 /* GNUC typeof */
4372 case TOK_TYPEOF1:
4373 case TOK_TYPEOF2:
4374 case TOK_TYPEOF3:
4375 next();
4376 parse_expr_type(&type1);
4377 /* remove all storage modifiers except typedef */
4378 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4379 if (type1.ref)
4380 sym_to_attr(ad, type1.ref);
4381 goto basic_type2;
4382 default:
4383 if (typespec_found)
4384 goto the_end;
4385 s = sym_find(tok);
4386 if (!s || !(s->type.t & VT_TYPEDEF))
4387 goto the_end;
4389 n = tok, next();
4390 if (tok == ':' && !in_generic) {
4391 /* ignore if it's a label */
4392 unget_tok(n);
4393 goto the_end;
4396 t &= ~(VT_BTYPE|VT_LONG);
4397 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4398 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4399 type->ref = s->type.ref;
4400 if (t)
4401 parse_btype_qualify(type, t);
4402 t = type->t;
4403 /* get attributes from typedef */
4404 sym_to_attr(ad, s);
4405 typespec_found = 1;
4406 st = bt = -2;
4407 break;
4409 type_found = 1;
4411 the_end:
4412 if (tcc_state->char_is_unsigned) {
4413 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4414 t |= VT_UNSIGNED;
4416 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4417 bt = t & (VT_BTYPE|VT_LONG);
4418 if (bt == VT_LONG)
4419 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4420 #ifdef TCC_TARGET_PE
4421 if (bt == VT_LDOUBLE)
4422 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4423 #endif
4424 type->t = t;
4425 return type_found;
4428 /* convert a function parameter type (array to pointer and function to
4429 function pointer) */
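/* For example, a parameter list written as
       void f(int a[10], int g(void));
   is recorded after these adjustments as
       void f(int *a, int (*g)(void));
   following the usual C rules for array and function parameters. */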
4430 static inline void convert_parameter_type(CType *pt)
4432 /* remove const and volatile qualifiers (XXX: const could be used
4433 to indicate a const function parameter) */
4434 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4435 /* array must be transformed to pointer according to ANSI C */
4436 pt->t &= ~VT_ARRAY;
4437 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4438 mk_pointer(pt);
4442 ST_FUNC void parse_asm_str(CString *astr)
4444 skip('(');
4445 parse_mult_str(astr, "string constant");
4448 /* Parse an asm label and return the token */
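/* For example, in a declaration such as
       int new_name __asm__("old_symbol");
   the token for "old_symbol" is returned, so the symbol can then be
   emitted under that assembler name. */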
4449 static int asm_label_instr(void)
4451 int v;
4452 CString astr;
4454 next();
4455 parse_asm_str(&astr);
4456 skip(')');
4457 #ifdef ASM_DEBUG
4458 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4459 #endif
4460 v = tok_alloc(astr.data, astr.size - 1)->tok;
4461 cstr_free(&astr);
4462 return v;
4465 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4467 int n, l, t1, arg_size, align, unused_align;
4468 Sym **plast, *s, *first;
4469 AttributeDef ad1;
4470 CType pt;
4472 if (tok == '(') {
4473 /* function type, or recursive declarator (return if so) */
4474 next();
4475 if (td && !(td & TYPE_ABSTRACT))
4476 return 0;
4477 if (tok == ')')
4478 l = 0;
4479 else if (parse_btype(&pt, &ad1))
4480 l = FUNC_NEW;
4481 else if (td) {
4482 merge_attr (ad, &ad1);
4483 return 0;
4484 } else
4485 l = FUNC_OLD;
4486 first = NULL;
4487 plast = &first;
4488 arg_size = 0;
4489 if (l) {
4490 for(;;) {
4491 /* read param name and compute offset */
4492 if (l != FUNC_OLD) {
4493 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4494 break;
4495 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4496 if ((pt.t & VT_BTYPE) == VT_VOID)
4497 tcc_error("parameter declared as void");
4498 } else {
4499 n = tok;
4500 if (n < TOK_UIDENT)
4501 expect("identifier");
4502 pt.t = VT_VOID; /* invalid type */
4503 pt.ref = NULL;
4504 next();
4506 convert_parameter_type(&pt);
4507 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4508 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4509 *plast = s;
4510 plast = &s->next;
4511 if (tok == ')')
4512 break;
4513 skip(',');
4514 if (l == FUNC_NEW && tok == TOK_DOTS) {
4515 l = FUNC_ELLIPSIS;
4516 next();
4517 break;
4519 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4520 tcc_error("invalid type");
4522 } else
4523 /* if no parameters, then old type prototype */
4524 l = FUNC_OLD;
4525 skip(')');
4526 /* NOTE: const is ignored in returned type as it has a special
4527 meaning in gcc / C++ */
4528 type->t &= ~VT_CONSTANT;
4529 /* some ancient pre-K&R C allows a function to return an array
4530 and the array brackets to be put after the arguments, such
4531 that "int c()[]" means something like "int[] c()" */
4532 if (tok == '[') {
4533 next();
4534 skip(']'); /* only handle simple "[]" */
4535 mk_pointer(type);
4537 /* we push an anonymous symbol which will contain the function prototype */
4538 ad->f.func_args = arg_size;
4539 ad->f.func_type = l;
4540 s = sym_push(SYM_FIELD, type, 0, 0);
4541 s->a = ad->a;
4542 s->f = ad->f;
4543 s->next = first;
4544 type->t = VT_FUNC;
4545 type->ref = s;
4546 } else if (tok == '[') {
4547 int saved_nocode_wanted = nocode_wanted;
4548 /* array definition */
4549 next();
4550 while (1) {
4551 /* XXX The optional type-quals and static should only be accepted
4552 in parameter decls. The '*' as well, and then even only
4553 in prototypes (not function defs). */
4554 switch (tok) {
4555 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4556 case TOK_CONST1:
4557 case TOK_VOLATILE1:
4558 case TOK_STATIC:
4559 case '*':
4560 next();
4561 continue;
4562 default:
4563 break;
4565 break;
4567 n = -1;
4568 t1 = 0;
4569 if (tok != ']') {
4570 if (!local_stack || (storage & VT_STATIC))
4571 vpushi(expr_const());
4572 else {
4573 /* A VLA's length (VLAs can only happen with local_stack && !VT_STATIC)
4574 must always be evaluated, even under nocode_wanted,
4575 so that its size slot is initialized (e.g. under sizeof
4576 or typeof). */
4577 nocode_wanted = 0;
4578 gexpr();
4580 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4581 n = vtop->c.i;
4582 if (n < 0)
4583 tcc_error("invalid array size");
4584 } else {
4585 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4586 tcc_error("size of variable length array should be an integer");
4587 n = 0;
4588 t1 = VT_VLA;
4591 skip(']');
4592 /* parse next post type */
4593 post_type(type, ad, storage, 0);
4595 if ((type->t & VT_BTYPE) == VT_FUNC)
4596 tcc_error("declaration of an array of functions");
4597 if ((type->t & VT_BTYPE) == VT_VOID
4598 || type_size(type, &unused_align) < 0)
4599 tcc_error("declaration of an array of incomplete type elements");
4601 t1 |= type->t & VT_VLA;
4603 if (t1 & VT_VLA) {
4604 if (n < 0)
4605 tcc_error("need explicit inner array size in VLAs");
4606 loc -= type_size(&int_type, &align);
4607 loc &= -align;
4608 n = loc;
4610 vla_runtime_type_size(type, &align);
4611 gen_op('*');
4612 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4613 vswap();
4614 vstore();
4616 if (n != -1)
4617 vpop();
4618 nocode_wanted = saved_nocode_wanted;
4620 /* we push an anonymous symbol which will contain the array
4621 element type */
4622 s = sym_push(SYM_FIELD, type, 0, n);
4623 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4624 type->ref = s;
4626 return 1;
4629 /* Parse a type declarator (except basic type), and return the type
4630 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4631 expected. 'type' should contain the basic type. 'ad' is the
4632 attribute definition of the basic type. It can be modified by
4633 type_decl(). If this (possibly abstract) declarator is a pointer chain
4634 it returns the innermost pointed to type (equals *type, but is a different
4635 pointer); otherwise it returns 'type' itself (the return value is used for recursive calls). */
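/* For example, when parsing
       int *p[4];
   parse_btype() has already consumed 'int'; type_decl() then records the
   '*' via mk_pointer() and post_type() turns the result into an array,
   so 'p' ends up as "array[4] of pointer to int". */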
4636 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4638 CType *post, *ret;
4639 int qualifiers, storage;
4641 /* recursive type, remove storage bits first, apply them later again */
4642 storage = type->t & VT_STORAGE;
4643 type->t &= ~VT_STORAGE;
4644 post = ret = type;
4646 while (tok == '*') {
4647 qualifiers = 0;
4648 redo:
4649 next();
4650 switch(tok) {
4651 case TOK_CONST1:
4652 case TOK_CONST2:
4653 case TOK_CONST3:
4654 qualifiers |= VT_CONSTANT;
4655 goto redo;
4656 case TOK_VOLATILE1:
4657 case TOK_VOLATILE2:
4658 case TOK_VOLATILE3:
4659 qualifiers |= VT_VOLATILE;
4660 goto redo;
4661 case TOK_RESTRICT1:
4662 case TOK_RESTRICT2:
4663 case TOK_RESTRICT3:
4664 goto redo;
4665 /* XXX: clarify attribute handling */
4666 case TOK_ATTRIBUTE1:
4667 case TOK_ATTRIBUTE2:
4668 parse_attribute(ad);
4669 break;
4671 mk_pointer(type);
4672 type->t |= qualifiers;
4673 if (ret == type)
4674 /* innermost pointed to type is the one for the first derivation */
4675 ret = pointed_type(type);
4678 if (tok == '(') {
4679 /* This is possibly a parameter type list for abstract declarators
4680 ('int ()'), use post_type for testing this. */
4681 if (!post_type(type, ad, 0, td)) {
4682 /* It's not, so it's a nested declarator, and the post operations
4683 apply to the innermost pointed to type (if any). */
4684 /* XXX: it is not correct to modify 'ad' at this point, but
4685 the syntax is not clear */
4686 parse_attribute(ad);
4687 post = type_decl(type, ad, v, td);
4688 skip(')');
4689 } else
4690 goto abstract;
4691 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4692 /* type identifier */
4693 *v = tok;
4694 next();
4695 } else {
4696 abstract:
4697 if (!(td & TYPE_ABSTRACT))
4698 expect("identifier");
4699 *v = 0;
4701 post_type(post, ad, storage, 0);
4702 parse_attribute(ad);
4703 type->t |= storage;
4704 return ret;
4707 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4708 ST_FUNC int lvalue_type(int t)
4710 int bt, r;
4711 r = VT_LVAL;
4712 bt = t & VT_BTYPE;
4713 if (bt == VT_BYTE || bt == VT_BOOL)
4714 r |= VT_LVAL_BYTE;
4715 else if (bt == VT_SHORT)
4716 r |= VT_LVAL_SHORT;
4717 else
4718 return r;
4719 if (t & VT_UNSIGNED)
4720 r |= VT_LVAL_UNSIGNED;
4721 return r;
4724 /* indirection with full error checking and bound check */
4725 ST_FUNC void indir(void)
4727 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4728 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4729 return;
4730 expect("pointer");
4732 if (vtop->r & VT_LVAL)
4733 gv(RC_INT);
4734 vtop->type = *pointed_type(&vtop->type);
4735 /* Arrays and functions are never lvalues */
4736 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4737 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4738 vtop->r |= lvalue_type(vtop->type.t);
4739 /* if bound checking, the referenced pointer must be checked */
4740 #ifdef CONFIG_TCC_BCHECK
4741 if (tcc_state->do_bounds_check)
4742 vtop->r |= VT_MUSTBOUND;
4743 #endif
4747 /* pass a parameter to a function and do type checking and casting */
4748 static void gfunc_param_typed(Sym *func, Sym *arg)
4750 int func_type;
4751 CType type;
4753 func_type = func->f.func_type;
4754 if (func_type == FUNC_OLD ||
4755 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4756 /* default casting : only need to convert float to double */
4757 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4758 gen_cast_s(VT_DOUBLE);
4759 } else if (vtop->type.t & VT_BITFIELD) {
4760 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4761 type.ref = vtop->type.ref;
4762 gen_cast(&type);
4764 } else if (arg == NULL) {
4765 tcc_error("too many arguments to function");
4766 } else {
4767 type = arg->type;
4768 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4769 gen_assign_cast(&type);
4773 /* parse an expression and return its type without any side effect. */
4774 static void expr_type(CType *type, void (*expr_fn)(void))
4776 nocode_wanted++;
4777 expr_fn();
4778 *type = vtop->type;
4779 vpop();
4780 nocode_wanted--;
4783 /* parse an expression of the form '(type)' or '(expr)' and return its
4784 type */
4785 static void parse_expr_type(CType *type)
4787 int n;
4788 AttributeDef ad;
4790 skip('(');
4791 if (parse_btype(type, &ad)) {
4792 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4793 } else {
4794 expr_type(type, gexpr);
4796 skip(')');
4799 static void parse_type(CType *type)
4801 AttributeDef ad;
4802 int n;
4804 if (!parse_btype(type, &ad)) {
4805 expect("type");
4807 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4810 static void parse_builtin_params(int nc, const char *args)
4812 char c, sep = '(';
4813 CType t;
4814 if (nc)
4815 nocode_wanted++;
4816 next();
4817 while ((c = *args++)) {
4818 skip(sep);
4819 sep = ',';
4820 switch (c) {
4821 case 'e': expr_eq(); continue;
4822 case 't': parse_type(&t); vpush(&t); continue;
4823 default: tcc_error("internal error"); break;
4826 skip(')');
4827 if (nc)
4828 nocode_wanted--;
4831 ST_FUNC void unary(void)
4833 int n, t, align, size, r, sizeof_caller;
4834 CType type;
4835 Sym *s;
4836 AttributeDef ad;
4838 sizeof_caller = in_sizeof;
4839 in_sizeof = 0;
4840 type.ref = NULL;
4841 /* XXX: GCC 2.95.3 does not generate a table although it should be
4842 better here */
4843 tok_next:
4844 switch(tok) {
4845 case TOK_EXTENSION:
4846 next();
4847 goto tok_next;
4848 case TOK_LCHAR:
4849 #ifdef TCC_TARGET_PE
4850 t = VT_SHORT|VT_UNSIGNED;
4851 goto push_tokc;
4852 #endif
4853 case TOK_CINT:
4854 case TOK_CCHAR:
4855 t = VT_INT;
4856 push_tokc:
4857 type.t = t;
4858 vsetc(&type, VT_CONST, &tokc);
4859 next();
4860 break;
4861 case TOK_CUINT:
4862 t = VT_INT | VT_UNSIGNED;
4863 goto push_tokc;
4864 case TOK_CLLONG:
4865 t = VT_LLONG;
4866 goto push_tokc;
4867 case TOK_CULLONG:
4868 t = VT_LLONG | VT_UNSIGNED;
4869 goto push_tokc;
4870 case TOK_CFLOAT:
4871 t = VT_FLOAT;
4872 goto push_tokc;
4873 case TOK_CDOUBLE:
4874 t = VT_DOUBLE;
4875 goto push_tokc;
4876 case TOK_CLDOUBLE:
4877 t = VT_LDOUBLE;
4878 goto push_tokc;
4879 case TOK_CLONG:
4880 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4881 goto push_tokc;
4882 case TOK_CULONG:
4883 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4884 goto push_tokc;
4885 case TOK___FUNCTION__:
4886 if (!gnu_ext)
4887 goto tok_identifier;
4888 /* fall thru */
4889 case TOK___FUNC__:
4891 void *ptr;
4892 int len;
4893 /* special function name identifier */
4894 len = strlen(funcname) + 1;
4895 /* generate char[len] type */
4896 type.t = VT_BYTE;
4897 mk_pointer(&type);
4898 type.t |= VT_ARRAY;
4899 type.ref->c = len;
4900 vpush_ref(&type, data_section, data_section->data_offset, len);
4901 if (!NODATA_WANTED) {
4902 ptr = section_ptr_add(data_section, len);
4903 memcpy(ptr, funcname, len);
4905 next();
4907 break;
4908 case TOK_LSTR:
4909 #ifdef TCC_TARGET_PE
4910 t = VT_SHORT | VT_UNSIGNED;
4911 #else
4912 t = VT_INT;
4913 #endif
4914 goto str_init;
4915 case TOK_STR:
4916 /* string parsing */
4917 t = VT_BYTE;
4918 if (tcc_state->char_is_unsigned)
4919 t = VT_BYTE | VT_UNSIGNED;
4920 str_init:
4921 if (tcc_state->warn_write_strings)
4922 t |= VT_CONSTANT;
4923 type.t = t;
4924 mk_pointer(&type);
4925 type.t |= VT_ARRAY;
4926 memset(&ad, 0, sizeof(AttributeDef));
4927 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4928 break;
4929 case '(':
4930 next();
4931 /* cast ? */
4932 if (parse_btype(&type, &ad)) {
4933 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4934 skip(')');
4935 /* check ISOC99 compound literal */
4936 if (tok == '{') {
4937 /* data is allocated locally by default */
4938 if (global_expr)
4939 r = VT_CONST;
4940 else
4941 r = VT_LOCAL;
4942 /* all except arrays are lvalues */
4943 if (!(type.t & VT_ARRAY))
4944 r |= lvalue_type(type.t);
4945 memset(&ad, 0, sizeof(AttributeDef));
4946 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4947 } else {
4948 if (sizeof_caller) {
4949 vpush(&type);
4950 return;
4952 unary();
4953 gen_cast(&type);
4955 } else if (tok == '{') {
4956 int saved_nocode_wanted = nocode_wanted;
4957 if (const_wanted)
4958 tcc_error("expected constant");
4959 /* save all registers */
4960 save_regs(0);
4961 /* statement expression : we do not accept break/continue
4962 inside as GCC does. We do retain the nocode_wanted state,
4963 as statement expressions can't ever be entered from the
4964 outside, so any reactivation of code emission (from labels
4965 or loop heads) can be disabled again after the end of it. */
4966 block(1);
4967 nocode_wanted = saved_nocode_wanted;
4968 skip(')');
4969 } else {
4970 gexpr();
4971 skip(')');
4973 break;
4974 case '*':
4975 next();
4976 unary();
4977 indir();
4978 break;
4979 case '&':
4980 next();
4981 unary();
4982 /* function names must be treated as function pointers,
4983 except for unary '&' and sizeof. Since we consider that
4984 functions are not lvalues, we only have to handle it
4985 there and in function calls. */
4986 /* arrays can also be used although they are not lvalues */
4987 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4988 !(vtop->type.t & VT_ARRAY))
4989 test_lvalue();
4990 mk_pointer(&vtop->type);
4991 gaddrof();
4992 break;
4993 case '!':
4994 next();
4995 unary();
4996 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4997 gen_cast_s(VT_BOOL);
4998 vtop->c.i = !vtop->c.i;
4999 } else if (vtop->r == VT_CMP) {
5000 vtop->cmp_op ^= 1;
5001 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5002 } else {
5003 vpushi(0);
5004 gen_op(TOK_EQ);
5006 break;
5007 case '~':
5008 next();
5009 unary();
5010 vpushi(-1);
5011 gen_op('^');
5012 break;
5013 case '+':
5014 next();
5015 unary();
5016 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5017 tcc_error("pointer not accepted for unary plus");
5018 /* In order to force a cast, we add zero, except for floating point
5019 where we really need a no-op (otherwise -0.0 would be transformed
5020 into +0.0). */
5021 if (!is_float(vtop->type.t)) {
5022 vpushi(0);
5023 gen_op('+');
5025 break;
5026 case TOK_SIZEOF:
5027 case TOK_ALIGNOF1:
5028 case TOK_ALIGNOF2:
5029 case TOK_ALIGNOF3:
5030 t = tok;
5031 next();
5032 in_sizeof++;
5033 expr_type(&type, unary); /* unary() also resets in_sizeof to 0 */
5034 s = NULL;
5035 if (vtop[1].r & VT_SYM)
5036 s = vtop[1].sym; /* hack: accessing previous vtop */
5037 size = type_size(&type, &align);
5038 if (s && s->a.aligned)
5039 align = 1 << (s->a.aligned - 1);
5040 if (t == TOK_SIZEOF) {
5041 if (!(type.t & VT_VLA)) {
5042 if (size < 0)
5043 tcc_error("sizeof applied to an incomplete type");
5044 vpushs(size);
5045 } else {
5046 vla_runtime_type_size(&type, &align);
5048 } else {
5049 vpushs(align);
5051 vtop->type.t |= VT_UNSIGNED;
5052 break;
5054 case TOK_builtin_expect:
5055 /* __builtin_expect is a no-op for now */
5056 parse_builtin_params(0, "ee");
5057 vpop();
5058 break;
5059 case TOK_builtin_types_compatible_p:
5060 parse_builtin_params(0, "tt");
5061 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5062 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5063 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5064 vtop -= 2;
5065 vpushi(n);
5066 break;
5067 case TOK_builtin_choose_expr:
5069 int64_t c;
5070 next();
5071 skip('(');
5072 c = expr_const64();
5073 skip(',');
5074 if (!c) {
5075 nocode_wanted++;
5077 expr_eq();
5078 if (!c) {
5079 vpop();
5080 nocode_wanted--;
5082 skip(',');
5083 if (c) {
5084 nocode_wanted++;
5086 expr_eq();
5087 if (c) {
5088 vpop();
5089 nocode_wanted--;
5091 skip(')');
5093 break;
5094 case TOK_builtin_constant_p:
5095 parse_builtin_params(1, "e");
5096 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5097 vtop--;
5098 vpushi(n);
5099 break;
5100 case TOK_builtin_frame_address:
5101 case TOK_builtin_return_address:
5103 int tok1 = tok;
5104 int level;
5105 next();
5106 skip('(');
5107 if (tok != TOK_CINT) {
5108 tcc_error("%s only takes positive integers",
5109 tok1 == TOK_builtin_return_address ?
5110 "__builtin_return_address" :
5111 "__builtin_frame_address");
5113 level = (uint32_t)tokc.i;
5114 next();
5115 skip(')');
5116 type.t = VT_VOID;
5117 mk_pointer(&type);
5118 vset(&type, VT_LOCAL, 0); /* local frame */
5119 while (level--) {
5120 mk_pointer(&vtop->type);
5121 indir(); /* -> parent frame */
5123 if (tok1 == TOK_builtin_return_address) {
5124 // assume return address is just above frame pointer on stack
5125 vpushi(PTR_SIZE);
5126 gen_op('+');
5127 mk_pointer(&vtop->type);
5128 indir();
5131 break;
5132 #ifdef TCC_TARGET_X86_64
5133 #ifdef TCC_TARGET_PE
5134 case TOK_builtin_va_start:
5135 parse_builtin_params(0, "ee");
5136 r = vtop->r & VT_VALMASK;
5137 if (r == VT_LLOCAL)
5138 r = VT_LOCAL;
5139 if (r != VT_LOCAL)
5140 tcc_error("__builtin_va_start expects a local variable");
5141 vtop->r = r;
5142 vtop->type = char_pointer_type;
5143 vtop->c.i += 8;
5144 vstore();
5145 break;
5146 #else
5147 case TOK_builtin_va_arg_types:
5148 parse_builtin_params(0, "t");
5149 vpushi(classify_x86_64_va_arg(&vtop->type));
5150 vswap();
5151 vpop();
5152 break;
5153 #endif
5154 #endif
5156 #ifdef TCC_TARGET_ARM64
5157 case TOK___va_start: {
5158 parse_builtin_params(0, "ee");
5159 //xx check types
5160 gen_va_start();
5161 vpushi(0);
5162 vtop->type.t = VT_VOID;
5163 break;
5165 case TOK___va_arg: {
5166 parse_builtin_params(0, "et");
5167 type = vtop->type;
5168 vpop();
5169 //xx check types
5170 gen_va_arg(&type);
5171 vtop->type = type;
5172 break;
5174 case TOK___arm64_clear_cache: {
5175 parse_builtin_params(0, "ee");
5176 gen_clear_cache();
5177 vpushi(0);
5178 vtop->type.t = VT_VOID;
5179 break;
5181 #endif
5182 /* pre operations */
5183 case TOK_INC:
5184 case TOK_DEC:
5185 t = tok;
5186 next();
5187 unary();
5188 inc(0, t);
5189 break;
5190 case '-':
5191 next();
5192 unary();
5193 t = vtop->type.t & VT_BTYPE;
5194 if (is_float(t)) {
5195 /* In IEEE negate(x) isn't subtract(0,x), but rather
5196 subtract(-0, x). */
5197 vpush(&vtop->type);
5198 if (t == VT_FLOAT)
5199 vtop->c.f = -1.0 * 0.0;
5200 else if (t == VT_DOUBLE)
5201 vtop->c.d = -1.0 * 0.0;
5202 else
5203 vtop->c.ld = -1.0 * 0.0;
5204 } else
5205 vpushi(0);
5206 vswap();
5207 gen_op('-');
5208 break;
5209 case TOK_LAND:
5210 if (!gnu_ext)
5211 goto tok_identifier;
5212 next();
5213 /* allow to take the address of a label */
5214 if (tok < TOK_UIDENT)
5215 expect("label identifier");
5216 s = label_find(tok);
5217 if (!s) {
5218 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5219 } else {
5220 if (s->r == LABEL_DECLARED)
5221 s->r = LABEL_FORWARD;
5223 if (!s->type.t) {
5224 s->type.t = VT_VOID;
5225 mk_pointer(&s->type);
5226 s->type.t |= VT_STATIC;
5228 vpushsym(&s->type, s);
5229 next();
5230 break;
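/* C11 _Generic, e.g.
       _Generic(x, int: f_int, float: f_float, default: f_default)
   selects the association whose type is compatible with the controlling
   expression 'x' (its qualifiers are dropped and function types decay to
   pointers, as done below).  Only the tokens of the selected branch are
   kept and re-parsed; the other branches are skipped or discarded. */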
5232 case TOK_GENERIC:
5234 CType controlling_type;
5235 int has_default = 0;
5236 int has_match = 0;
5237 int learn = 0;
5238 TokenString *str = NULL;
5239 int saved_const_wanted = const_wanted;
5241 next();
5242 skip('(');
5243 const_wanted = 0;
5244 expr_type(&controlling_type, expr_eq);
5245 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5246 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5247 mk_pointer(&controlling_type);
5248 const_wanted = saved_const_wanted;
5249 for (;;) {
5250 learn = 0;
5251 skip(',');
5252 if (tok == TOK_DEFAULT) {
5253 if (has_default)
5254 tcc_error("too many 'default'");
5255 has_default = 1;
5256 if (!has_match)
5257 learn = 1;
5258 next();
5259 } else {
5260 AttributeDef ad_tmp;
5261 int itmp;
5262 CType cur_type;
5264 in_generic++;
5265 parse_btype(&cur_type, &ad_tmp);
5266 in_generic--;
5268 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5269 if (compare_types(&controlling_type, &cur_type, 0)) {
5270 if (has_match) {
5271 tcc_error("type match twice");
5273 has_match = 1;
5274 learn = 1;
5277 skip(':');
5278 if (learn) {
5279 if (str)
5280 tok_str_free(str);
5281 skip_or_save_block(&str);
5282 } else {
5283 skip_or_save_block(NULL);
5285 if (tok == ')')
5286 break;
5288 if (!str) {
5289 char buf[60];
5290 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5291 tcc_error("type '%s' does not match any association", buf);
5293 begin_macro(str, 1);
5294 next();
5295 expr_eq();
5296 if (tok != TOK_EOF)
5297 expect(",");
5298 end_macro();
5299 next();
5300 break;
5302 // special qnan, snan and infinity values
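// For reference: 0x7fc00000, 0x7f800001 and 0x7f800000 are the IEEE-754
// single-precision bit patterns for a quiet NaN, a signalling NaN and
// +infinity; they are pushed as VT_FLOAT constants below.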
5303 case TOK___NAN__:
5304 n = 0x7fc00000;
5305 special_math_val:
5306 vpushi(n);
5307 vtop->type.t = VT_FLOAT;
5308 next();
5309 break;
5310 case TOK___SNAN__:
5311 n = 0x7f800001;
5312 goto special_math_val;
5313 case TOK___INF__:
5314 n = 0x7f800000;
5315 goto special_math_val;
5317 default:
5318 tok_identifier:
5319 t = tok;
5320 next();
5321 if (t < TOK_UIDENT)
5322 expect("identifier");
5323 s = sym_find(t);
5324 if (!s || IS_ASM_SYM(s)) {
5325 const char *name = get_tok_str(t, NULL);
5326 if (tok != '(')
5327 tcc_error("'%s' undeclared", name);
5328 /* for simple function calls, we tolerate an undeclared
5329 external reference to an int() function */
5330 if (tcc_state->warn_implicit_function_declaration
5331 #ifdef TCC_TARGET_PE
5332 /* people must be warned about using undeclared WINAPI functions
5333 (which usually start with uppercase letter) */
5334 || (name[0] >= 'A' && name[0] <= 'Z')
5335 #endif
5337 tcc_warning("implicit declaration of function '%s'", name);
5338 s = external_global_sym(t, &func_old_type);
5341 r = s->r;
5342 /* A symbol that has a register is a local register variable,
5343 which starts out as VT_LOCAL value. */
5344 if ((r & VT_VALMASK) < VT_CONST)
5345 r = (r & ~VT_VALMASK) | VT_LOCAL;
5347 vset(&s->type, r, s->c);
5348 /* Point to s as backpointer (even without r&VT_SYM).
5349 Will be used by at least the x86 inline asm parser for
5350 regvars. */
5351 vtop->sym = s;
5353 if (r & VT_SYM) {
5354 vtop->c.i = 0;
5355 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5356 vtop->c.i = s->enum_val;
5358 break;
5361 /* post operations */
5362 while (1) {
5363 if (tok == TOK_INC || tok == TOK_DEC) {
5364 inc(1, tok);
5365 next();
5366 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5367 int qualifiers, cumofs = 0;
5368 /* field */
5369 if (tok == TOK_ARROW)
5370 indir();
5371 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5372 test_lvalue();
5373 gaddrof();
5374 /* expect pointer on structure */
5375 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5376 expect("struct or union");
5377 if (tok == TOK_CDOUBLE)
5378 expect("field name");
5379 next();
5380 if (tok == TOK_CINT || tok == TOK_CUINT)
5381 expect("field name");
5382 s = find_field(&vtop->type, tok, &cumofs);
5383 if (!s)
5384 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5385 /* add field offset to pointer */
5386 vtop->type = char_pointer_type; /* change type to 'char *' */
5387 vpushi(cumofs + s->c);
5388 gen_op('+');
5389 /* change type to field type, and set to lvalue */
5390 vtop->type = s->type;
5391 vtop->type.t |= qualifiers;
5392 /* an array is never an lvalue */
5393 if (!(vtop->type.t & VT_ARRAY)) {
5394 vtop->r |= lvalue_type(vtop->type.t);
5395 #ifdef CONFIG_TCC_BCHECK
5396 /* if bound checking, the referenced pointer must be checked */
5397 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5398 vtop->r |= VT_MUSTBOUND;
5399 #endif
5401 next();
5402 } else if (tok == '[') {
5403 next();
5404 gexpr();
5405 gen_op('+');
5406 indir();
5407 skip(']');
5408 } else if (tok == '(') {
5409 SValue ret;
5410 Sym *sa;
5411 int nb_args, ret_nregs, ret_align, regsize, variadic;
5413 /* function call */
5414 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5415 /* pointer test (no array accepted) */
5416 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5417 vtop->type = *pointed_type(&vtop->type);
5418 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5419 goto error_func;
5420 } else {
5421 error_func:
5422 expect("function pointer");
5424 } else {
5425 vtop->r &= ~VT_LVAL; /* no lvalue */
5427 /* get return type */
5428 s = vtop->type.ref;
5429 next();
5430 sa = s->next; /* first parameter */
5431 nb_args = regsize = 0;
5432 ret.r2 = VT_CONST;
5433 /* compute first implicit argument if a structure is returned */
5434 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5435 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5436 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5437 &ret_align, &regsize);
5438 if (!ret_nregs) {
5439 /* get some space for the returned structure */
5440 size = type_size(&s->type, &align);
5441 #ifdef TCC_TARGET_ARM64
5442 /* On arm64, a small struct is returned in registers.
5443 It is much easier to write it to memory if we know
5444 that we are allowed to write some extra bytes, so
5445 round the allocated space up to a power of 2: */
5446 if (size < 16)
5447 while (size & (size - 1))
5448 size = (size | (size - 1)) + 1;
5449 #endif
5450 loc = (loc - size) & -align;
5451 ret.type = s->type;
5452 ret.r = VT_LOCAL | VT_LVAL;
5453 /* pass it as 'int' to avoid structure arg passing
5454 problems */
5455 vseti(VT_LOCAL, loc);
5456 ret.c = vtop->c;
5457 nb_args++;
5459 } else {
5460 ret_nregs = 1;
5461 ret.type = s->type;
5464 if (ret_nregs) {
5465 /* return in register */
5466 if (is_float(ret.type.t)) {
5467 ret.r = reg_fret(ret.type.t);
5468 #ifdef TCC_TARGET_X86_64
5469 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5470 ret.r2 = REG_QRET;
5471 #endif
5472 } else {
5473 #ifndef TCC_TARGET_ARM64
5474 #ifndef TCC_TARGET_RISCV64
5475 #ifdef TCC_TARGET_X86_64
5476 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5477 #else
5478 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5479 #endif
5480 ret.r2 = REG_LRET;
5481 #endif
5482 #endif
5483 ret.r = REG_IRET;
5485 ret.c.i = 0;
5487 if (tok != ')') {
5488 for(;;) {
5489 expr_eq();
5490 gfunc_param_typed(s, sa);
5491 nb_args++;
5492 if (sa)
5493 sa = sa->next;
5494 if (tok == ')')
5495 break;
5496 skip(',');
5499 if (sa)
5500 tcc_error("too few arguments to function");
5501 skip(')');
5502 gfunc_call(nb_args);
5504 /* return value */
5505 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5506 vsetc(&ret.type, r, &ret.c);
5507 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5510 /* handle packed struct return */
5511 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5512 int addr, offset;
5514 size = type_size(&s->type, &align);
5515 /* We're writing whole regs often, make sure there's enough
5516 space. Assume the register size is a power of 2. */
5517 if (regsize > align)
5518 align = regsize;
5519 loc = (loc - size) & -align;
5520 addr = loc;
5521 offset = 0;
5522 for (;;) {
5523 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5524 vswap();
5525 vstore();
5526 vtop--;
5527 if (--ret_nregs == 0)
5528 break;
5529 offset += regsize;
5531 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5533 if (s->f.func_noreturn)
5534 CODE_OFF();
5535 } else {
5536 break;
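/* The binary-operator parsers below implement one function per precedence
   level (multiplicative, additive, shifts, relational, equality, then the
   bitwise operators); each level first parses the next higher-precedence
   level and then folds its operators left to right with gen_op(). */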
5541 ST_FUNC void expr_prod(void)
5543 int t;
5545 unary();
5546 while (tok == '*' || tok == '/' || tok == '%') {
5547 t = tok;
5548 next();
5549 unary();
5550 gen_op(t);
5554 ST_FUNC void expr_sum(void)
5556 int t;
5558 expr_prod();
5559 while (tok == '+' || tok == '-') {
5560 t = tok;
5561 next();
5562 expr_prod();
5563 gen_op(t);
5567 static void expr_shift(void)
5569 int t;
5571 expr_sum();
5572 while (tok == TOK_SHL || tok == TOK_SAR) {
5573 t = tok;
5574 next();
5575 expr_sum();
5576 gen_op(t);
5580 static void expr_cmp(void)
5582 int t;
5584 expr_shift();
5585 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5586 tok == TOK_ULT || tok == TOK_UGE) {
5587 t = tok;
5588 next();
5589 expr_shift();
5590 gen_op(t);
5594 static void expr_cmpeq(void)
5596 int t;
5598 expr_cmp();
5599 while (tok == TOK_EQ || tok == TOK_NE) {
5600 t = tok;
5601 next();
5602 expr_cmp();
5603 gen_op(t);
5607 static void expr_and(void)
5609 expr_cmpeq();
5610 while (tok == '&') {
5611 next();
5612 expr_cmpeq();
5613 gen_op('&');
5617 static void expr_xor(void)
5619 expr_and();
5620 while (tok == '^') {
5621 next();
5622 expr_and();
5623 gen_op('^');
5627 static void expr_or(void)
5629 expr_xor();
5630 while (tok == '|') {
5631 next();
5632 expr_xor();
5633 gen_op('|');
5637 static int condition_3way(void);
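/* expr_landor() handles both '&&' (i == 1) and '||' (i == 0): while the
   outcome is still undecided, each operand is tested with gvtst(); once a
   constant operand decides the result, the remaining operands are still
   parsed, but with code generation suppressed via nocode_wanted. */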
5639 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5641 int t = 0, cc = 1, f = 0, c;
5642 for(;;) {
5643 c = f ? i : condition_3way();
5644 if (c < 0) {
5645 save_regs(1), cc = 0;
5646 } else if (c != i) {
5647 nocode_wanted++, f = 1;
5649 if (tok != e_op) {
5650 if (cc || f) {
5651 vpop();
5652 vpushi(i ^ f);
5653 gsym(t);
5654 nocode_wanted -= f;
5655 } else {
5656 gvtst_set(i, t);
5658 break;
5660 if (c < 0)
5661 t = gvtst(i, t);
5662 else
5663 vpop();
5664 next();
5665 e_fn();
5669 static void expr_land(void)
5671 expr_or();
5672 if (tok == TOK_LAND)
5673 expr_landor(expr_or, TOK_LAND, 1);
5676 static void expr_lor(void)
5678 expr_land();
5679 if (tok == TOK_LOR)
5680 expr_landor(expr_land, TOK_LOR, 0);
5683 /* Assuming vtop is a value used in a conditional context
5684 (i.e. compared with zero) return 0 if it's false, 1 if
5685 true and -1 if it can't be statically determined. */
5686 static int condition_3way(void)
5688 int c = -1;
5689 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5690 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5691 vdup();
5692 gen_cast_s(VT_BOOL);
5693 c = vtop->c.i;
5694 vpop();
5696 return c;
5699 static int is_cond_bool(SValue *sv)
5701 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5702 && (sv->type.t & VT_BTYPE) == VT_INT)
5703 return (unsigned)sv->c.i < 2;
5704 if (sv->r == VT_CMP)
5705 return 1;
5706 return 0;
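/* expr_cond() parses '? :'.  The GNU extension 'a ?: b' (omitted middle
   operand, detected via g = (tok == ':' && gnu_ext)) reuses the tested
   value of 'a' as the result when it is non-zero. */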
5709 static void expr_cond(void)
5711 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5712 SValue sv;
5713 CType type, type1, type2;
5714 int ncw_prev;
5716 expr_lor();
5717 if (tok == '?') {
5718 next();
5719 c = condition_3way();
5720 g = (tok == ':' && gnu_ext);
5721 tt = 0;
5722 if (!g) {
5723 if (c < 0) {
5724 save_regs(1);
5725 tt = gvtst(1, 0);
5726 } else {
5727 vpop();
5729 } else if (c < 0) {
5730 /* needed to avoid having different registers saved in
5731 each branch */
5732 rc = RC_INT;
5733 if (is_float(vtop->type.t)) {
5734 rc = RC_FLOAT;
5735 #ifdef TCC_TARGET_X86_64
5736 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5737 rc = RC_ST0;
5739 #endif
5741 gv(rc);
5742 save_regs(1);
5743 gv_dup();
5744 tt = gvtst(0, 0);
5747 ncw_prev = nocode_wanted;
5748 if (1) {
5749 if (c == 0)
5750 nocode_wanted++;
5751 if (!g)
5752 gexpr();
5754 if (c < 0 && vtop->r == VT_CMP) {
5755 t1 = gvtst(0, 0);
5756 vpushi(0);
5757 gvtst_set(0, t1);
5760 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5761 mk_pointer(&vtop->type);
5762 type1 = vtop->type;
5763 sv = *vtop; /* save value to handle it later */
5764 vtop--; /* no vpop so that FP stack is not flushed */
5766 if (g) {
5767 u = tt;
5768 } else if (c < 0) {
5769 u = gjmp(0);
5770 gsym(tt);
5771 } else
5772 u = 0;
5774 nocode_wanted = ncw_prev;
5775 if (c == 1)
5776 nocode_wanted++;
5777 skip(':');
5778 expr_cond();
5780 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5781 if (sv.r == VT_CMP) {
5782 t1 = sv.jtrue;
5783 t2 = u;
5784 } else {
5785 t1 = gvtst(0, 0);
5786 t2 = gjmp(0);
5787 gsym(u);
5788 vpushv(&sv);
5790 gvtst_set(0, t1);
5791 gvtst_set(1, t2);
5792 nocode_wanted = ncw_prev;
5793 // tcc_warning("two conditions expr_cond");
5794 return;
5797 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5798 mk_pointer(&vtop->type);
5799 type2=vtop->type;
5800 t1 = type1.t;
5801 bt1 = t1 & VT_BTYPE;
5802 t2 = type2.t;
5803 bt2 = t2 & VT_BTYPE;
5804 type.ref = NULL;
5806 /* cast operands to correct type according to ISOC rules */
5807 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5808 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5809 } else if (is_float(bt1) || is_float(bt2)) {
5810 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5811 type.t = VT_LDOUBLE;
5813 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5814 type.t = VT_DOUBLE;
5815 } else {
5816 type.t = VT_FLOAT;
5818 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5819 /* cast to biggest op */
5820 type.t = VT_LLONG | VT_LONG;
5821 if (bt1 == VT_LLONG)
5822 type.t &= t1;
5823 if (bt2 == VT_LLONG)
5824 type.t &= t2;
5825 /* convert to unsigned if it does not fit in a long long */
5826 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5827 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5828 type.t |= VT_UNSIGNED;
5829 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5830 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5831 /* If one is a null ptr constant the result type
5832 is the other. */
5833 if (is_null_pointer (vtop)) type = type1;
5834 else if (is_null_pointer (&sv)) type = type2;
5835 else if (bt1 != bt2)
5836 tcc_error("incompatible types in conditional expressions");
5837 else {
5838 CType *pt1 = pointed_type(&type1);
5839 CType *pt2 = pointed_type(&type2);
5840 int pbt1 = pt1->t & VT_BTYPE;
5841 int pbt2 = pt2->t & VT_BTYPE;
5842 int newquals, copied = 0;
5843 /* pointers to void get preferred, otherwise the
5844 pointed-to types minus qualifiers should be compatible */
5845 type = (pbt1 == VT_VOID) ? type1 : type2;
5846 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5847 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5848 tcc_warning("pointer type mismatch in conditional expression\n");
5850 /* combine qualifs */
5851 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5852 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5853 & newquals)
5855 /* copy the pointer target symbol */
5856 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5857 0, type.ref->c);
5858 copied = 1;
5859 pointed_type(&type)->t |= newquals;
5861 /* pointers to incomplete arrays get converted to
5862 pointers to completed ones if possible */
5863 if (pt1->t & VT_ARRAY
5864 && pt2->t & VT_ARRAY
5865 && pointed_type(&type)->ref->c < 0
5866 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5868 if (!copied)
5869 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5870 0, type.ref->c);
5871 pointed_type(&type)->ref =
5872 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5873 0, pointed_type(&type)->ref->c);
5874 pointed_type(&type)->ref->c =
5875 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5878 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5879 /* XXX: test structure compatibility */
5880 type = bt1 == VT_STRUCT ? type1 : type2;
5881 } else {
5882 /* integer operations */
5883 type.t = VT_INT | (VT_LONG & (t1 | t2));
5884 /* convert to unsigned if it does not fit in an integer */
5885 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5886 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5887 type.t |= VT_UNSIGNED;
5889 /* keep structs as lvalues by transforming `(expr ? a : b)` into `*(expr ? &a : &b)` so
5890 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5891 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5893 /* now we convert second operand */
5894 if (c != 1) {
5895 gen_cast(&type);
5896 if (islv) {
5897 mk_pointer(&vtop->type);
5898 gaddrof();
5899 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5900 gaddrof();
5903 rc = RC_INT;
5904 if (is_float(type.t)) {
5905 rc = RC_FLOAT;
5906 #ifdef TCC_TARGET_X86_64
5907 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5908 rc = RC_ST0;
5910 #endif
5911 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5912 /* for long longs, we use fixed registers to avoid having
5913 to handle a complicated move */
5914 rc = RC_IRET;
5917 tt = r2 = 0;
5918 if (c < 0) {
5919 r2 = gv(rc);
5920 tt = gjmp(0);
5922 gsym(u);
5923 nocode_wanted = ncw_prev;
5925 /* this is horrible, but we must also convert first
5926 operand */
5927 if (c != 0) {
5928 *vtop = sv;
5929 gen_cast(&type);
5930 if (islv) {
5931 mk_pointer(&vtop->type);
5932 gaddrof();
5933 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5934 gaddrof();
5937 if (c < 0) {
5938 r1 = gv(rc);
5939 move_reg(r2, r1, type.t);
5940 vtop->r = r2;
5941 gsym(tt);
5944 if (islv)
5945 indir();
5950 static void expr_eq(void)
5952 int t;
5954 expr_cond();
5955 if (tok == '=' ||
5956 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5957 tok == TOK_A_XOR || tok == TOK_A_OR ||
5958 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5959 test_lvalue();
5960 t = tok;
5961 next();
5962 if (t == '=') {
5963 expr_eq();
5964 } else {
5965 vdup();
5966 expr_eq();
5967 gen_op(t & 0x7f);
5969 vstore();
5973 ST_FUNC void gexpr(void)
5975 while (1) {
5976 expr_eq();
5977 if (tok != ',')
5978 break;
5979 vpop();
5980 next();
5984 /* parse a constant expression and return value in vtop. */
5985 static void expr_const1(void)
5987 const_wanted++;
5988 nocode_wanted++;
5989 expr_cond();
5990 nocode_wanted--;
5991 const_wanted--;
5994 /* parse an integer constant and return its value. */
5995 static inline int64_t expr_const64(void)
5997 int64_t c;
5998 expr_const1();
5999 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6000 expect("constant expression");
6001 c = vtop->c.i;
6002 vpop();
6003 return c;
6006 /* parse an integer constant and return its value.
6007 Complain if it doesn't fit in 32 bits (signed or unsigned). */
6008 ST_FUNC int expr_const(void)
6010 int c;
6011 int64_t wc = expr_const64();
6012 c = wc;
6013 if (c != wc && (unsigned)c != wc)
6014 tcc_error("constant exceeds 32 bit");
6015 return c;
6018 /* ------------------------------------------------------------------------- */
6019 /* return from function */
6021 #ifndef TCC_TARGET_ARM64
6022 #ifndef TCC_TARGET_RISCV64
6023 static void gfunc_return(CType *func_type)
6025 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6026 CType type, ret_type;
6027 int ret_align, ret_nregs, regsize;
6028 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6029 &ret_align, &regsize);
6030 if (0 == ret_nregs) {
6031 /* if returning structure, must copy it to implicit
6032 first pointer arg location */
6033 type = *func_type;
6034 mk_pointer(&type);
6035 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6036 indir();
6037 vswap();
6038 /* copy structure value to pointer */
6039 vstore();
6040 } else {
6041 /* returning structure packed into registers */
6042 int r, size, addr, align;
6043 size = type_size(func_type,&align);
6044 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6045 (vtop->c.i & (ret_align-1)))
6046 && (align & (ret_align-1))) {
6047 loc = (loc - size) & -ret_align;
6048 addr = loc;
6049 type = *func_type;
6050 vset(&type, VT_LOCAL | VT_LVAL, addr);
6051 vswap();
6052 vstore();
6053 vpop();
6054 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6056 vtop->type = ret_type;
6057 if (is_float(ret_type.t))
6058 r = rc_fret(ret_type.t);
6059 else
6060 r = RC_IRET;
6062 if (ret_nregs == 1)
6063 gv(r);
6064 else {
6065 for (;;) {
6066 vdup();
6067 gv(r);
6068 vpop();
6069 if (--ret_nregs == 0)
6070 break;
6071 /* We assume that when a structure is returned in multiple
6072 registers, their classes are consecutive values of the
6073 sequence s(n) = 2^n */
6074 r <<= 1;
6075 vtop->c.i += regsize;
6079 } else if (is_float(func_type->t)) {
6080 gv(rc_fret(func_type->t));
6081 } else {
6082 gv(RC_IRET);
6084 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6086 #endif
6087 #endif
6089 static void check_func_return(void)
6091 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6092 return;
6093 if (!strcmp (funcname, "main")
6094 && (func_vt.t & VT_BTYPE) == VT_INT) {
6095 /* main returns 0 by default */
6096 vpushi(0);
6097 gen_assign_cast(&func_vt);
6098 gfunc_return(&func_vt);
6099 } else {
6100 tcc_warning("function might return no value: '%s'", funcname);
6104 /* ------------------------------------------------------------------------- */
6105 /* switch/case */
6107 static int case_cmp(const void *pa, const void *pb)
6109 int64_t a = (*(struct case_t**) pa)->v1;
6110 int64_t b = (*(struct case_t**) pb)->v1;
6111 return a < b ? -1 : a > b;
6114 static void gtst_addr(int t, int a)
6116 gsym_addr(gvtst(0, t), a);
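/* gcase() emits the comparisons for a sorted array of case ranges: while
   more than 8 ranges remain it binary-searches (compare against the middle
   range, recurse on the lower half), then the remaining ranges are tested
   linearly; *bsym collects the jump taken when no case matches. */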
6119 static void gcase(struct case_t **base, int len, int *bsym)
6121 struct case_t *p;
6122 int e;
6123 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6124 while (len > 8) {
6125 /* binary search */
6126 p = base[len/2];
6127 vdup();
6128 if (ll)
6129 vpushll(p->v2);
6130 else
6131 vpushi(p->v2);
6132 gen_op(TOK_LE);
6133 e = gvtst(1, 0);
6134 vdup();
6135 if (ll)
6136 vpushll(p->v1);
6137 else
6138 vpushi(p->v1);
6139 gen_op(TOK_GE);
6140 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6141 /* x < v1 */
6142 gcase(base, len/2, bsym);
6143 /* x > v2 */
6144 gsym(e);
6145 e = len/2 + 1;
6146 base += e; len -= e;
6148 /* linear scan */
6149 while (len--) {
6150 p = *base++;
6151 vdup();
6152 if (ll)
6153 vpushll(p->v2);
6154 else
6155 vpushi(p->v2);
6156 if (p->v1 == p->v2) {
6157 gen_op(TOK_EQ);
6158 gtst_addr(0, p->sym);
6159 } else {
6160 gen_op(TOK_LE);
6161 e = gvtst(1, 0);
6162 vdup();
6163 if (ll)
6164 vpushll(p->v1);
6165 else
6166 vpushi(p->v1);
6167 gen_op(TOK_GE);
6168 gtst_addr(0, p->sym);
6169 gsym(e);
6172 *bsym = gjmp(*bsym);
6175 /* ------------------------------------------------------------------------- */
6176 /* __attribute__((cleanup(fn))) */
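/* For example:
       void unlock(int **p);
       { int *res __attribute__((cleanup(unlock))) = ...; }
   arranges for unlock(&res) to be called when 'res' goes out of scope; the
   helpers below emit those calls for normal scope exit, goto and
   break/continue. */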
6178 static void try_call_scope_cleanup(Sym *stop)
6180 Sym *cls = cur_scope->cl.s;
6182 for (; cls != stop; cls = cls->ncl) {
6183 Sym *fs = cls->next;
6184 Sym *vs = cls->prev_tok;
6186 vpushsym(&fs->type, fs);
6187 vset(&vs->type, vs->r, vs->c);
6188 vtop->sym = vs;
6189 mk_pointer(&vtop->type);
6190 gaddrof();
6191 gfunc_call(1);
6195 static void try_call_cleanup_goto(Sym *cleanupstate)
6197 Sym *oc, *cc;
6198 int ocd, ccd;
6200 if (!cur_scope->cl.s)
6201 return;
6203 /* find the nearest common ancestor (NCA) of both cleanup chains, given their parents and initial depths */
6204 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6205 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6207 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6209 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6212 try_call_scope_cleanup(cc);
6215 /* call 'func' for each __attribute__((cleanup(func))) */
6216 static void block_cleanup(struct scope *o)
6218 int jmp = 0;
6219 Sym *g, **pg;
6220 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6221 if (g->prev_tok->r & LABEL_FORWARD) {
6222 Sym *pcl = g->next;
6223 if (!jmp)
6224 jmp = gjmp(0);
6225 gsym(pcl->jnext);
6226 try_call_scope_cleanup(o->cl.s);
6227 pcl->jnext = gjmp(0);
6228 if (!o->cl.n)
6229 goto remove_pending;
6230 g->c = o->cl.n;
6231 pg = &g->prev;
6232 } else {
6233 remove_pending:
6234 *pg = g->prev;
6235 sym_free(g);
6238 gsym(jmp);
6239 try_call_scope_cleanup(o->cl.s);
6242 /* ------------------------------------------------------------------------- */
6243 /* VLA */
6245 static void vla_restore(int loc)
6247 if (loc)
6248 gen_vla_sp_restore(loc);
6251 static void vla_leave(struct scope *o)
6253 if (o->vla.num < cur_scope->vla.num)
6254 vla_restore(o->vla.loc);
6257 /* ------------------------------------------------------------------------- */
6258 /* local scopes */
6260 void new_scope(struct scope *o)
6262 /* copy and link previous scope */
6263 *o = *cur_scope;
6264 o->prev = cur_scope;
6265 cur_scope = o;
6267 /* record local declaration stack position */
6268 o->lstk = local_stack;
6269 o->llstk = local_label_stack;
6271 ++local_scope;
6274 void prev_scope(struct scope *o, int is_expr)
6276 vla_leave(o->prev);
6278 if (o->cl.s != o->prev->cl.s)
6279 block_cleanup(o->prev);
6281 /* pop locally defined labels */
6282 label_pop(&local_label_stack, o->llstk, is_expr);
6284 /* In the is_expr case (a statement expression is finished here),
6285 vtop might refer to symbols on the local_stack. Either via the
6286 type or via vtop->sym. We can't pop those nor any that in turn
6287 might be referred to. To make it easier we don't roll back
6288 any symbols in that case; some upper level call to block() will
6289 do that. We do have to remove such symbols from the lookup
6290 tables, though. sym_pop will do that. */
6292 /* pop locally defined symbols */
6293 sym_pop(&local_stack, o->lstk, is_expr);
6295 cur_scope = o->prev;
6296 --local_scope;
6299 /* leave a scope via break/continue(/goto) */
6300 void leave_scope(struct scope *o)
6302 if (!o)
6303 return;
6304 try_call_scope_cleanup(o->cl.s);
6305 vla_leave(o);
6308 /* ------------------------------------------------------------------------- */
6309 /* call block from 'for do while' loops */
6311 static void lblock(int *bsym, int *csym)
6313 struct scope *lo = loop_scope, *co = cur_scope;
6314 int *b = co->bsym, *c = co->csym;
6315 if (csym) {
6316 co->csym = csym;
6317 loop_scope = co;
6319 co->bsym = bsym;
6320 block(0);
6321 co->bsym = b;
6322 if (csym) {
6323 co->csym = c;
6324 loop_scope = lo;
6328 static void block(int is_expr)
6330 int a, b, c, d, e, t;
6331 Sym *s;
6333 if (is_expr) {
6334 /* default return value is (void) */
6335 vpushi(0);
6336 vtop->type.t = VT_VOID;
6339 again:
6340 t = tok, next();
6342 if (t == TOK_IF) {
6343 skip('(');
6344 gexpr();
6345 skip(')');
6346 a = gvtst(1, 0);
6347 block(0);
6348 if (tok == TOK_ELSE) {
6349 d = gjmp(0);
6350 gsym(a);
6351 next();
6352 block(0);
6353 gsym(d); /* patch else jmp */
6354 } else {
6355 gsym(a);
6358 } else if (t == TOK_WHILE) {
6359 d = gind();
6360 skip('(');
6361 gexpr();
6362 skip(')');
6363 a = gvtst(1, 0);
6364 b = 0;
6365 lblock(&a, &b);
6366 gjmp_addr(d);
6367 gsym_addr(b, d);
6368 gsym(a);
6370 } else if (t == '{') {
6371 struct scope o;
6372 new_scope(&o);
6374 /* handle local labels declarations */
6375 while (tok == TOK_LABEL) {
6376 do {
6377 next();
6378 if (tok < TOK_UIDENT)
6379 expect("label identifier");
6380 label_push(&local_label_stack, tok, LABEL_DECLARED);
6381 next();
6382 } while (tok == ',');
6383 skip(';');
6386 while (tok != '}') {
6387 decl(VT_LOCAL);
6388 if (tok != '}') {
6389 if (is_expr)
6390 vpop();
6391 block(is_expr);
6395 prev_scope(&o, is_expr);
6397 if (0 == local_scope && !nocode_wanted)
6398 check_func_return();
6399 next();
6401 } else if (t == TOK_RETURN) {
6402 a = tok != ';';
6403 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6404 if (a)
6405 gexpr(), gen_assign_cast(&func_vt);
6406 leave_scope(root_scope);
6407 if (a && b)
6408 gfunc_return(&func_vt);
6409 else if (a)
6410 vtop--;
6411 else if (b)
6412 tcc_warning("'return' with no value.");
6413 skip(';');
6414 /* jump unless last stmt in top-level block */
6415 if (tok != '}' || local_scope != 1)
6416 rsym = gjmp(rsym);
6417 CODE_OFF();
6419 } else if (t == TOK_BREAK) {
6420 /* compute jump */
6421 if (!cur_scope->bsym)
6422 tcc_error("cannot break");
6423 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6424 leave_scope(loop_scope);
6425 else
6426 leave_scope(cur_switch->scope);
6427 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6428 skip(';');
6430 } else if (t == TOK_CONTINUE) {
6431 /* compute jump */
6432 if (!cur_scope->csym)
6433 tcc_error("cannot continue");
6434 leave_scope(loop_scope);
6435 *cur_scope->csym = gjmp(*cur_scope->csym);
6436 skip(';');
6438 } else if (t == TOK_FOR) {
6439 struct scope o;
6440 new_scope(&o);
6442 skip('(');
6443 if (tok != ';') {
6444 /* c99 for-loop init decl? */
6445 if (!decl0(VT_LOCAL, 1, NULL)) {
6446 /* no, regular for-loop init expr */
6447 gexpr();
6448 vpop();
6451 skip(';');
6452 a = b = 0;
6453 c = d = gind();
6454 if (tok != ';') {
6455 gexpr();
6456 a = gvtst(1, 0);
6458 skip(';');
6459 if (tok != ')') {
6460 e = gjmp(0);
6461 d = gind();
6462 gexpr();
6463 vpop();
6464 gjmp_addr(c);
6465 gsym(e);
6467 skip(')');
6468 lblock(&a, &b);
6469 gjmp_addr(d);
6470 gsym_addr(b, d);
6471 gsym(a);
6472 prev_scope(&o, 0);
6474 } else if (t == TOK_DO) {
6475 a = b = 0;
6476 d = gind();
6477 lblock(&a, &b);
6478 gsym(b);
6479 skip(TOK_WHILE);
6480 skip('(');
6481 gexpr();
6482 skip(')');
6483 skip(';');
6484 c = gvtst(0, 0);
6485 gsym_addr(c, d);
6486 gsym(a);
6488 } else if (t == TOK_SWITCH) {
6489 struct switch_t *saved, sw;
6490 SValue switchval;
6492 sw.p = NULL;
6493 sw.n = 0;
6494 sw.def_sym = 0;
6495 sw.bsym = &a;
6496 sw.scope = cur_scope;
6498 saved = cur_switch;
6499 cur_switch = &sw;
6501 skip('(');
6502 gexpr();
6503 skip(')');
6504 switchval = *vtop--;
6506 a = 0;
6507 b = gjmp(0); /* jump to first case */
6508 lblock(&a, NULL);
6509 a = gjmp(a); /* add implicit break */
6510 /* case lookup */
6511 gsym(b);
6513 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6514 for (b = 1; b < sw.n; b++)
6515 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6516 tcc_error("duplicate case value");
6518 /* Our switch table sorting is signed, so the compared
6519 value needs to be as well when it's 64bit. */
6520 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6521 switchval.type.t &= ~VT_UNSIGNED;
6522 vpushv(&switchval);
6523 gv(RC_INT);
6524 d = 0, gcase(sw.p, sw.n, &d);
6525 vpop();
6526 if (sw.def_sym)
6527 gsym_addr(d, sw.def_sym);
6528 else
6529 gsym(d);
6530 /* break label */
6531 gsym(a);
6533 dynarray_reset(&sw.p, &sw.n);
6534 cur_switch = saved;
6536 } else if (t == TOK_CASE) {
6537 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6538 if (!cur_switch)
6539 expect("switch");
6540 cr->v1 = cr->v2 = expr_const64();
6541 if (gnu_ext && tok == TOK_DOTS) {
6542 next();
6543 cr->v2 = expr_const64();
6544 if (cr->v2 < cr->v1)
6545 tcc_warning("empty case range");
6547 cr->sym = gind();
6548 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6549 skip(':');
6550 is_expr = 0;
6551 goto block_after_label;
6553 } else if (t == TOK_DEFAULT) {
6554 if (!cur_switch)
6555 expect("switch");
6556 if (cur_switch->def_sym)
6557 tcc_error("too many 'default'");
6558 cur_switch->def_sym = gind();
6559 skip(':');
6560 is_expr = 0;
6561 goto block_after_label;
6563 } else if (t == TOK_GOTO) {
6564 vla_restore(root_scope->vla.loc);
6565 if (tok == '*' && gnu_ext) {
6566 /* computed goto */
6567 next();
6568 gexpr();
6569 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6570 expect("pointer");
6571 ggoto();
6573 } else if (tok >= TOK_UIDENT) {
6574 s = label_find(tok);
6575 /* put forward definition if needed */
6576 if (!s)
6577 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6578 else if (s->r == LABEL_DECLARED)
6579 s->r = LABEL_FORWARD;
6581 if (s->r & LABEL_FORWARD) {
6582 /* start new goto chain for cleanups, linked via label->next */
6583 if (cur_scope->cl.s && !nocode_wanted) {
6584 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6585 pending_gotos->prev_tok = s;
6586 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6587 pending_gotos->next = s;
6589 s->jnext = gjmp(s->jnext);
6590 } else {
6591 try_call_cleanup_goto(s->cleanupstate);
6592 gjmp_addr(s->jnext);
6594 next();
6596 } else {
6597 expect("label identifier");
6599 skip(';');
6601 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6602 asm_instr();
6604 } else {
6605 if (tok == ':' && t >= TOK_UIDENT) {
6606 /* label case */
6607 next();
6608 s = label_find(t);
6609 if (s) {
6610 if (s->r == LABEL_DEFINED)
6611 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6612 s->r = LABEL_DEFINED;
6613 if (s->next) {
6614 Sym *pcl; /* pending cleanup goto */
6615 for (pcl = s->next; pcl; pcl = pcl->prev)
6616 gsym(pcl->jnext);
6617 sym_pop(&s->next, NULL, 0);
6618 } else
6619 gsym(s->jnext);
6620 } else {
6621 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6623 s->jnext = gind();
6624 s->cleanupstate = cur_scope->cl.s;
6626 block_after_label:
6627 vla_restore(cur_scope->vla.loc);
6628 /* we accept this, but it is a mistake */
6629 if (tok == '}') {
6630 tcc_warning("deprecated use of label at end of compound statement");
6631 } else {
6632 goto again;
6635 } else {
6636 /* expression case */
6637 if (t != ';') {
6638 unget_tok(t);
6639 if (is_expr) {
6640 vpop();
6641 gexpr();
6642 } else {
6643 gexpr();
6644 vpop();
6646 skip(';');
6652 /* This skips over a stream of tokens containing balanced {} and ()
6653 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6654 with a '{'). If STR then allocates and stores the skipped tokens
6655 in *STR. This doesn't check if () and {} are nested correctly,
6656 i.e. "({)}" is accepted. */
6657 static void skip_or_save_block(TokenString **str)
6659 int braces = tok == '{';
6660 int level = 0;
6661 if (str)
6662 *str = tok_str_alloc();
6664 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6665 int t;
6666 if (tok == TOK_EOF) {
6667 if (str || level > 0)
6668 tcc_error("unexpected end of file");
6669 else
6670 break;
6672 if (str)
6673 tok_str_add_tok(*str);
6674 t = tok;
6675 next();
6676 if (t == '{' || t == '(') {
6677 level++;
6678 } else if (t == '}' || t == ')') {
6679 level--;
6680 if (level == 0 && braces && t == '}')
6681 break;
6684 if (str) {
6685 tok_str_add(*str, -1);
6686 tok_str_add(*str, 0);
6690 #define EXPR_CONST 1
6691 #define EXPR_ANY 2
6693 static void parse_init_elem(int expr_type)
6695 int saved_global_expr;
6696 switch(expr_type) {
6697 case EXPR_CONST:
6698 /* compound literals must be allocated globally in this case */
6699 saved_global_expr = global_expr;
6700 global_expr = 1;
6701 expr_const1();
6702 global_expr = saved_global_expr;
6703 /* NOTE: symbols are accepted, as well as lvalues of anonymous symbols
6704 (compound literals). */
6705 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6706 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6707 || vtop->sym->v < SYM_FIRST_ANOM))
6708 #ifdef TCC_TARGET_PE
6709 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6710 #endif
6712 tcc_error("initializer element is not constant");
6713 break;
6714 case EXPR_ANY:
6715 expr_eq();
6716 break;
6720 /* put zeros for variable based init */
6721 static void init_putz(Section *sec, unsigned long c, int size)
6723 if (sec) {
6724 /* nothing to do because globals are already set to zero */
6725 } else {
6726 vpush_global_sym(&func_old_type, TOK_memset);
6727 vseti(VT_LOCAL, c);
6728 #ifdef TCC_TARGET_ARM
6729 vpushs(size);
6730 vpushi(0);
6731 #else
6732 vpushi(0);
6733 vpushs(size);
6734 #endif
6735 gfunc_call(3);
6739 #define DIF_FIRST 1
6740 #define DIF_SIZE_ONLY 2
6741 #define DIF_HAVE_ELEM 4
6743 /* t is the array or struct type. c is the array or struct
6744 address. cur_field is the pointer to the current
6745 field, for arrays the 'c' member contains the current start
6746 index. 'flags' is as in decl_initializer.
6747 'al' contains the already initialized length of the
6748 current container (starting at c). Returns the new length of that container. */
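/* Designator forms handled here include, e.g.:
       struct P p = { .x = 1 };        C99 field designator
       int a[8] = { [2] = 3 };         C99 index designator
       int b[8] = { [2 ... 5] = 7 };   GNU range, only as last designator
       struct P q = { x: 1 };          old GNU 'field:' form
   */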
6749 static int decl_designator(CType *type, Section *sec, unsigned long c,
6750 Sym **cur_field, int flags, int al)
6752 Sym *s, *f;
6753 int index, index_last, align, l, nb_elems, elem_size;
6754 unsigned long corig = c;
6756 elem_size = 0;
6757 nb_elems = 1;
6759 if (flags & DIF_HAVE_ELEM)
6760 goto no_designator;
6762 if (gnu_ext && tok >= TOK_UIDENT) {
6763 l = tok, next();
6764 if (tok == ':')
6765 goto struct_field;
6766 unget_tok(l);
6769 /* NOTE: we only support ranges for last designator */
6770 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6771 if (tok == '[') {
6772 if (!(type->t & VT_ARRAY))
6773 expect("array type");
6774 next();
6775 index = index_last = expr_const();
6776 if (tok == TOK_DOTS && gnu_ext) {
6777 next();
6778 index_last = expr_const();
6780 skip(']');
6781 s = type->ref;
6782 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6783 index_last < index)
6784 tcc_error("invalid index");
6785 if (cur_field)
6786 (*cur_field)->c = index_last;
6787 type = pointed_type(type);
6788 elem_size = type_size(type, &align);
6789 c += index * elem_size;
6790 nb_elems = index_last - index + 1;
6791 } else {
6792 int cumofs;
6793 next();
6794 l = tok;
6795 struct_field:
6796 next();
6797 if ((type->t & VT_BTYPE) != VT_STRUCT)
6798 expect("struct/union type");
6799 cumofs = 0;
6800 f = find_field(type, l, &cumofs);
6801 if (!f)
6802 expect("field");
6803 if (cur_field)
6804 *cur_field = f;
6805 type = &f->type;
6806 c += cumofs + f->c;
6808 cur_field = NULL;
6810 if (!cur_field) {
6811 if (tok == '=') {
6812 next();
6813 } else if (!gnu_ext) {
6814 expect("=");
6816 } else {
6817 no_designator:
6818 if (type->t & VT_ARRAY) {
6819 index = (*cur_field)->c;
6820 if (type->ref->c >= 0 && index >= type->ref->c)
6821 tcc_error("index too large");
6822 type = pointed_type(type);
6823 c += index * type_size(type, &align);
6824 } else {
6825 f = *cur_field;
6826 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6827 *cur_field = f = f->next;
6828 if (!f)
6829 tcc_error("too many field init");
6830 type = &f->type;
6831 c += f->c;
6834 /* must put zero in holes (note that doing it that way
6835 ensures that it even works with designators) */
6836 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6837 init_putz(sec, corig + al, c - corig - al);
6838 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6840 /* XXX: make it more general */
6841 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6842 unsigned long c_end;
6843 uint8_t *src, *dst;
6844 int i;
6846 if (!sec) {
6847 vset(type, VT_LOCAL|VT_LVAL, c);
6848 for (i = 1; i < nb_elems; i++) {
6849 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6850 vswap();
6851 vstore();
6853 vpop();
6854 } else if (!NODATA_WANTED) {
6855 c_end = c + nb_elems * elem_size;
6856 if (c_end > sec->data_allocated)
6857 section_realloc(sec, c_end);
6858 src = sec->data + c;
6859 dst = src;
6860 for(i = 1; i < nb_elems; i++) {
6861 dst += elem_size;
6862 memcpy(dst, src, elem_size);
6866 c += nb_elems * type_size(type, &align);
6867 if (c - corig > al)
6868 al = c - corig;
6869 return al;
6872 /* store a value or an expression directly in global data or in local array */
6873 static void init_putv(CType *type, Section *sec, unsigned long c)
6875 int bt;
6876 void *ptr;
6877 CType dtype;
6879 dtype = *type;
6880 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6882 if (sec) {
6883 int size, align;
6884 /* XXX: not portable */
6885 /* XXX: generate error if incorrect relocation */
6886 gen_assign_cast(&dtype);
6887 bt = type->t & VT_BTYPE;
6889 if ((vtop->r & VT_SYM)
6890 && bt != VT_PTR
6891 && bt != VT_FUNC
6892 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6893 || (type->t & VT_BITFIELD))
6894 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6896 tcc_error("initializer element is not computable at load time");
6898 if (NODATA_WANTED) {
6899 vtop--;
6900 return;
6903 size = type_size(type, &align);
6904 section_reserve(sec, c + size);
6905 ptr = sec->data + c;
6907 /* XXX: make code faster ? */
6908 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6909 vtop->sym->v >= SYM_FIRST_ANOM &&
6910 /* XXX This rejects compound literals like
6911 '(void *){ptr}'. The problem is that '&sym' is
6912 represented the same way, which would be ruled out
6913 by the SYM_FIRST_ANOM check above, but also '"string"'
6914 in 'char *p = "string"' is represented the same
6915 with the type being VT_PTR and the symbol being an
6916 anonymous one. That is, there's no difference in vtop
6917 between '(void *){x}' and '&(void *){x}'. Ignore
6918 pointer typed entities here. Hopefully no real code
6919 will ever use compound literals with scalar type. */
6920 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6921 /* These come from compound literals, memcpy stuff over. */
6922 Section *ssec;
6923 ElfSym *esym;
6924 ElfW_Rel *rel;
6925 esym = elfsym(vtop->sym);
6926 ssec = tcc_state->sections[esym->st_shndx];
6927 memmove (ptr, ssec->data + esym->st_value, size);
6928 if (ssec->reloc) {
6929 /* We need to copy over all memory contents, and that
6930 includes relocations. Use the fact that relocs are
6931 created in order, so look from the end of relocs
6932 until we hit one before the copied region. */
6933 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6934 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6935 while (num_relocs--) {
6936 rel--;
6937 if (rel->r_offset >= esym->st_value + size)
6938 continue;
6939 if (rel->r_offset < esym->st_value)
6940 break;
6941 /* Note: if the same fields are initialized multiple
6942 times (possible with designators) then we possibly
6943 add multiple relocations for the same offset here.
6944 That would lead to wrong code; the last reloc needs
6945 to win. We clean this up later after the whole
6946 initializer is parsed. */
6947 put_elf_reloca(symtab_section, sec,
6948 c + rel->r_offset - esym->st_value,
6949 ELFW(R_TYPE)(rel->r_info),
6950 ELFW(R_SYM)(rel->r_info),
6951 #if PTR_SIZE == 8
6952 rel->r_addend
6953 #else
6955 #endif
6959 } else {
6960 if (type->t & VT_BITFIELD) {
6961 int bit_pos, bit_size, bits, n;
6962 unsigned char *p, v, m;
6963 bit_pos = BIT_POS(vtop->type.t);
6964 bit_size = BIT_SIZE(vtop->type.t);
6965 p = (unsigned char*)ptr + (bit_pos >> 3);
6966 bit_pos &= 7, bits = 0;
6967 while (bit_size) {
6968 n = 8 - bit_pos;
6969 if (n > bit_size)
6970 n = bit_size;
6971 v = vtop->c.i >> bits << bit_pos;
6972 m = ((1 << n) - 1) << bit_pos;
6973 *p = (*p & ~m) | (v & m);
6974 bits += n, bit_size -= n, bit_pos = 0, ++p;
6976 } else
6977 switch(bt) {
6978 /* XXX: when cross-compiling we assume that each type has the
6979 same representation on host and target, which is likely to
6980 be wrong in the case of long double */
6981 case VT_BOOL:
6982 vtop->c.i = vtop->c.i != 0;
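/* fall through */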
6983 case VT_BYTE:
6984 *(char *)ptr |= vtop->c.i;
6985 break;
6986 case VT_SHORT:
6987 *(short *)ptr |= vtop->c.i;
6988 break;
6989 case VT_FLOAT:
6990 *(float*)ptr = vtop->c.f;
6991 break;
6992 case VT_DOUBLE:
6993 *(double *)ptr = vtop->c.d;
6994 break;
6995 case VT_LDOUBLE:
6996 #if defined TCC_IS_NATIVE_387
6997 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6998 memcpy(ptr, &vtop->c.ld, 10);
6999 #ifdef __TINYC__
7000 else if (sizeof (long double) == sizeof (double))
7001 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7002 #endif
7003 else if (vtop->c.ld == 0.0)
7005 else
7006 #endif
7007 if (sizeof(long double) == LDOUBLE_SIZE)
7008 *(long double*)ptr = vtop->c.ld;
7009 else if (sizeof(double) == LDOUBLE_SIZE)
7010 *(double *)ptr = (double)vtop->c.ld;
7011 else
7012 tcc_error("can't cross compile long double constants");
7013 break;
7014 #if PTR_SIZE != 8
7015 case VT_LLONG:
7016 *(long long *)ptr |= vtop->c.i;
7017 break;
7018 #else
7019 case VT_LLONG:
7020 #endif
7021 case VT_PTR:
7023 addr_t val = vtop->c.i;
7024 #if PTR_SIZE == 8
7025 if (vtop->r & VT_SYM)
7026 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7027 else
7028 *(addr_t *)ptr |= val;
7029 #else
7030 if (vtop->r & VT_SYM)
7031 greloc(sec, vtop->sym, c, R_DATA_PTR);
7032 *(addr_t *)ptr |= val;
7033 #endif
7034 break;
7036 default:
7038 int val = vtop->c.i;
7039 #if PTR_SIZE == 8
7040 if (vtop->r & VT_SYM)
7041 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7042 else
7043 *(int *)ptr |= val;
7044 #else
7045 if (vtop->r & VT_SYM)
7046 greloc(sec, vtop->sym, c, R_DATA_PTR);
7047 *(int *)ptr |= val;
7048 #endif
7049 break;
7053 vtop--;
7054 } else {
7055 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7056 vswap();
7057 vstore();
7058 vpop();
7062 /* 'type' contains the type and storage info. 'c' is the offset of the
7063 object in section 'sec'. If 'sec' is NULL, it means stack based
7064 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7065 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7066 size only evaluation is wanted (only for arrays). */
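/* For example, for a local
       int tab[4] = { 1, 2 };
   the two given elements are stored with init_putv() and the remaining
   elements are cleared through init_putz() (a memset() call for stack
   objects); with DIF_SIZE_ONLY the initializer is only scanned, so that
   an unknown array size as in 'int tab[] = { 1, 2 };' can be computed. */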
7067 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7068 int flags)
7070 int len, n, no_oblock, nb, i;
7071 int size1, align1;
7072 Sym *s, *f;
7073 Sym indexsym;
7074 CType *t1;
7076 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7077 /* In case of strings we have special handling for arrays, so
7078 don't consume them as initializer value (which would commit them
7079 to some anonymous symbol). */
7080 tok != TOK_LSTR && tok != TOK_STR &&
7081 !(flags & DIF_SIZE_ONLY)) {
7082 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7083 flags |= DIF_HAVE_ELEM;
7086 if ((flags & DIF_HAVE_ELEM) &&
7087 !(type->t & VT_ARRAY) &&
7088 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7089 The source type might have VT_CONSTANT set, which is
7090 of course assignable to non-const elements. */
7091 is_compatible_unqualified_types(type, &vtop->type)) {
7092 init_putv(type, sec, c);
7093 } else if (type->t & VT_ARRAY) {
7094 s = type->ref;
7095 n = s->c;
7096 t1 = pointed_type(type);
7097 size1 = type_size(t1, &align1);
7099 no_oblock = 1;
7100 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7101 tok == '{') {
7102 if (tok != '{')
7103 tcc_error("character array initializer must be a literal,"
7104 " optionally enclosed in braces");
7105 skip('{');
7106 no_oblock = 0;
7109 /* only parse strings here if correct type (otherwise: handle
7110 them as ((w)char *) expressions) */
7111 if ((tok == TOK_LSTR &&
7112 #ifdef TCC_TARGET_PE
7113 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7114 #else
7115 (t1->t & VT_BTYPE) == VT_INT
7116 #endif
7117 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7118 len = 0;
7119 while (tok == TOK_STR || tok == TOK_LSTR) {
7120 int cstr_len, ch;
7122 /* compute maximum number of chars wanted */
7123 if (tok == TOK_STR)
7124 cstr_len = tokc.str.size;
7125 else
7126 cstr_len = tokc.str.size / sizeof(nwchar_t);
7127 cstr_len--;
7128 nb = cstr_len;
7129 if (n >= 0 && nb > (n - len))
7130 nb = n - len;
7131 if (!(flags & DIF_SIZE_ONLY)) {
7132 if (cstr_len > nb)
7133 tcc_warning("initializer-string for array is too long");
7134 /* in order to go faster for the common case (char
7135 string in a global variable), we handle it
7136 specifically */
7137 if (sec && tok == TOK_STR && size1 == 1) {
7138 if (!NODATA_WANTED)
7139 memcpy(sec->data + c + len, tokc.str.data, nb);
7140 } else {
7141 for(i=0;i<nb;i++) {
7142 if (tok == TOK_STR)
7143 ch = ((unsigned char *)tokc.str.data)[i];
7144 else
7145 ch = ((nwchar_t *)tokc.str.data)[i];
7146 vpushi(ch);
7147 init_putv(t1, sec, c + (len + i) * size1);
7151 len += nb;
7152 next();
7154 /* only add trailing zero if enough storage (no
7155 warning in this case since it is standard) */
7156 if (n < 0 || len < n) {
7157 if (!(flags & DIF_SIZE_ONLY)) {
7158 vpushi(0);
7159 init_putv(t1, sec, c + (len * size1));
7161 len++;
7163 len *= size1;
7164 } else {
7165 indexsym.c = 0;
7166 f = &indexsym;
7168 do_init_list:
7169 len = 0;
7170 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7171 len = decl_designator(type, sec, c, &f, flags, len);
7172 flags &= ~DIF_HAVE_ELEM;
7173 if (type->t & VT_ARRAY) {
7174 ++indexsym.c;
7175 /* special test for multi dimensional arrays (may not
7176 be strictly correct if designators are used at the
7177 same time) */
7178 if (no_oblock && len >= n*size1)
7179 break;
7180 } else {
7181 if (s->type.t == VT_UNION)
7182 f = NULL;
7183 else
7184 f = f->next;
7185 if (no_oblock && f == NULL)
7186 break;
7189 if (tok == '}')
7190 break;
7191 skip(',');
7194 /* put zeros at the end */
7195 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7196 init_putz(sec, c + len, n*size1 - len);
7197 if (!no_oblock)
7198 skip('}');
7199 /* patch type size if needed, which happens only for array types */
7200 if (n < 0)
7201 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7202 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7203 size1 = 1;
7204 no_oblock = 1;
7205 if ((flags & DIF_FIRST) || tok == '{') {
7206 skip('{');
7207 no_oblock = 0;
7209 s = type->ref;
7210 f = s->next;
7211 n = s->c;
7212 goto do_init_list;
7213 } else if (tok == '{') {
7214 if (flags & DIF_HAVE_ELEM)
7215 skip(';');
7216 next();
7217 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7218 skip('}');
7219 } else if ((flags & DIF_SIZE_ONLY)) {
7220 /* If we supported only ISO C we wouldn't have to accept calling
7221 this on anything other than an array if DIF_SIZE_ONLY (and even then
7222 only on the outermost level, so no recursion would be needed),
7223 because initializing a flex array member isn't supported.
7224 But GNU C supports it, so we need to recurse even into
7225 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7226 /* just skip expression */
7227 skip_or_save_block(NULL);
7228 } else {
7229 if (!(flags & DIF_HAVE_ELEM)) {
7230 /* This should happen only when we haven't parsed
7231 the init element above for fear of committing a
7232 string constant to memory too early. */
7233 if (tok != TOK_STR && tok != TOK_LSTR)
7234 expect("string constant");
7235 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7237 init_putv(type, sec, c);
7241 /* parse an initializer for type 't' if 'has_init' is non zero, and
7242 allocate space in local or global data space ('r' is either
7243 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7244 variable 'v' of scope 'scope' is declared before initializers
7245 are parsed. If 'v' is zero, then a reference to the new object
7246 is put in the value stack. If 'has_init' is 2, a special parsing
7247 is done to handle string constants. */
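/* e.g. for 'char s[] = "abc";' the initializer tokens are first saved
   (the size is unknown), parsed once with DIF_SIZE_ONLY to compute the
   size, and then re-parsed from the saved token stream to actually
   generate the data or code. */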
7248 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7249 int has_init, int v, int scope)
7251 int size, align, addr;
7252 TokenString *init_str = NULL;
7254 Section *sec;
7255 Sym *flexible_array;
7256 Sym *sym = NULL;
7257 int saved_nocode_wanted = nocode_wanted;
7258 #ifdef CONFIG_TCC_BCHECK
7259 int bcheck;
7260 #endif
7262 /* Always allocate static or global variables */
7263 if (v && (r & VT_VALMASK) == VT_CONST)
7264 nocode_wanted |= 0x80000000;
7266 #ifdef CONFIG_TCC_BCHECK
7267 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7268 #endif
7270 flexible_array = NULL;
7271 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7272 Sym *field = type->ref->next;
7273 if (field) {
7274 while (field->next)
7275 field = field->next;
7276 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7277 flexible_array = field;
7281 size = type_size(type, &align);
7282 /* If the size is unknown, we must evaluate it before
7283 evaluating the initializers, because
7284 initializers can generate global data too
7285 (e.g. string pointers or ISOC99 compound
7286 literals). It also simplifies the handling
7287 of local initializers */
7288 if (size < 0 || (flexible_array && has_init)) {
7289 if (!has_init)
7290 tcc_error("unknown type size");
7291 /* get all init string */
7292 if (has_init == 2) {
7293 init_str = tok_str_alloc();
7294 /* only get strings */
7295 while (tok == TOK_STR || tok == TOK_LSTR) {
7296 tok_str_add_tok(init_str);
7297 next();
7299 tok_str_add(init_str, -1);
7300 tok_str_add(init_str, 0);
7301 } else {
7302 skip_or_save_block(&init_str);
7304 unget_tok(0);
7306 /* compute size */
7307 begin_macro(init_str, 1);
7308 next();
7309 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7310 /* prepare second initializer parsing */
7311 macro_ptr = init_str->str;
7312 next();
7314 /* if still unknown size, error */
7315 size = type_size(type, &align);
7316 if (size < 0)
7317 tcc_error("unknown type size");
7319 /* If there's a flex member and it was used in the initializer,
7320 adjust the size accordingly. */
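/* e.g. (GNU extension)
       struct S { int n; int data[]; } s = { 1, { 2, 3 } };
   the sizing pass recorded 2 elements in the flex member's ref->c,
   so 2 * sizeof(int) is added to the allocation */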
7321 if (flexible_array &&
7322 flexible_array->type.ref->c > 0)
7323 size += flexible_array->type.ref->c
7324 * pointed_size(&flexible_array->type);
7325 /* take into account specified alignment if bigger */
7326 if (ad->a.aligned) {
7327 int speca = 1 << (ad->a.aligned - 1);
7328 if (speca > align)
7329 align = speca;
7330 } else if (ad->a.packed) {
7331 align = 1;
7334 if (!v && NODATA_WANTED)
7335 size = 0, align = 1;
7337 if ((r & VT_VALMASK) == VT_LOCAL) {
7338 sec = NULL;
7339 #ifdef CONFIG_TCC_BCHECK
7340 if (bcheck && (type->t & VT_ARRAY)) {
7341 loc--;
7343 #endif
7344 loc = (loc - size) & -align;
7345 addr = loc;
7346 #ifdef CONFIG_TCC_BCHECK
7347 /* handles bounds */
7348 /* XXX: currently, since we do only one pass, we cannot track
7349 '&' operators, so we add only arrays */
7350 if (bcheck && (type->t & VT_ARRAY)) {
7351 addr_t *bounds_ptr;
7352 /* add padding between regions */
7353 loc--;
7354 /* then add local bound info */
7355 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7356 bounds_ptr[0] = addr;
7357 bounds_ptr[1] = size;
7359 #endif
7360 if (v) {
7361 /* local variable */
7362 #ifdef CONFIG_TCC_ASM
7363 if (ad->asm_label) {
7364 int reg = asm_parse_regvar(ad->asm_label);
7365 if (reg >= 0)
7366 r = (r & ~VT_VALMASK) | reg;
7368 #endif
7369 sym = sym_push(v, type, r, addr);
7370 if (ad->cleanup_func) {
7371 Sym *cls = sym_push2(&all_cleanups,
7372 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7373 cls->prev_tok = sym;
7374 cls->next = ad->cleanup_func;
7375 cls->ncl = cur_scope->cl.s;
7376 cur_scope->cl.s = cls;
7379 sym->a = ad->a;
7380 } else {
7381 /* push local reference */
7382 vset(type, r, addr);
7384 } else {
7385 if (v && scope == VT_CONST) {
7386 /* see if the symbol was already defined */
7387 sym = sym_find(v);
7388 if (sym) {
7389 patch_storage(sym, ad, type);
7390 /* we accept several definitions of the same global variable. */
7391 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7392 goto no_alloc;
7396 /* allocate symbol in corresponding section */
7397 sec = ad->section;
7398 if (!sec) {
7399 if (has_init)
7400 sec = data_section;
7401 else if (tcc_state->nocommon)
7402 sec = bss_section;
7405 if (sec) {
7406 addr = section_add(sec, size, align);
7407 #ifdef CONFIG_TCC_BCHECK
7408 /* add padding if bound check */
7409 if (bcheck)
7410 section_add(sec, 1, 1);
7411 #endif
7412 } else {
7413 addr = align; /* SHN_COMMON is special, symbol value is align */
7414 sec = common_section;
7417 if (v) {
7418 if (!sym) {
7419 sym = sym_push(v, type, r | VT_SYM, 0);
7420 patch_storage(sym, ad, NULL);
7422 /* update symbol definition */
7423 put_extern_sym(sym, sec, addr, size);
7424 } else {
7425 /* push global reference */
7426 vpush_ref(type, sec, addr, size);
7427 sym = vtop->sym;
7428 vtop->r |= r;
7431 #ifdef CONFIG_TCC_BCHECK
7432 /* handles bounds now because the symbol must be defined
7433 before the relocation can be emitted */
7434 if (bcheck) {
7435 addr_t *bounds_ptr;
7437 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7438 /* then add global bound info */
7439 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7440 bounds_ptr[0] = 0; /* relocated */
7441 bounds_ptr[1] = size;
7443 #endif
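/* VLAs, e.g.
       void f(int n) { char buf[n]; }
   are allocated at run time below; the stack pointer is saved once
   per function (see root_scope->vla.loc) so that vla_restore() can
   reset it at labels and gotos */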
7446 if (type->t & VT_VLA) {
7447 int a;
7449 if (NODATA_WANTED)
7450 goto no_alloc;
7452 /* save current stack pointer */
7453 if (root_scope->vla.loc == 0) {
7454 struct scope *v = cur_scope;
7455 gen_vla_sp_save(loc -= PTR_SIZE);
7456 do v->vla.loc = loc; while ((v = v->prev));
7459 vla_runtime_type_size(type, &a);
7460 gen_vla_alloc(type, a);
7461 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7462 /* on _WIN64, because of the function args scratch area, the
7463 result of alloca differs from RSP and is returned in RAX. */
7464 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7465 #endif
7466 gen_vla_sp_save(addr);
7467 cur_scope->vla.loc = addr;
7468 cur_scope->vla.num++;
7470 } else if (has_init) {
7471 size_t oldreloc_offset = 0;
7472 if (sec && sec->reloc)
7473 oldreloc_offset = sec->reloc->data_offset;
7474 decl_initializer(type, sec, addr, DIF_FIRST);
7475 if (sec && sec->reloc)
7476 squeeze_multi_relocs(sec, oldreloc_offset);
7477 /* patch flexible array member size back to -1, */
7478 /* for possible subsequent similar declarations */
7479 if (flexible_array)
7480 flexible_array->type.ref->c = -1;
7483 no_alloc:
7484 /* restore parse state if needed */
7485 if (init_str) {
7486 end_macro();
7487 next();
7490 nocode_wanted = saved_nocode_wanted;
7493 /* parse a function defined by symbol 'sym' and generate its code in
7494 'cur_text_section' */
7495 static void gen_function(Sym *sym)
7497 /* Initialize VLA state */
7498 struct scope f = { 0 };
7499 cur_scope = root_scope = &f;
7501 nocode_wanted = 0;
7502 ind = cur_text_section->data_offset;
7503 if (sym->a.aligned) {
7504 size_t newoff = section_add(cur_text_section, 0,
7505 1 << (sym->a.aligned - 1));
7506 gen_fill_nops(newoff - ind);
7508 /* NOTE: we patch the symbol size later */
7509 put_extern_sym(sym, cur_text_section, ind, 0);
7511 funcname = get_tok_str(sym->v, NULL);
7512 func_ind = ind;
7514 /* put debug symbol */
7515 tcc_debug_funcstart(tcc_state, sym);
7516 /* push a dummy symbol to enable local sym storage */
7517 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7518 local_scope = 1; /* for function parameters */
7519 gfunc_prolog(&sym->type);
7520 local_scope = 0;
7521 rsym = 0;
7522 clear_temp_local_var_list();
7523 block(0);
7524 gsym(rsym);
7525 nocode_wanted = 0;
7526 gfunc_epilog();
7527 cur_text_section->data_offset = ind;
7528 /* reset local stack */
7529 sym_pop(&local_stack, NULL, 0);
7530 local_scope = 0;
7531 label_pop(&global_label_stack, NULL, 0);
7532 sym_pop(&all_cleanups, NULL, 0);
7533 /* patch symbol size */
7534 elfsym(sym)->st_size = ind - func_ind;
7535 /* end of function */
7536 tcc_debug_funcend(tcc_state, ind - func_ind);
7537 /* It's better to crash than to generate wrong code */
7538 cur_text_section = NULL;
7539 funcname = ""; /* for safety */
7540 func_vt.t = VT_VOID; /* for safety */
7541 func_var = 0; /* for safety */
7542 ind = 0; /* for safety */
7543 nocode_wanted = 0x80000000;
7544 check_vstack();
7547 static void gen_inline_functions(TCCState *s)
7549 Sym *sym;
7550 int inline_generated, i;
7551 struct InlineFunc *fn;
7553 tcc_open_bf(s, ":inline:", 0);
7554 /* iterate while inline functions are referenced */
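/* e.g. when a used 'static inline' function itself calls another
   'static inline' function, generating the first marks the second as
   referenced, so a further pass is needed */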
7555 do {
7556 inline_generated = 0;
7557 for (i = 0; i < s->nb_inline_fns; ++i) {
7558 fn = s->inline_fns[i];
7559 sym = fn->sym;
7560 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7561 /* the function was used or forced (and thus not internal):
7562 generate its code and convert it to a normal function */
7563 fn->sym = NULL;
7564 if (file)
7565 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7566 begin_macro(fn->func_str, 1);
7567 next();
7568 cur_text_section = text_section;
7569 gen_function(sym);
7570 end_macro();
7572 inline_generated = 1;
7575 } while (inline_generated);
7576 tcc_close();
7579 ST_FUNC void free_inline_functions(TCCState *s)
7581 int i;
7582 /* free tokens of unused inline functions */
7583 for (i = 0; i < s->nb_inline_fns; ++i) {
7584 struct InlineFunc *fn = s->inline_fns[i];
7585 if (fn->sym)
7586 tok_str_free(fn->func_str);
7588 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7591 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7592 if parsing old style parameter decl list (and FUNC_SYM is set then) */
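/* The VT_CMP mode handles old-style definitions such as
       int f(a, b) int a; char b; { return a + b; }
   where the declarations between ')' and '{' are matched against the
   parameter list recorded in FUNC_SYM */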
7593 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7595 int v, has_init, r;
7596 CType type, btype;
7597 Sym *sym;
7598 AttributeDef ad, adbase;
7600 while (1) {
7601 if (tok == TOK_STATIC_ASSERT) {
7602 int c;
7604 next();
7605 skip('(');
7606 c = expr_const();
7607 skip(',');
7608 if (c == 0)
7609 tcc_error("%s", get_tok_str(tok, &tokc));
7610 next();
7611 skip(')');
7612 skip(';');
7613 continue;
7615 if (!parse_btype(&btype, &adbase)) {
7616 if (is_for_loop_init)
7617 return 0;
7618 /* skip redundant ';' if not in old parameter decl scope */
7619 if (tok == ';' && l != VT_CMP) {
7620 next();
7621 continue;
7623 if (l != VT_CONST)
7624 break;
7625 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7626 /* global asm block */
7627 asm_global_instr();
7628 continue;
7630 if (tok >= TOK_UIDENT) {
7631 /* special test for old K&R protos without explicit int
7632 type. Only accepted when defining global data */
7633 btype.t = VT_INT;
7634 } else {
7635 if (tok != TOK_EOF)
7636 expect("declaration");
7637 break;
7640 if (tok == ';') {
7641 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7642 int v = btype.ref->v;
7643 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7644 tcc_warning("unnamed struct/union that defines no instances");
7645 next();
7646 continue;
7648 if (IS_ENUM(btype.t)) {
7649 next();
7650 continue;
7653 while (1) { /* iterate thru each declaration */
7654 type = btype;
7655 /* If the base type itself was an array type of unspecified
7656 size (like in 'typedef int arr[]; arr x = {1};') then
7657 we will overwrite the unknown size by the real one for
7658 this decl. We need to unshare the ref symbol holding
7659 that size. */
7660 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7661 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7663 ad = adbase;
7664 type_decl(&type, &ad, &v, TYPE_DIRECT);
7665 #if 0
7667 char buf[500];
7668 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7669 printf("type = '%s'\n", buf);
7671 #endif
7672 if ((type.t & VT_BTYPE) == VT_FUNC) {
7673 /* if old style function prototype, we accept a
7674 declaration list */
7675 sym = type.ref;
7676 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7677 decl0(VT_CMP, 0, sym);
7678 /* always compile 'extern inline' */
7679 if (type.t & VT_EXTERN)
7680 type.t &= ~VT_INLINE;
7683 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7684 ad.asm_label = asm_label_instr();
7685 /* parse one last attribute list, after asm label */
7686 parse_attribute(&ad);
7687 #if 0
7688 /* gcc does not allow __asm__("label") with function definition,
7689 but why not ... */
7690 if (tok == '{')
7691 expect(";");
7692 #endif
7695 #ifdef TCC_TARGET_PE
7696 if (ad.a.dllimport || ad.a.dllexport) {
7697 if (type.t & VT_STATIC)
7698 tcc_error("cannot have dll linkage with static");
7699 if (type.t & VT_TYPEDEF) {
7700 tcc_warning("'%s' attribute ignored for typedef",
7701 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7702 (ad.a.dllexport = 0, "dllexport"));
7703 } else if (ad.a.dllimport) {
7704 if ((type.t & VT_BTYPE) == VT_FUNC)
7705 ad.a.dllimport = 0;
7706 else
7707 type.t |= VT_EXTERN;
7710 #endif
7711 if (tok == '{') {
7712 if (l != VT_CONST)
7713 tcc_error("cannot use local functions");
7714 if ((type.t & VT_BTYPE) != VT_FUNC)
7715 expect("function definition");
7717 /* reject abstract declarators in function definitions;
7718 make old-style params without a decl have int type */
7719 sym = type.ref;
7720 while ((sym = sym->next) != NULL) {
7721 if (!(sym->v & ~SYM_FIELD))
7722 expect("identifier");
7723 if (sym->type.t == VT_VOID)
7724 sym->type = int_type;
7727 /* put function symbol */
7728 type.t &= ~VT_EXTERN;
7729 sym = external_sym(v, &type, 0, &ad);
7730 /* static inline functions are just recorded as a kind
7731 of macro. Their code will be emitted at the end of
7732 the compilation unit only if they are used */
7733 if (sym->type.t & VT_INLINE) {
7734 struct InlineFunc *fn;
7735 const char *filename;
7737 filename = file ? file->filename : "";
7738 fn = tcc_malloc(sizeof *fn + strlen(filename));
7739 strcpy(fn->filename, filename);
7740 fn->sym = sym;
7741 skip_or_save_block(&fn->func_str);
7742 dynarray_add(&tcc_state->inline_fns,
7743 &tcc_state->nb_inline_fns, fn);
7744 } else {
7745 /* compute text section */
7746 cur_text_section = ad.section;
7747 if (!cur_text_section)
7748 cur_text_section = text_section;
7749 gen_function(sym);
7751 break;
7752 } else {
7753 if (l == VT_CMP) {
7754 /* find parameter in function parameter list */
7755 for (sym = func_sym->next; sym; sym = sym->next)
7756 if ((sym->v & ~SYM_FIELD) == v)
7757 goto found;
7758 tcc_error("declaration for parameter '%s' but no such parameter",
7759 get_tok_str(v, NULL));
7760 found:
7761 if (type.t & VT_STORAGE) /* 'register' is okay */
7762 tcc_error("storage class specified for '%s'",
7763 get_tok_str(v, NULL));
7764 if (sym->type.t != VT_VOID)
7765 tcc_error("redefinition of parameter '%s'",
7766 get_tok_str(v, NULL));
7767 convert_parameter_type(&type);
7768 sym->type = type;
7769 } else if (type.t & VT_TYPEDEF) {
7770 /* save typedefed type */
7771 /* XXX: test storage specifiers ? */
7772 sym = sym_find(v);
7773 if (sym && sym->sym_scope == local_scope) {
7774 if (!is_compatible_types(&sym->type, &type)
7775 || !(sym->type.t & VT_TYPEDEF))
7776 tcc_error("incompatible redefinition of '%s'",
7777 get_tok_str(v, NULL));
7778 sym->type = type;
7779 } else {
7780 sym = sym_push(v, &type, 0, 0);
7782 sym->a = ad.a;
7783 sym->f = ad.f;
7784 } else if ((type.t & VT_BTYPE) == VT_VOID
7785 && !(type.t & VT_EXTERN)) {
7786 tcc_error("declaration of void object");
7787 } else {
7788 r = 0;
7789 if ((type.t & VT_BTYPE) == VT_FUNC) {
7790 /* external function definition */
7791 /* specific case for func_call attribute */
7792 type.ref->f = ad.f;
7793 } else if (!(type.t & VT_ARRAY)) {
7794 /* not lvalue if array */
7795 r |= lvalue_type(type.t);
7797 has_init = (tok == '=');
7798 if (has_init && (type.t & VT_VLA))
7799 tcc_error("variable length array cannot be initialized");
7800 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7801 || (type.t & VT_BTYPE) == VT_FUNC
7802 /* as with GCC, uninitialized global arrays with no size
7803 are considered extern: */
7804 || ((type.t & VT_ARRAY) && !has_init
7805 && l == VT_CONST && type.ref->c < 0)
7807 /* external variable or function */
7808 type.t |= VT_EXTERN;
7809 sym = external_sym(v, &type, r, &ad);
7810 if (ad.alias_target) {
7811 ElfSym *esym;
7812 Sym *alias_target;
7813 alias_target = sym_find(ad.alias_target);
7814 esym = elfsym(alias_target);
7815 if (!esym)
7816 tcc_error("unsupported forward __alias__ attribute");
7817 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7819 } else {
7820 if (type.t & VT_STATIC)
7821 r |= VT_CONST;
7822 else
7823 r |= l;
7824 if (has_init)
7825 next();
7826 else if (l == VT_CONST)
7827 /* uninitialized global variables may be overridden */
7828 type.t |= VT_EXTERN;
7829 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7832 if (tok != ',') {
7833 if (is_for_loop_init)
7834 return 1;
7835 skip(';');
7836 break;
7838 next();
7842 return 0;
7845 static void decl(int l)
7847 decl0(l, 0, NULL);
7850 /* ------------------------------------------------------------------------- */
7851 #undef gjmp_addr
7852 #undef gjmp
7853 /* ------------------------------------------------------------------------- */