riscv: some long double support
[tinycc.git] / tccgen.c
blob5ea91f6e001c3c6227ce87fab196052545a5c111
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind; }
63 /* Set 'nocode_wanted' after unconditional jumps */
64 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
65 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
69 #define gjmp gjmp_acs
70 /* <---- */
72 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA struct temp_local_variable {
95 int location; //offset on stack. Svalue.c.i
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC int ieee_finite(double d)
149 int p[4];
150 memcpy(p, &d, sizeof(double));
151 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
158 #endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
175 #if 0
176 void pv (const char *lbl, int a, int b)
178 int i;
179 for (i = a; i < a + b; ++i) {
180 SValue *p = &vtop[-i];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
185 #endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
/* put function symbol */
ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
{
    char buf[512];

    if (!s1->do_debug)
        return;

    /* stabs info */
    /* XXX: we put here a dummy type */
    /* stab letter: 'f' = file-local (static) function, 'F' = global */
    snprintf(buf, sizeof(buf), "%s:%c1",
             funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
    put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
                cur_text_section, sym->c);
    /* //gr gdb wants a line at the function */
    put_stabn(N_SLINE, 0, file->line_num, 0);

    /* reset so tcc_debug_line() re-emits from the function start */
    last_ind = 0;
    last_line_num = 0;
}
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
/* Compile the current translation unit: reset per-unit code generator
   state, build the predefined basic types, then parse and generate code
   for all top-level declarations.  Returns 0 on success (errors are
   reported through tcc_error). */
ST_FUNC int tccgen_compile(TCCState *s1)
{
    cur_text_section = NULL;
    funcname = "";
    anon_sym = SYM_FIRST_ANOM;
    section_sym = 0;
    const_wanted = 0;
    /* 0x80000000: STATIC_DATA_WANTED bit — suppress code generation at
       file scope while still allowing static data output */
    nocode_wanted = 0x80000000;
    local_scope = 0;

    /* define some often used types */
    int_type.t = VT_INT;
    char_pointer_type.t = VT_BYTE;
    mk_pointer(&char_pointer_type);
#if PTR_SIZE == 4
    size_type.t = VT_INT | VT_UNSIGNED;
    ptrdiff_type.t = VT_INT;
#elif LONG_SIZE == 4
    size_type.t = VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LLONG;
#else
    size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LONG | VT_LLONG;
#endif
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;

    tcc_debug_start(s1);

#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif

#ifdef INC_DEBUG
    printf("%s: **** new file\n", file->filename);
#endif

    parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
    next();
    decl(VT_CONST);
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
    return 0;
}
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
/* apply storage attributes to Elf symbol */
ST_FUNC void update_storage(Sym *sym)
{
    ElfSym *esym;
    int sym_bind, old_sym_bind;

    esym = elfsym(sym);
    if (!esym)
        return;   /* no ELF symbol emitted yet; nothing to update */

    /* copy visibility (hidden/protected/...) into st_other */
    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;

    /* derive binding: static/inline -> local, weak attr -> weak,
       otherwise global */
    if (sym->type.t & (VT_STATIC | VT_INLINE))
        sym_bind = STB_LOCAL;
    else if (sym->a.weak)
        sym_bind = STB_WEAK;
    else
        sym_bind = STB_GLOBAL;
    old_sym_bind = ELFW(ST_BIND)(esym->st_info);
    if (sym_bind != old_sym_bind) {
        /* keep the symbol type bits, replace only the binding */
        esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
    }

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif

#if 0
    printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
        get_tok_str(sym->v, NULL),
        sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
        sym->a.visibility,
        sym->a.dllexport,
        sym->a.dllimport
        );
#endif
}
370 /* ------------------------------------------------------------------------- */
/* update sym->c so that it points to an external symbol in section
   'section' with value 'value' */
/* On first use (sym->c == 0) the ELF symbol is created: the name may be
   decorated ("__bound_" prefix for bounds checking, stdcall "@N" suffix
   and/or leading underscore on PE targets) before put_elf_sym.  On later
   calls only value/size/section of the existing ELF symbol are updated.
   Finally storage attributes are (re)applied via update_storage. */
ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
                             addr_t value, unsigned long size,
                             int can_add_underscore)
{
    int sym_type, sym_bind, info, other, t;
    ElfSym *esym;
    const char *name;
    char buf1[256];
#ifdef CONFIG_TCC_BCHECK
    char buf[32];
#endif

    if (!sym->c) {
        name = get_tok_str(sym->v, NULL);
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check) {
            /* XXX: avoid doing that for statics ? */
            /* if bound checking is activated, we change some function
               names by adding the "__bound" prefix */
            switch(sym->v) {
#ifdef TCC_TARGET_PE
            /* XXX: we rely only on malloc hooks */
            case TOK_malloc:
            case TOK_free:
            case TOK_realloc:
            case TOK_memalign:
            case TOK_calloc:
#endif
            case TOK_memcpy:
            case TOK_memmove:
            case TOK_memset:
            case TOK_strlen:
            case TOK_strcpy:
            case TOK_alloca:
                strcpy(buf, "__bound_");
                strcat(buf, name);
                name = buf;
                break;
            }
        }
#endif
        /* map the C type to an ELF symbol type */
        t = sym->type.t;
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
        } else {
            sym_type = STT_OBJECT;
        }
        if (t & (VT_STATIC | VT_INLINE))
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;
#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->a.nodecorate) {
                can_add_underscore = 0;
            }
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                /* stdcall decoration: _name@<bytes of args> */
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        /* an explicit asm label overrides every decoration above */
        if (sym->asm_label)
            name = get_tok_str(sym->asm_label, NULL);
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
    } else {
        esym = elfsym(sym);
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit targets: relocation entry with an implicit zero addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym *__sym_malloc(void)
497 Sym *sym_pool, *sym, *last_sym;
498 int i;
500 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
501 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
503 last_sym = sym_free_first;
504 sym = sym_pool;
505 for(i = 0; i < SYM_POOL_NB; i++) {
506 sym->next = last_sym;
507 last_sym = sym;
508 sym++;
510 sym_free_first = last_sym;
511 return last_sym;
/* allocate one Sym: pop from the pool free list normally, or use plain
   malloc under SYM_DEBUG so each Sym is individually tracked */
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();   /* free list exhausted: grow a new pool */
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}
/* release a Sym: push it back on the pool free list, or really free it
   under SYM_DEBUG */
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
/* push a given symbol on the symbol stack */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    /* innermost open stack wins: local if a function body is active */
    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        /* same name in the very same scope is a redeclaration error */
        if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                      get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    s->r = VT_CONST | VT_SYM;
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that sym_identifier will
           point to 's' when popped; happens when called from inline asm */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while(s != b) {
        ss = s->prev;   /* remember next before a possible free */
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
676 /* ------------------------------------------------------------------------- */
/* materialize a pending comparison result into a register before any
   other instruction is generated */
static void vcheck_cmp(void)
{
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real.  All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again. */

    if (vtop->r == VT_CMP && !nocode_wanted)
        gv(RC_INT);
}
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_CMP) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->jtrue);
        gsym(vtop->jfalse);
    }
    vtop--;
}
737 /* push constant of type "type" with useless value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC void vrotb(int n)
810 int i;
811 SValue tmp;
813 vcheck_cmp();
814 tmp = vtop[-n + 1];
815 for(i=-n+1;i!=0;i++)
816 vtop[i] = vtop[i+1];
817 vtop[0] = tmp;
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC void vrote(SValue *e, int n)
825 int i;
826 SValue tmp;
828 vcheck_cmp();
829 tmp = *e;
830 for(i = 0;i < n - 1; i++)
831 e[-i] = e[-i - 1];
832 e[-n + 1] = tmp;
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC void vrott(int n)
840 vrote(vtop, n);
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC void vset_VT_CMP(int op)
849 vtop->r = VT_CMP;
850 vtop->cmp_op = op;
851 vtop->jfalse = 0;
852 vtop->jtrue = 0;
/* called once before asking generators to load VT_CMP to a register */
static void vset_VT_JMP(void)
{
    int op = vtop->cmp_op;

    if (vtop->jtrue || vtop->jfalse) {
        /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
        int inv = op & (op < 2); /* small optimization */
        vseti(VT_JMP+inv, gvtst(inv, 0));
    } else {
        /* otherwise convert flags (rsp. 0/1) to register */
        vtop->c.i = op;
        if (op < 2) /* doesn't seem to happen */
            vtop->r = VT_CONST;
    }
}
/* Set CPU Flags, doesn't yet jump */
static void gvtst_set(int inv, int t)
{
    int *p;

    if (vtop->r != VT_CMP) {
        /* not already a comparison: compare the value against zero */
        vpushi(0);
        gen_op(TOK_NE);
        if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
            ;
        else if (vtop->r == VT_CONST)
            vset_VT_CMP(vtop->c.i != 0);
        else
            tcc_error("ICE");
    }

    /* chain 't' onto the pending true jumps (false jumps when inverted) */
    p = inv ? &vtop->jfalse : &vtop->jtrue;
    *p = gjmp_append(*p, t);
}
/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
static int gvtst(int inv, int t)
{
    int op, u, x;

    gvtst_set(inv, t);
    t = vtop->jtrue, u = vtop->jfalse;
    if (inv)
        x = u, u = t, t = x;   /* swap the true/false jump chains */
    op = vtop->cmp_op;

    /* jump to the wanted target */
    if (op > 1)
        t = gjmp_cond(op ^ inv, t);   /* real comparison: conditional jump */
    else if (op != inv)
        t = gjmp(t);                  /* constant 0/1: unconditional jump */
    /* resolve complementary jumps to here */
    gsym(u);

    vtop--;
    return t;
}
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType *type, Sym *sym)
919 CValue cval;
920 cval.i = 0;
921 vsetc(type, VT_CONST | VT_SYM, &cval);
922 vtop->sym = sym;
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
928 int v;
929 Sym *sym;
931 v = anon_sym++;
932 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
933 sym->type.t |= VT_STATIC;
934 put_extern_sym(sym, sec, offset, size);
935 return sym;
/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
    } else if (IS_ASM_SYM(s)) {
        /* symbol first seen from inline asm: adopt the C type, keep
           its extern flag */
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
965 if (sa1->aligned && !sa->aligned)
966 sa->aligned = sa1->aligned;
967 sa->packed |= sa1->packed;
968 sa->weak |= sa1->weak;
969 if (sa1->visibility != STV_DEFAULT) {
970 int vis = sa->visibility;
971 if (vis == STV_DEFAULT
972 || vis > sa1->visibility)
973 vis = sa1->visibility;
974 sa->visibility = vis;
976 sa->dllexport |= sa1->dllexport;
977 sa->nodecorate |= sa1->nodecorate;
978 sa->dllimport |= sa1->dllimport;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
984 if (fa1->func_call && !fa->func_call)
985 fa->func_call = fa1->func_call;
986 if (fa1->func_type && !fa->func_type)
987 fa->func_type = fa1->func_type;
988 if (fa1->func_args && !fa->func_args)
989 fa->func_args = fa1->func_args;
/* Merge attributes. */
static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
{
    merge_symattr(&ad->a, &ad1->a);
    merge_funcattr(&ad->f, &ad1->f);

    /* the newer attribute set overrides when present */
    if (ad1->section)
        ad->section = ad1->section;
    if (ad1->alias_target)
        ad->alias_target = ad1->alias_target;
    if (ad1->asm_label)
        ad->asm_label = ad1->asm_label;
    if (ad1->attr_mode)
        ad->attr_mode = ad1->attr_mode;
}
/* Merge some type attributes. */
/* Reconcile an existing symbol's type with a new (re)declaration:
   detects redefinitions, merges static/inline/extern storage for
   functions, and completes array sizes omitted in extern declarations. */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
                || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
        } else {
            sym->type.t &= ~VT_INLINE | static_proto;
        }
        /* a prototyped declaration supersedes an old-style (K&R) one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    /* propagate merged attributes to the ELF symbol, if any */
    update_storage(sym);
}
/* copy sym to other stack */
static Sym *sym_copy(Sym *s0, Sym **ps)
{
    Sym *s;
    s = sym_malloc(), *s = *s0;
    /* link copy on top of the destination stack */
    s->prev = *ps, *ps = s;
    if (s->v < SYM_FIRST_ANOM) {
        /* also make the copy the current binding in the token array */
        ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
        s->prev_tok = *ps, *ps = s;
    }
    return s;
}
/* copy a list of syms */
/* duplicates the 'next' chain hanging off s0->type.ref onto stack 'ps',
   rebuilding s0->type.ref to point at the copies */
static void sym_copy_ref(Sym *s0, Sym **ps)
{
    Sym *s, **sp = &s0->type.ref;
    for (s = *sp, *sp = NULL; s; s = s->next)
        sp = &(*sp = sym_copy(s, ps))->next;
}
/* define a new external reference to a symbol 'v' */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s; int bt;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;   /* skip local bindings, keep the global one */

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        bt = s->type.t & (VT_BTYPE|VT_ARRAY);
        /* copy type to the global stack also */
        if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
        bt = s->type.t & VT_BTYPE;
    }
    /* push variables to local scope if any */
    if (local_stack && bt != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
/* push a reference to global symbol v */
ST_FUNC void vpush_global_sym(CType *type, int v)
{
    vpushsym(type, external_global_sym(v, type));
}
1144 /* save registers up to (vtop - n) stack entry */
1145 ST_FUNC void save_regs(int n)
1147 SValue *p, *p1;
1148 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1149 save_reg(p->r);
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, saved, size, align;
    SValue *p, *p1, sv;
    CType *type;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;   /* not a real register */
    if (nocode_wanted)
        return;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                /* an lvalue holds an address; non-float, non-llong
                   rvalues fit a machine word: spill as pointer/int */
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
#if PTR_SIZE == 8
                    type = &char_pointer_type;
#else
                    type = &int_type;
#endif
                size = type_size(type, &align);
                l=get_temp_local_var(size,align);
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
#if PTR_SIZE == 4
                /* special long long case */
                if ((type->t & VT_BTYPE) == VT_LLONG) {
                    sv.c.i += 4;
                    store(p->r2, &sv);
                }
#endif
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            /* count how many value-stack entries reference r (either as
               primary register r or secondary register r2) */
            n=0;
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    /* no lightly-used register in rc2: fall back to normal allocation */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when no code is generated, register contents never matter */
            if (nocode_wanted)
                return r;
            /* a register is free if no value-stack entry uses it as
               primary (r) or secondary (r2) register */
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1291 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1292 static int get_temp_local_var(int size,int align){
1293 int i;
1294 struct temp_local_variable *temp_var;
1295 int found_var;
1296 SValue *p;
1297 int r;
1298 char free;
1299 char found;
1300 found=0;
1301 for(i=0;i<nb_temp_local_vars;i++){
1302 temp_var=&arr_temp_local_vars[i];
1303 if(temp_var->size<size||align!=temp_var->align){
1304 continue;
1306 /*check if temp_var is free*/
1307 free=1;
1308 for(p=vstack;p<=vtop;p++) {
1309 r=p->r&VT_VALMASK;
1310 if(r==VT_LOCAL||r==VT_LLOCAL){
1311 if(p->c.i==temp_var->location){
1312 free=0;
1313 break;
1317 if(free){
1318 found_var=temp_var->location;
1319 found=1;
1320 break;
1323 if(!found){
1324 loc = (loc - size) & -align;
1325 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1326 temp_var=&arr_temp_local_vars[i];
1327 temp_var->location=loc;
1328 temp_var->size=size;
1329 temp_var->align=align;
1330 nb_temp_local_vars++;
1332 found_var=loc;
1334 return found_var;
1337 static void clear_temp_local_var_list(){
1338 nb_temp_local_vars=0;
1341 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1342 if needed */
1343 static void move_reg(int r, int s, int t)
1345 SValue sv;
1347 if (r != s) {
1348 save_reg(r);
1349 sv.type.t = t;
1350 sv.type.ref = NULL;
1351 sv.r = s;
1352 sv.c.i = 0;
1353 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    /* dropping VT_LVAL turns "value at address" into "the address itself" */
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            /* ptr + 0 goes through the bounded-add helper, which tags
               the pointer as checked */
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
/* advance the bitfield address on vtop by 'o' bytes and turn it into an
   unsigned byte lvalue (helper for the packed bitfield load/store below) */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushi(o);
    gen_op('+');
    vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
        | (VT_BYTE|VT_UNSIGNED);
    vtop->r = (vtop->r & ~VT_LVAL_TYPE)
        | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
}
/* single-byte load mode for packed or otherwise unaligned bitfields */
/* Reads the field byte by byte, assembling the bits into one value of
   'type'; the trailing shifts sign-extend when the field is signed.
   Comments like "X B" describe the value-stack layout (top on the right). */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign extend: shift the field to the top, then arithmetic
           shift it back down */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields */
/* Writes the value byte by byte with a read-modify-write per byte so
   that neighbouring bits are preserved.  Stack-layout comments as in
   load_packed_bf; 'c' is true when the stored value is a constant. */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;

    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* merge the new bits with the byte's untouched bits */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1477 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1479 int t;
1480 if (0 == sv->type.ref)
1481 return 0;
1482 t = sv->type.ref->auxtype;
1483 if (t != -1 && t != VT_STRUCT) {
1484 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1485 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1487 return t;
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
/* Returns the register now holding the value. */
ST_FUNC int gv(int rc)
{
    int r, bit_pos, bit_size, size, align, rc2;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* packed/unaligned bitfield: byte-wise extraction */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            unsigned long offset;
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(data_section, size, align);
            vpush_ref(&vtop->type, data_section, offset, size);
            vswap();
            init_putv(&vtop->type, data_section, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif

        r = vtop->r & VT_VALMASK;
        /* rc2 is the class for the second register of a two-register value */
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
#ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif
#endif
        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
         || (vtop->r & VT_LVAL)
         || !(reg_classes[r] & rc)
#ifdef TCC_TARGET_RISCV64
         || ((vtop->type.t & VT_BTYPE) == VT_QLONG && (vtop->r2 >= NB_REGS || !(reg_classes[vtop->r2] & rc2)))
         || ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE && (vtop->r2 >= NB_REGS || !(reg_classes[vtop->r2] & rc2)))
#elif PTR_SIZE == 8
         || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
         || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
         || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            )
        {
            r = get_reg(rc);
#ifdef TCC_TARGET_RISCV64
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = VT_LLONG;
#elif PTR_SIZE == 8
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if PTR_SIZE == 4
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
               #if 0
                    save_regs(1);
               #else
                    /* lvalue_save: save only if used further down the stack */
                    save_reg_upstack(vtop->r, 1);
               #endif
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        /* (generating the second value may have spilled the first) */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#ifndef TCC_TARGET_ARM64
/* wrapper around RC_FRET to return a register by type */
/* 't' is the basic type of the returned value; long double needs a
   special class on x86-64 (x87 st0) and riscv64 (integer pair). */
static int rc_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return RC_ST0;
    }
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return RC_IRET;
#endif
    return RC_FRET;
}
#endif
/* wrapper around REG_FRET to return a register by type */
/* same target-specific long double handling as rc_fret() above */
static int reg_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return TREG_ST0;
    }
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return REG_IRET;
#endif
    return REG_FRET;
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
/* After this, vtop[-1] is the low word and vtop[0] the high word,
   both typed VT_INT (with the original signedness flags kept in 'u'). */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: the high word is just the upper 32 bits */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: the high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* register pair: split r/r2 into two single-register values */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
/* vtop[-1] is the low word, vtop[0] the high word; the result is a
   single value of type 't' using a register pair (r, r2). */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int rc, t, r, r1;
    SValue sv;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* materialize the bitfield first; gv() strips the
               bitfield bits from the type */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate low and high words separately, then rebuild two
           long long values */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
    } else
#endif
    {
        /* duplicate value */
        rc = RC_INT;
        sv.type.t = VT_INT;
        if (is_float(t)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((t & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            }
#elif defined TCC_TARGET_RISCV64
            if ((t & VT_BTYPE) == VT_LDOUBLE)
                rc = RC_INT;
#endif
            sv.type.t = t;
        }
        r = gv(rc);
        r1 = get_reg(rc);
        sv.r = r;
        sv.c.i = 0;
        load(r1, &sv); /* move r to r1 */
        vdup();
        /* duplicates value */
        if (r != r1)
            vtop->r = r1;
    }
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
/* On 32-bit targets a 64-bit value occupies two machine words; this
   synthesizes each operation from 32-bit ops or libgcc helper calls.
   Stack-layout comments list values bottom-to-top (top on the right). */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_LRET;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_global_sym(&func_old_type, func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: L1*L2 (unsigned widening) plus the two
               cross products folded into the high word */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops apply independently to each word */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift: fill with copies of the sign bit */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* Signed 64-bit division in terms of unsigned arithmetic: divide the
   magnitudes, then negate the quotient iff the operand signs differ.
   (Unary minus on uint64_t wraps, which is exactly two's complement.) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;
    uint64_t ub = (b >> 63) ? -b : b;
    uint64_t q = ua / ub;

    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed 64-bit '<' done with an unsigned compare: flipping the sign
   bit maps the signed range onto the unsigned range order-preservingly. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;

    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt */
/* Folds the operation when both operands are constants, applies
   algebraic simplifications (x*1, x&0, strength-reduce mul/div by a
   power of two, symbol+constant), and otherwise emits code via
   gen_opi()/gen_opl(). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    /* normalize 32-bit operands to sign- or zero-extended 64-bit */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*')) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                          op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
}
/* generate a floating point operation with constant propagation */
/* Folds the op when both operands are finite constants; otherwise
   defers to the target's gen_opf(). */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        /* read both constants at long double precision */
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }

        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        if (!ieee_finite(f1) || !ieee_finite(f2))
            goto general_case;

        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold. */
                if (!const_wanted)
                    goto general_case;
            }
            f1 /= f2;
            break;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        /* XXX: overflow test ? */
        /* write the result back in the operand's original precision */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
        vtop--;
    } else {
    general_case:
        gen_opf(op);
    }
}
2295 static int pointed_size(CType *type)
2297 int align;
2298 return type_size(pointed_type(type), &align);
/* push code computing, at runtime, the size of the type 'type' points
   to (needed for VLA pointee types whose size is not a compile-time
   constant) */
static void vla_runtime_pointed_size(CType *type)
{
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
}
/* return non-zero if 'p' is a null pointer constant: an integer
   constant expression with value 0, or such an expression cast to
   (void *) with no const/volatile qualifiers on the pointee */
static inline int is_null_pointer(SValue *p)
{
    if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        return 0;
    return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_PTR &&
         (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
         ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
         0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
        );
}
2319 static inline int is_integer_btype(int bt)
2321 return (bt == VT_BYTE || bt == VT_SHORT ||
2322 bt == VT_INT || bt == VT_LLONG);
/* check types for comparison or subtraction of pointers */
/* Emits warnings/errors mimicking gcc's diagnostics; 'op' is the
   operator being applied to p1 and p2. */
static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
{
    CType *type1, *type2, tmp_type1, tmp_type2;
    int bt1, bt2;

    /* null pointers are accepted for all comparisons as gcc */
    if (is_null_pointer(p1) || is_null_pointer(p2))
        return;
    type1 = &p1->type;
    type2 = &p2->type;
    bt1 = type1->t & VT_BTYPE;
    bt2 = type2->t & VT_BTYPE;
    /* accept comparison between pointer and integer with a warning */
    if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
        if (op != TOK_LOR && op != TOK_LAND )
            tcc_warning("comparison between pointer and integer");
        return;
    }

    /* both must be pointers or implicit function pointers */
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
    } else if (bt1 != VT_FUNC)
        goto invalid_operands;

    if (bt2 == VT_PTR) {
        type2 = pointed_type(type2);
    } else if (bt2 != VT_FUNC) {
    invalid_operands:
        tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
    }
    /* void * compares with any object pointer */
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID)
        return;
    /* compare pointee types ignoring sign and qualifiers */
    tmp_type1 = *type1;
    tmp_type2 = *type2;
    tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        /* gcc-like error if '-' is used */
        if (op == '-')
            goto invalid_operands;
        else
            tcc_warning("comparison of distinct pointer types lacks a cast");
    }
}
/* generic gen_op: handles types problems */
/* Apply binary operator 'op' to the two values on top of the value
   stack: performs the usual arithmetic conversions, function-to-pointer
   decay, pointer arithmetic scaling and pointer comparison checks, then
   emits the operation through gen_opic()/gen_opif().  The result
   replaces both operands on the stack. */
ST_FUNC void gen_op(int op)
{
    int u, t1, t2, bt1, bt2, t;
    CType type1;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        tcc_error("operation on a struct");
    } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function operands decay to pointers-to-function, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        if (op >= TOK_ULT && op <= TOK_LOR) {
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* pointers are handled are unsigned */
#if PTR_SIZE == 8
            t = VT_LLONG | VT_UNSIGNED;
#else
            t = VT_INT | VT_UNSIGNED;
#endif
            goto std_op;
        }
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* XXX: check that types are compatible */
            if (vtop[-1].type.t & VT_VLA) {
                vla_runtime_pointed_size(&vtop[-1].type);
            } else {
                vpushi(pointed_size(&vtop[-1].type));
            }
            /* subtract the pointers, then divide by the element size
               (TOK_PDIV) to obtain the element count */
            vrott(3);
            gen_opic(op);
            vtop->type.t = ptrdiff_type.t;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            type1.t &= ~VT_ARRAY;
            if (vtop[-1].type.t & VT_VLA)
                vla_runtime_pointed_size(&vtop[-1].type);
            else {
                u = pointed_size(&vtop[-1].type);
                if (u < 0)
                    tcc_error("unknown array element size");
#if PTR_SIZE == 8
                vpushll(u);
#else
                /* XXX: cast to int ? (long long case) */
                vpushi(u);
#endif
            }
            /* scale the integer operand by the element size */
            gen_op('*');
#if 0
/* #ifdef CONFIG_TCC_BCHECK
    The main reason to removing this code:
	#include <stdio.h>
	int main ()
	{
	    int v[10];
	    int i = 10;
	    int j = 9;
	    fprintf(stderr, "v+i-j  = %p\n", v+i-j);
	    fprintf(stderr, "v+(i-j)  = %p\n", v+(i-j));
	}
    When this code is on. then the output looks like
	v+i-j = 0xfffffffe
	v+(i-j) = 0xbff84000
    */
            /* if evaluating constant expression, no code should be
               generated, so no bound check */
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            gen_opic(op);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else if (is_float(bt1) || is_float(bt2)) {
        /* compute bigger type and do implicit casts */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            t = VT_DOUBLE;
        } else {
            t = VT_FLOAT;
        }
        /* floats can only be used for a few operations */
        if (op != '+' && op != '-' && op != '*' && op != '/' &&
            (op < TOK_ULT || op > TOK_GT))
            tcc_error("invalid operands for binary operation");
        goto std_op;
    } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
        /* shifts: result type follows the (promoted) left operand only */
        t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        t |= (VT_LONG & t1);
        goto std_op;
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            t &= t1;
        if (bt2 == VT_LLONG)
            t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        goto std_op;
    } else {
        /* integer operations */
        t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            t |= VT_UNSIGNED;
    std_op:
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        /* cast both operands to the common type 't' */
        vswap();
        type1.t = t;
        type1.ref = NULL;
        gen_cast(&type1);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            type1.t = VT_INT;
        gen_cast(&type1);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (op >= TOK_ULT && op <= TOK_GT) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
#ifndef TCC_TARGET_ARM
/* generic itof for unsigned long long case */
/* Convert the integer on vtop to floating type 't'.  Unsigned 64-bit
   sources go through libgcc-style runtime helpers on targets without a
   native unsigned conversion. */
static void gen_cvt_itof1(int t)
{
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
    /* these targets handle the unsigned 64-bit case natively */
    gen_cvt_itof(t);
#else
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        if (t == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___floatundixf);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* the helper's result is in the target's float return register */
        vtop->r = reg_fret(t);
    } else {
        gen_cvt_itof(t);
    }
#endif
}
#endif
/* generic ftoi for unsigned long long case */
/* Convert the float on vtop to integer type 't'.  Float-to-unsigned-
   long-long goes through libgcc-style runtime helpers on targets that
   lack a native instruction for it. */
static void gen_cvt_ftoi1(int t)
{
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
    gen_cvt_ftoi(t);
#else
    int st;

    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* 64-bit integer result lives in the integer return register pair */
        vtop->r = REG_IRET;
        vtop->r2 = REG_LRET;
    } else {
        gen_cvt_ftoi(t);
    }
#endif
}
2636 /* force char or short cast */
2637 static void force_charshort_cast(int t)
2639 int bits, dbt;
2641 /* cannot cast static initializers */
2642 if (STATIC_DATA_WANTED)
2643 return;
2645 dbt = t & VT_BTYPE;
2646 /* XXX: add optimization if lvalue : just change type and offset */
2647 if (dbt == VT_BYTE)
2648 bits = 8;
2649 else
2650 bits = 16;
2651 if (t & VT_UNSIGNED) {
2652 vpushi((1 << bits) - 1);
2653 gen_op('&');
2654 } else {
2655 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2656 bits = 64 - bits;
2657 else
2658 bits = 32 - bits;
2659 vpushi(bits);
2660 gen_op(TOK_SHL);
2661 /* result must be signed or the SAR is converted to an SHL
2662 This was not the case when "t" was a signed short
2663 and the last value on the stack was an unsigned int */
2664 vtop->type.t &= ~VT_UNSIGNED;
2665 vpushi(bits);
2666 gen_op(TOK_SAR);
2670 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2671 static void gen_cast_s(int t)
2673 CType type;
2674 type.t = t;
2675 type.ref = NULL;
2676 gen_cast(&type);
/* Cast the value on top of the stack to '*type'.  Constants are
   converted at compile time; otherwise conversion code is emitted.
   On return vtop carries '*type' with CONST/VOLATILE/ARRAY stripped. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c, p;

    /* special delayed cast for char/short */
    /* XXX: in some cases (multiple cascaded casts), it may still
       be incorrect */
    if (vtop->r & VT_MUSTCAST) {
        vtop->r &= ~VT_MUSTCAST;
        force_charshort_cast(vtop->type.t);
    }

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD) {
        gv(RC_INT);
    }

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);

    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        /* c: plain numeric constant, p: address constant (symbol) */
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
        p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* when cross-compiling, don't fold to long double with the
           host's (possibly different) long double format */
        c &= dbt != VT_LDOUBLE;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if ((sbt & VT_BTYPE) == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
                vtop->c.i = vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* first normalize the source into c.i ... */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt == (VT_LLONG|VT_UNSIGNED))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
#if PTR_SIZE == 8
                else if (sbt == VT_PTR)
                    ;
#endif
                else if (sbt != VT_LLONG)
                    vtop->c.i = ((uint32_t)vtop->c.i |
                                  -(vtop->c.i & 0x80000000));

                /* ... then truncate/extend to the destination width */
                if (dbt == (VT_LLONG|VT_UNSIGNED))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
#if PTR_SIZE == 8
                else if (dbt == VT_PTR)
                    ;
#endif
                else if (dbt != VT_LLONG) {
                    uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
                                  (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
                                  0xffffffff);
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
        } else if (p && dbt == VT_BOOL) {
            /* a symbol address is never null, so the bool is 1 */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
        } else {
            /* non constant case: generate code */
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else if (sf) {
                /* convert fp to int */
                if (dbt == VT_BOOL) {
                    vpushi(0);
                    gen_op(TOK_NE);
                } else {
                    /* we handle char/short/etc... with generic code */
                    if (dbt != (VT_INT | VT_UNSIGNED) &&
                        dbt != (VT_LLONG | VT_UNSIGNED) &&
                        dbt != VT_LLONG)
                        dbt = VT_INT;
                    gen_cvt_ftoi1(dbt);
                    if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
                        /* additional cast for char/short... */
                        vtop->type.t = dbt;
                        gen_cast(type);
                    }
                }
#if PTR_SIZE == 4
            } else if ((dbt & VT_BTYPE) == VT_LLONG) {
                if ((sbt & VT_BTYPE) != VT_LLONG) {
                    /* scalar to long long */
                    /* machine independent conversion */
                    gv(RC_INT);
                    /* generate high word */
                    if (sbt == (VT_INT | VT_UNSIGNED)) {
                        vpushi(0);
                        gv(RC_INT);
                    } else {
                        if (sbt == VT_PTR) {
                            /* cast from pointer to int before we apply
                               shift operation, which pointers don't support*/
                            gen_cast_s(VT_INT);
                        }
                        gv_dup();
                        vpushi(31);
                        gen_op(TOK_SAR);
                    }
                    /* patch second register */
                    vtop[-1].r2 = vtop->r;
                    vpop();
                }
#else
            } else if ((dbt & VT_BTYPE) == VT_LLONG ||
                       (dbt & VT_BTYPE) == VT_PTR ||
                       (dbt & VT_BTYPE) == VT_FUNC) {
                if ((sbt & VT_BTYPE) != VT_LLONG &&
                    (sbt & VT_BTYPE) != VT_PTR &&
                    (sbt & VT_BTYPE) != VT_FUNC) {
                    /* need to convert from 32bit to 64bit */
                    gv(RC_INT);
                    if (sbt != (VT_INT | VT_UNSIGNED)) {
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
                        gen_cvt_sxtw();
#elif defined(TCC_TARGET_X86_64)
                        int r = gv(RC_INT);
                        /* x86_64 specific: movslq */
                        o(0x6348);
                        o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
#else
#error
#endif
                    }
                }
#endif
            } else if (dbt == VT_BOOL) {
                /* scalar to bool */
                vpushi(0);
                gen_op(TOK_NE);
            } else if ((dbt & VT_BTYPE) == VT_BYTE ||
                       (dbt & VT_BTYPE) == VT_SHORT) {
                if (sbt == VT_PTR) {
                    vtop->type.t = VT_INT;
                    tcc_warning("nonportable conversion from pointer to char/short");
                }
                force_charshort_cast(dbt);
            } else if ((dbt & VT_BTYPE) == VT_INT) {
                /* scalar to int */
                if ((sbt & VT_BTYPE) == VT_LLONG) {
#if PTR_SIZE == 4
                    /* from long long: just take low order word */
                    lexpand();
                    vpop();
#else
                    /* XXX some architectures (e.g. risc-v) would like it
                       better for this merely being a 32-to-64 sign or zero-
                       extension. */
                    vpushi(0xffffffff);
                    vtop->type.t |= VT_UNSIGNED;
                    gen_op('&');
#endif
                }
                /* if lvalue and single word type, nothing to do because
                   the lvalue already contains the real type size (see
                   VT_LVAL_xxx constants) */
            }
        }
    } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
        /* if we are casting between pointer types,
           we must update the VT_LVAL_xxx size */
        vtop->r = (vtop->r & ~VT_LVAL_TYPE)
                  | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
    }
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
/* Returns -1 for incomplete types (e.g. incomplete enums); for arrays
   the element count comes from ref->c.  Struct size/alignment are read
   back from ref->c / ref->r — presumably filled in by the struct layout
   code (not visible here). */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* flexible/incomplete element of incomplete array: report
               the (positive) element size instead of propagating -1 */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: 4-byte aligned on i386 (non-PE) and
           pre-EABI ARM, 8-byte aligned everywhere else */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        /* 16-byte two-register types */
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
2951 /* push type size as known at runtime time on top of value stack. Put
2952 alignment at 'a' */
2953 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2955 if (type->t & VT_VLA) {
2956 type_size(&type->ref->type, a);
2957 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2958 } else {
2959 vpushi(type_size(type, a));
/* return the pointed type of t */
static inline CType *pointed_type(CType *type)
{
    /* for pointer/array types the referenced Sym holds the target type */
    return &type->ref->type;
}
2969 /* modify type so that its it is a pointer to type. */
2970 ST_FUNC void mk_pointer(CType *type)
2972 Sym *s;
2973 s = sym_push(SYM_FIELD, type, 0, -1);
2974 type->t = VT_PTR | (type->t & VT_STORAGE);
2975 type->ref = s;
2978 /* compare function types. OLD functions match any new functions */
2979 static int is_compatible_func(CType *type1, CType *type2)
2981 Sym *s1, *s2;
2983 s1 = type1->ref;
2984 s2 = type2->ref;
2985 if (s1->f.func_call != s2->f.func_call)
2986 return 0;
2987 if (s1->f.func_type != s2->f.func_type
2988 && s1->f.func_type != FUNC_OLD
2989 && s2->f.func_type != FUNC_OLD)
2990 return 0;
2991 /* we should check the function return type for FUNC_OLD too
2992 but that causes problems with the internally used support
2993 functions such as TOK_memmove */
2994 if (s1->f.func_type == FUNC_OLD && !s1->next)
2995 return 1;
2996 if (s2->f.func_type == FUNC_OLD && !s2->next)
2997 return 1;
2998 for (;;) {
2999 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3000 return 0;
3001 s1 = s1->next;
3002 s2 = s2->next;
3003 if (!s1)
3004 return !s2;
3005 if (!s2)
3006 return 0;
3010 /* return true if type1 and type2 are the same. If unqualified is
3011 true, qualifiers on the types are ignored.
3013 static int compare_types(CType *type1, CType *type2, int unqualified)
3015 int bt1, t1, t2;
3017 t1 = type1->t & VT_TYPE;
3018 t2 = type2->t & VT_TYPE;
3019 if (unqualified) {
3020 /* strip qualifiers before comparing */
3021 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3022 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3025 /* Default Vs explicit signedness only matters for char */
3026 if ((t1 & VT_BTYPE) != VT_BYTE) {
3027 t1 &= ~VT_DEFSIGN;
3028 t2 &= ~VT_DEFSIGN;
3030 /* XXX: bitfields ? */
3031 if (t1 != t2)
3032 return 0;
3034 if ((t1 & VT_ARRAY)
3035 && !(type1->ref->c < 0
3036 || type2->ref->c < 0
3037 || type1->ref->c == type2->ref->c))
3038 return 0;
3040 /* test more complicated cases */
3041 bt1 = t1 & VT_BTYPE;
3042 if (bt1 == VT_PTR) {
3043 type1 = pointed_type(type1);
3044 type2 = pointed_type(type2);
3045 return is_compatible_types(type1, type2);
3046 } else if (bt1 == VT_STRUCT) {
3047 return (type1->ref == type2->ref);
3048 } else if (bt1 == VT_FUNC) {
3049 return is_compatible_func(type1, type2);
3050 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3051 return type1->ref == type2->ref;
3052 } else {
3053 return 1;
/* return true if type1 and type2 are exactly the same (including
   qualifiers).
 */
static int is_compatible_types(CType *type1, CType *type2)
{
    /* unqualified == 0: qualifiers take part in the comparison */
    return compare_types(type1,type2,0);
}
/* return true if type1 and type2 are the same (ignoring qualifiers).
 */
static int is_compatible_unqualified_types(CType *type1, CType *type2)
{
    /* unqualified == 1: const/volatile are stripped before comparing */
    return compare_types(type1,type2,1);
}
3072 /* print a type. If 'varstr' is not NULL, then the variable is also
3073 printed in the type */
3074 /* XXX: union */
3075 /* XXX: add array and function pointers */
3076 static void type_to_str(char *buf, int buf_size,
3077 CType *type, const char *varstr)
3079 int bt, v, t;
3080 Sym *s, *sa;
3081 char buf1[256];
3082 const char *tstr;
3084 t = type->t;
3085 bt = t & VT_BTYPE;
3086 buf[0] = '\0';
3088 if (t & VT_EXTERN)
3089 pstrcat(buf, buf_size, "extern ");
3090 if (t & VT_STATIC)
3091 pstrcat(buf, buf_size, "static ");
3092 if (t & VT_TYPEDEF)
3093 pstrcat(buf, buf_size, "typedef ");
3094 if (t & VT_INLINE)
3095 pstrcat(buf, buf_size, "inline ");
3096 if (t & VT_VOLATILE)
3097 pstrcat(buf, buf_size, "volatile ");
3098 if (t & VT_CONSTANT)
3099 pstrcat(buf, buf_size, "const ");
3101 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3102 || ((t & VT_UNSIGNED)
3103 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3104 && !IS_ENUM(t)
3106 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3108 buf_size -= strlen(buf);
3109 buf += strlen(buf);
3111 switch(bt) {
3112 case VT_VOID:
3113 tstr = "void";
3114 goto add_tstr;
3115 case VT_BOOL:
3116 tstr = "_Bool";
3117 goto add_tstr;
3118 case VT_BYTE:
3119 tstr = "char";
3120 goto add_tstr;
3121 case VT_SHORT:
3122 tstr = "short";
3123 goto add_tstr;
3124 case VT_INT:
3125 tstr = "int";
3126 goto maybe_long;
3127 case VT_LLONG:
3128 tstr = "long long";
3129 maybe_long:
3130 if (t & VT_LONG)
3131 tstr = "long";
3132 if (!IS_ENUM(t))
3133 goto add_tstr;
3134 tstr = "enum ";
3135 goto tstruct;
3136 case VT_FLOAT:
3137 tstr = "float";
3138 goto add_tstr;
3139 case VT_DOUBLE:
3140 tstr = "double";
3141 goto add_tstr;
3142 case VT_LDOUBLE:
3143 tstr = "long double";
3144 add_tstr:
3145 pstrcat(buf, buf_size, tstr);
3146 break;
3147 case VT_STRUCT:
3148 tstr = "struct ";
3149 if (IS_UNION(t))
3150 tstr = "union ";
3151 tstruct:
3152 pstrcat(buf, buf_size, tstr);
3153 v = type->ref->v & ~SYM_STRUCT;
3154 if (v >= SYM_FIRST_ANOM)
3155 pstrcat(buf, buf_size, "<anonymous>");
3156 else
3157 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3158 break;
3159 case VT_FUNC:
3160 s = type->ref;
3161 buf1[0]=0;
3162 if (varstr && '*' == *varstr) {
3163 pstrcat(buf1, sizeof(buf1), "(");
3164 pstrcat(buf1, sizeof(buf1), varstr);
3165 pstrcat(buf1, sizeof(buf1), ")");
3167 pstrcat(buf1, buf_size, "(");
3168 sa = s->next;
3169 while (sa != NULL) {
3170 char buf2[256];
3171 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3172 pstrcat(buf1, sizeof(buf1), buf2);
3173 sa = sa->next;
3174 if (sa)
3175 pstrcat(buf1, sizeof(buf1), ", ");
3177 if (s->f.func_type == FUNC_ELLIPSIS)
3178 pstrcat(buf1, sizeof(buf1), ", ...");
3179 pstrcat(buf1, sizeof(buf1), ")");
3180 type_to_str(buf, buf_size, &s->type, buf1);
3181 goto no_var;
3182 case VT_PTR:
3183 s = type->ref;
3184 if (t & VT_ARRAY) {
3185 if (varstr && '*' == *varstr)
3186 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3187 else
3188 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3189 type_to_str(buf, buf_size, &s->type, buf1);
3190 goto no_var;
3192 pstrcpy(buf1, sizeof(buf1), "*");
3193 if (t & VT_CONSTANT)
3194 pstrcat(buf1, buf_size, "const ");
3195 if (t & VT_VOLATILE)
3196 pstrcat(buf1, buf_size, "volatile ");
3197 if (varstr)
3198 pstrcat(buf1, sizeof(buf1), varstr);
3199 type_to_str(buf, buf_size, &s->type, buf1);
3200 goto no_var;
3202 if (varstr) {
3203 pstrcat(buf, buf_size, " ");
3204 pstrcat(buf, buf_size, varstr);
3206 no_var: ;
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed. */
static void gen_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    char buf1[256], buf2[256];
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            ; /* It is Ok if both are void */
        else
            tcc_error("cannot cast from/to void");
    }
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down matching pointer levels, remembering on the way
           whether any qualifiers would be silently dropped */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
		/* Like GCC don't warn by default for merely changes
		   in pointer target signedness.  Do warn for different
		   base types, though, in particular for unsigned enums
		   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
    error:
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
        }
        break;
    }
    gen_cast(dt);
}
/* store vtop in lvalue pushed on stack */
/* Stack layout on entry: vtop[-1] is the destination lvalue, vtop[0]
   the value to store.  On return one entry is popped and the stored
   value is left on the stack (so chained assignments work). */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & VT_TYPE;
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    } else {
        delayed_cast = 0;
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);
    }

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);

        /* destination */
        vswap();
        vtop->type.t = VT_PTR;
        gaddrof();

        /* address of memcpy() */
#ifdef TCC_ARM_EABI
        if(!(align & 7))
            vpush_global_sym(&func_old_type, TOK_memcpy8);
        else if(!(align & 3))
            vpush_global_sym(&func_old_type, TOK_memcpy4);
        else
#endif
        /* Use memmove, rather than memcpy, as dest and src may be same: */
        vpush_global_sym(&func_old_type, TOK_memmove);

        vswap();
        /* source */
        vpushv(vtop - 2);
        vtop->type.t = VT_PTR;
        gaddrof();
        /* type size */
        vpushi(size);
        gfunc_call(3);

        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if ((ft & VT_BTYPE) == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }

        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (r == VT_STRUCT) {
            /* bitfield that can't be accessed as one word: use the
               packed byte-wise helper */
            gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if ((ft & VT_BTYPE) != VT_BOOL) {
                /* mask source */
                if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        rc = RC_INT;
        if (is_float(ft)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((ft & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
                rc = RC_FRET;
            }
#elif defined TCC_TARGET_RISCV64
            /* risc-v long double lives in an integer register pair */
            if (dbt == VT_LDOUBLE)
                rc = RC_INT;
#endif
        }
        r = gv(rc);  /* generate value */
        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            t = get_reg(RC_INT);
#if PTR_SIZE == 8
            sv.type.t = VT_PTR;
#else
            sv.type.t = VT_INT;
#endif
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(t, &sv);
            vtop[-1].r = t | VT_LVAL;
        }
        /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
#ifdef TCC_TARGET_RISCV64
        if (dbt == VT_QLONG || dbt == VT_LDOUBLE) {
            int addr_type = VT_LLONG, load_size = 8, load_type = VT_LLONG;
#elif PTR_SIZE == 8
        if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
            int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
        if ((ft & VT_BTYPE) == VT_LLONG) {
            int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
#endif
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = addr_type;
            gaddrof();
            vpushi(load_size);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            store(r, vtop - 1);
        }

        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
/* Emit a pre/post increment/decrement of the lvalue on vtop.  For the
   post form the original value is duplicated first so it remains the
   expression result after the store. */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant */
    vpushi(c - TOK_MID);   /* c - TOK_MID is +1 for ++ and -1 for -- */
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
3498 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3500 /* read the string */
3501 if (tok != TOK_STR)
3502 expect(msg);
3503 cstr_new(astr);
3504 while (tok == TOK_STR) {
3505 /* XXX: add \0 handling too ? */
3506 cstr_cat(astr, tokc.str.data, -1);
3507 next();
3509 cstr_ccat(astr, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For other positive values this yields the
   index of the highest set bit, counted from 1.) */
static int exact_log2p1(int i)
{
    int ret;

    if (!i)
        return 0;
    ret = 1;
    while (i >= 1 << 8) {
        i >>= 8;
        ret += 8;
    }
    if (i >= 1 << 4) {
        i >>= 4;
        ret += 4;
    }
    if (i >= 1 << 2) {
        i >>= 2;
        ret += 2;
    }
    if (i >= 1 << 1)
        ret++;
    return ret;
}
/* Parse __attribute__((...)) GNUC extension. */
/* Consumes one or more consecutive __attribute__((...)) groups from the
   token stream and records the recognized attributes into '*ad'.
   Unknown attributes are warned about (if enabled) and their parameter
   list is skipped. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    CString astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning("implicit declaration of function '%s'",
                            get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            }
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* bare "aligned": use the maximum supported alignment */
                n = MAX_ALIGN;
            }
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp the register-parameter count to [0,3] */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
3718 static Sym * find_field (CType *type, int v, int *cumofs)
3720 Sym *s = type->ref;
3721 v |= SYM_FIELD;
3722 while ((s = s->next) != NULL) {
3723 if ((s->v & SYM_FIELD) &&
3724 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3725 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3726 Sym *ret = find_field (&s->type, v, cumofs);
3727 if (ret) {
3728 *cumofs += s->c;
3729 return ret;
3732 if (s->v == v)
3733 break;
3735 return s;
/* Compute the layout of all members of the struct/union described by
   type->ref: byte offset (f->c), bit-field position (encoded into
   f->type.t), the record's total size (type->ref->c) and alignment
   (type->ref->r).  Two bit-field models are supported: PCC/GCC
   compatible (pcc != 0) and MS compatible; #pragma pack and the
   aligned/packed attributes are honoured in both. */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;        /* running byte offset / final size */
    bit_pos = 0;  /* bit offset inside the current bit-field container */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        /* bit_size < 0 means "not a bit-field" */
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1;
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            /* union: every member starts at offset 0; size is the max */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non bit-field) struct member */
            if (pcc)
                c += (bit_pos + 7) >> 3; /* flush a pending bit-field run */
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field.  Layout is more complicated.  There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                /* normalize bit_pos to lie inside one container */
                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align).  */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                /* MS bit-field model */
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width.  */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again).  */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            /* store the bit position into the member's type word */
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                    BIT_POS(f->type.t),
                    BIT_SIZE(f->type.t)
                    );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3; /* flush trailing bit-field run */

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a; /* pad record size to its alignment */
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);
        /* field is fully contained within the record: nothing to fix */
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;   /* candidate aligned byte offset */
            px = px - (cx << 3);       /* bit position inside candidate */
            if (c0 == cx)
                break;                 /* converged */
            s = (px + bit_size + 7) >> 3;
            /* pick the smallest integer type covering s bytes */
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION.
   Parses an optional tag, an optional member/enumerator list in
   braces, and writes the resulting type into 'type'.  The record's
   Sym is (re)used across forward declarations: s->c == -1 means
   not yet defined, -2 means definition in progress. */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* untagged: invent an anonymous symbol */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token.  */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2; /* mark definition in progress */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            /* ll: current value, pl/nl: largest positive / most
               negative value seen, to pick the enum's integral type */
            long long ll = 0, pl = 0, nl = 0;
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                /* no negative values: unsigned, widened if needed */
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            /* struct/union member list.  c records whether a real
               field was seen (flexible array member placement check) */
            c = 0;
            flexible = 0;
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                              get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            /* unnamed member: only allowed for
                               structs/unions (anonymous members) */
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            /* incomplete type: only ok as trailing
                               flexible array member */
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                      get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                  get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                  get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                  get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                  get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                    && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            /* encode width into the member's type word */
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | VT_BITFIELD
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous).  */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            struct_layout(type, &ad);
        }
    }
}
4192 static void sym_to_attr(AttributeDef *ad, Sym *s)
4194 merge_symattr(&ad->a, &s->a);
4195 merge_funcattr(&ad->f, &s->f);
4198 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4199 are added to the element type, copied because it could be a typedef. */
4200 static void parse_btype_qualify(CType *type, int qualifiers)
4202 while (type->t & VT_ARRAY) {
4203 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4204 type = &type->ref->type;
4206 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
   Accumulates basic-type keywords, qualifiers, storage classes,
   attributes and typedef names into 'type'/'ad'.  't' carries the
   type bits built so far; 'bt'/'st' track which base/size keyword
   has already been seen to diagnose "too many basic types". */
static int parse_btype(CType *type, AttributeDef *ad)
{
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;  /* default base type */
    bt = st = -1; /* -1: not seen yet, -2: came from a typedef */
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

            /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                /* size modifier: only valid alone or with 'int' */
                if (st != -1 || (bt != -1 && bt != VT_INT))
                   tmbt: tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            /* C11 _Alignas(type-name) or _Alignas(const-expr) */
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1)) {
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                    n = 1 << (ad1.a.aligned - 1);
                  else
                    type_size(&type1, &n);
              } else {
                  n = expr_const();
                  if (n <= 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* second 'long': promote to long long */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* 'long double' */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

            /* type modifiers */
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* accepted and ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

            /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            next();
            break;
            /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(...))) replaces the base type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
            /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && !in_generic) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply the typedef's type, re-applying any qualifiers
               collected so far onto a copy */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_TARGET_PE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
#endif
    type->t = t;
    return type_found;
}
4461 /* convert a function parameter type (array to pointer and function to
4462 function pointer) */
4463 static inline void convert_parameter_type(CType *pt)
4465 /* remove const and volatile qualifiers (XXX: const could be used
4466 to indicate a const function parameter */
4467 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4468 /* array must be transformed to pointer according to ANSI C */
4469 pt->t &= ~VT_ARRAY;
4470 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4471 mk_pointer(pt);
/* Parse the opening '(' and the string operand of an asm construct
   into 'astr'; the caller consumes the closing ')'. */
ST_FUNC void parse_asm_str(CString *astr)
{
    skip('(');
    /* adjacent string literals are concatenated by parse_mult_str */
    parse_mult_str(astr, "string constant");
}
4481 /* Parse an asm label and return the token */
4482 static int asm_label_instr(void)
4484 int v;
4485 CString astr;
4487 next();
4488 parse_asm_str(&astr);
4489 skip(')');
4490 #ifdef ASM_DEBUG
4491 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4492 #endif
4493 v = tok_alloc(astr.data, astr.size - 1)->tok;
4494 cstr_free(&astr);
4495 return v;
/* Parse the "post" part of a declarator: a function parameter list
   '(...)' or array brackets '[...]', recursing for multi-dimensional
   arrays.  'storage' carries the declaration's storage bits (to
   distinguish VLAs), 'td' the TYPE_DIRECT/TYPE_ABSTRACT mode flags.
   Returns 0 only when '(' turned out to start a nested declarator
   rather than a parameter list (caller then recurses), else 1. */
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
{
    int n, l, t1, arg_size, align, unused_align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;

    if (tok == '(') {
        /* function type, or recursive declarator (return if so) */
        next();
        if (td && !(td & TYPE_ABSTRACT))
          return 0;
        if (tok == ')')
          l = 0;
        else if (parse_btype(&pt, &ad1))
          l = FUNC_NEW;   /* prototype-style parameter list */
        else if (td) {
          merge_attr (ad, &ad1);
          return 0;       /* not a parameter list: nested declarator */
        } else
          l = FUNC_OLD;   /* K&R-style identifier list */
        first = NULL;
        plast = &first;
        arg_size = 0;
        if (l) {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break;  /* '(void)' empty prototype */
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                } else {
                    n = tok;
                    if (n < TOK_UIDENT)
                        expect("identifier");
                    pt.t = VT_VOID; /* invalid type */
                    pt.ref = NULL;
                    next();
                }
                convert_parameter_type(&pt);
                /* stack size in pointer-sized words, rounded up */
                arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
                    tcc_error("invalid type");
            }
        } else
            /* if no parameters, then old type prototype */
            l = FUNC_OLD;
        skip(')');
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            mk_pointer(type);
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
        s->a = ad->a;
        s->f = ad->f;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        next();
        while (1) {
            /* XXX The optional type-quals and static should only be accepted
               in parameter decls.  The '*' as well, and then even only
               in prototypes (not function defs).  */
            switch (tok) {
            case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
            case TOK_CONST1:
            case TOK_VOLATILE1:
            case TOK_STATIC:
            case '*':
                next();
                continue;
            default:
                break;
            }
            break;
        }
        n = -1;       /* -1: no size expression present */
        t1 = 0;
        if (tok != ']') {
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
            else {
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof).  */
                nocode_wanted = 0;
                gexpr();
            }
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                n = 0;
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad, storage, 0);

        if ((type->t & VT_BTYPE) == VT_FUNC)
            tcc_error("declaration of an array of functions");
        if ((type->t & VT_BTYPE) == VT_VOID
            || type_size(type, &unused_align) < 0)
            tcc_error("declaration of an array of incomplete type elements");

        /* an array of VLAs is itself a VLA */
        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            if (n < 0)
              tcc_error("need explicit inner array size in VLAs");
            /* reserve a local slot holding the runtime size */
            loc -= type_size(&int_type, &align);
            loc &= -align;
            n = loc;

            /* size = count * element-size, stored into the slot */
            vla_runtime_type_size(type, &align);
            gen_op('*');
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
            vswap();
            vstore();
        }
        if (n != -1)
            vpop();
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;
    }
    return 1;
}
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl().  If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls. */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    CType *post, *ret;
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post = ret = type;

    /* consume the pointer derivations and their qualifiers */
    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            goto redo;  /* restrict is accepted and ignored */
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            break;
        }
        mk_pointer(type);
        type->t |= qualifiers;
        if (ret == type)
            /* innermost pointed to type is the one for the first derivation */
            ret = pointed_type(type);
    }

    if (tok == '(') {
        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this.  */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any).  */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
            skip(')');
        } else
            goto abstract;
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        *v = tok;
        next();
    } else {
  abstract:
        if (!(td & TYPE_ABSTRACT))
          expect("identifier");
        *v = 0;  /* abstract declarator: no name */
    }
    /* apply array/function suffixes to the innermost nested type */
    post_type(post, ad, storage, 0);
    parse_attribute(ad);
    type->t |= storage;
    return ret;
}
4740 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4741 ST_FUNC int lvalue_type(int t)
4743 int bt, r;
4744 r = VT_LVAL;
4745 bt = t & VT_BTYPE;
4746 if (bt == VT_BYTE || bt == VT_BOOL)
4747 r |= VT_LVAL_BYTE;
4748 else if (bt == VT_SHORT)
4749 r |= VT_LVAL_SHORT;
4750 else
4751 return r;
4752 if (t & VT_UNSIGNED)
4753 r |= VT_LVAL_UNSIGNED;
4754 return r;
4757 /* indirection with full error checking and bound check */
4758 ST_FUNC void indir(void)
4760 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4761 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4762 return;
4763 expect("pointer");
4765 if (vtop->r & VT_LVAL)
4766 gv(RC_INT);
4767 vtop->type = *pointed_type(&vtop->type);
4768 /* Arrays and functions are never lvalues */
4769 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4770 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4771 vtop->r |= lvalue_type(vtop->type.t);
4772 /* if bound checking, the referenced pointer must be checked */
4773 #ifdef CONFIG_TCC_BCHECK
4774 if (tcc_state->do_bounds_check)
4775 vtop->r |= VT_MUSTBOUND;
4776 #endif
4780 /* pass a parameter to a function and do type checking and casting */
4781 static void gfunc_param_typed(Sym *func, Sym *arg)
4783 int func_type;
4784 CType type;
4786 func_type = func->f.func_type;
4787 if (func_type == FUNC_OLD ||
4788 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4789 /* default casting : only need to convert float to double */
4790 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4791 gen_cast_s(VT_DOUBLE);
4792 } else if (vtop->type.t & VT_BITFIELD) {
4793 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4794 type.ref = vtop->type.ref;
4795 gen_cast(&type);
4797 } else if (arg == NULL) {
4798 tcc_error("too many arguments to function");
4799 } else {
4800 type = arg->type;
4801 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4802 gen_assign_cast(&type);
/* parse an expression and return its type without any side effect. */
static void expr_type(CType *type, void (*expr_fn)(void))
{
    /* suppress code generation: we only want the type */
    nocode_wanted++;
    expr_fn();            /* leaves the expression value on the vstack */
    *type = vtop->type;   /* capture its type */
    vpop();               /* discard the value */
    nocode_wanted--;
}
4816 /* parse an expression of the form '(type)' or '(expr)' and return its
4817 type */
4818 static void parse_expr_type(CType *type)
4820 int n;
4821 AttributeDef ad;
4823 skip('(');
4824 if (parse_btype(type, &ad)) {
4825 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4826 } else {
4827 expr_type(type, gexpr);
4829 skip(')');
4832 static void parse_type(CType *type)
4834 AttributeDef ad;
4835 int n;
4837 if (!parse_btype(type, &ad)) {
4838 expect("type");
4840 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4843 static void parse_builtin_params(int nc, const char *args)
4845 char c, sep = '(';
4846 CType t;
4847 if (nc)
4848 nocode_wanted++;
4849 next();
4850 while ((c = *args++)) {
4851 skip(sep);
4852 sep = ',';
4853 switch (c) {
4854 case 'e': expr_eq(); continue;
4855 case 't': parse_type(&t); vpush(&t); continue;
4856 default: tcc_error("internal error"); break;
4859 skip(')');
4860 if (nc)
4861 nocode_wanted--;
4864 ST_FUNC void unary(void)
4866 int n, t, align, size, r, sizeof_caller;
4867 CType type;
4868 Sym *s;
4869 AttributeDef ad;
4871 sizeof_caller = in_sizeof;
4872 in_sizeof = 0;
4873 type.ref = NULL;
4874 /* XXX: GCC 2.95.3 does not generate a table although it should be
4875 better here */
4876 tok_next:
4877 switch(tok) {
4878 case TOK_EXTENSION:
4879 next();
4880 goto tok_next;
4881 case TOK_LCHAR:
4882 #ifdef TCC_TARGET_PE
4883 t = VT_SHORT|VT_UNSIGNED;
4884 goto push_tokc;
4885 #endif
4886 case TOK_CINT:
4887 case TOK_CCHAR:
4888 t = VT_INT;
4889 push_tokc:
4890 type.t = t;
4891 vsetc(&type, VT_CONST, &tokc);
4892 next();
4893 break;
4894 case TOK_CUINT:
4895 t = VT_INT | VT_UNSIGNED;
4896 goto push_tokc;
4897 case TOK_CLLONG:
4898 t = VT_LLONG;
4899 goto push_tokc;
4900 case TOK_CULLONG:
4901 t = VT_LLONG | VT_UNSIGNED;
4902 goto push_tokc;
4903 case TOK_CFLOAT:
4904 t = VT_FLOAT;
4905 goto push_tokc;
4906 case TOK_CDOUBLE:
4907 t = VT_DOUBLE;
4908 goto push_tokc;
4909 case TOK_CLDOUBLE:
4910 t = VT_LDOUBLE;
4911 goto push_tokc;
4912 case TOK_CLONG:
4913 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4914 goto push_tokc;
4915 case TOK_CULONG:
4916 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4917 goto push_tokc;
4918 case TOK___FUNCTION__:
4919 if (!gnu_ext)
4920 goto tok_identifier;
4921 /* fall thru */
4922 case TOK___FUNC__:
4924 void *ptr;
4925 int len;
4926 /* special function name identifier */
4927 len = strlen(funcname) + 1;
4928 /* generate char[len] type */
4929 type.t = VT_BYTE;
4930 mk_pointer(&type);
4931 type.t |= VT_ARRAY;
4932 type.ref->c = len;
4933 vpush_ref(&type, data_section, data_section->data_offset, len);
4934 if (!NODATA_WANTED) {
4935 ptr = section_ptr_add(data_section, len);
4936 memcpy(ptr, funcname, len);
4938 next();
4940 break;
4941 case TOK_LSTR:
4942 #ifdef TCC_TARGET_PE
4943 t = VT_SHORT | VT_UNSIGNED;
4944 #else
4945 t = VT_INT;
4946 #endif
4947 goto str_init;
4948 case TOK_STR:
4949 /* string parsing */
4950 t = VT_BYTE;
4951 if (tcc_state->char_is_unsigned)
4952 t = VT_BYTE | VT_UNSIGNED;
4953 str_init:
4954 if (tcc_state->warn_write_strings)
4955 t |= VT_CONSTANT;
4956 type.t = t;
4957 mk_pointer(&type);
4958 type.t |= VT_ARRAY;
4959 memset(&ad, 0, sizeof(AttributeDef));
4960 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4961 break;
4962 case '(':
4963 next();
4964 /* cast ? */
4965 if (parse_btype(&type, &ad)) {
4966 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4967 skip(')');
4968 /* check ISOC99 compound literal */
4969 if (tok == '{') {
4970 /* data is allocated locally by default */
4971 if (global_expr)
4972 r = VT_CONST;
4973 else
4974 r = VT_LOCAL;
4975 /* all except arrays are lvalues */
4976 if (!(type.t & VT_ARRAY))
4977 r |= lvalue_type(type.t);
4978 memset(&ad, 0, sizeof(AttributeDef));
4979 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4980 } else {
4981 if (sizeof_caller) {
4982 vpush(&type);
4983 return;
4985 unary();
4986 gen_cast(&type);
4988 } else if (tok == '{') {
4989 int saved_nocode_wanted = nocode_wanted;
4990 if (const_wanted)
4991 tcc_error("expected constant");
4992 /* save all registers */
4993 save_regs(0);
4994 /* statement expression : we do not accept break/continue
4995 inside as GCC does. We do retain the nocode_wanted state,
4996 as statement expressions can't ever be entered from the
4997 outside, so any reactivation of code emission (from labels
4998 or loop heads) can be disabled again after the end of it. */
4999 block(1);
5000 nocode_wanted = saved_nocode_wanted;
5001 skip(')');
5002 } else {
5003 gexpr();
5004 skip(')');
5006 break;
5007 case '*':
5008 next();
5009 unary();
5010 indir();
5011 break;
5012 case '&':
5013 next();
5014 unary();
5015 /* functions names must be treated as function pointers,
5016 except for unary '&' and sizeof. Since we consider that
5017 functions are not lvalues, we only have to handle it
5018 there and in function calls. */
5019 /* arrays can also be used although they are not lvalues */
5020 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5021 !(vtop->type.t & VT_ARRAY))
5022 test_lvalue();
5023 mk_pointer(&vtop->type);
5024 gaddrof();
5025 break;
5026 case '!':
5027 next();
5028 unary();
5029 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5030 gen_cast_s(VT_BOOL);
5031 vtop->c.i = !vtop->c.i;
5032 } else if (vtop->r == VT_CMP) {
5033 vtop->cmp_op ^= 1;
5034 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5035 } else {
5036 vpushi(0);
5037 gen_op(TOK_EQ);
5039 break;
5040 case '~':
5041 next();
5042 unary();
5043 vpushi(-1);
5044 gen_op('^');
5045 break;
5046 case '+':
5047 next();
5048 unary();
5049 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5050 tcc_error("pointer not accepted for unary plus");
5051 /* In order to force cast, we add zero, except for floating point
5052 where we really need an noop (otherwise -0.0 will be transformed
5053 into +0.0). */
5054 if (!is_float(vtop->type.t)) {
5055 vpushi(0);
5056 gen_op('+');
5058 break;
5059 case TOK_SIZEOF:
5060 case TOK_ALIGNOF1:
5061 case TOK_ALIGNOF2:
5062 case TOK_ALIGNOF3:
5063 t = tok;
5064 next();
5065 in_sizeof++;
5066 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5067 s = NULL;
5068 if (vtop[1].r & VT_SYM)
5069 s = vtop[1].sym; /* hack: accessing previous vtop */
5070 size = type_size(&type, &align);
5071 if (s && s->a.aligned)
5072 align = 1 << (s->a.aligned - 1);
5073 if (t == TOK_SIZEOF) {
5074 if (!(type.t & VT_VLA)) {
5075 if (size < 0)
5076 tcc_error("sizeof applied to an incomplete type");
5077 vpushs(size);
5078 } else {
5079 vla_runtime_type_size(&type, &align);
5081 } else {
5082 vpushs(align);
5084 vtop->type.t |= VT_UNSIGNED;
5085 break;
5087 case TOK_builtin_expect:
5088 /* __builtin_expect is a no-op for now */
5089 parse_builtin_params(0, "ee");
5090 vpop();
5091 break;
5092 case TOK_builtin_types_compatible_p:
5093 parse_builtin_params(0, "tt");
5094 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5095 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5096 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5097 vtop -= 2;
5098 vpushi(n);
5099 break;
5100 case TOK_builtin_choose_expr:
5102 int64_t c;
5103 next();
5104 skip('(');
5105 c = expr_const64();
5106 skip(',');
5107 if (!c) {
5108 nocode_wanted++;
5110 expr_eq();
5111 if (!c) {
5112 vpop();
5113 nocode_wanted--;
5115 skip(',');
5116 if (c) {
5117 nocode_wanted++;
5119 expr_eq();
5120 if (c) {
5121 vpop();
5122 nocode_wanted--;
5124 skip(')');
5126 break;
5127 case TOK_builtin_constant_p:
5128 parse_builtin_params(1, "e");
5129 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5130 vtop--;
5131 vpushi(n);
5132 break;
5133 case TOK_builtin_frame_address:
5134 case TOK_builtin_return_address:
5136 int tok1 = tok;
5137 int level;
5138 next();
5139 skip('(');
5140 if (tok != TOK_CINT) {
5141 tcc_error("%s only takes positive integers",
5142 tok1 == TOK_builtin_return_address ?
5143 "__builtin_return_address" :
5144 "__builtin_frame_address");
5146 level = (uint32_t)tokc.i;
5147 next();
5148 skip(')');
5149 type.t = VT_VOID;
5150 mk_pointer(&type);
5151 vset(&type, VT_LOCAL, 0); /* local frame */
5152 while (level--) {
5153 mk_pointer(&vtop->type);
5154 indir(); /* -> parent frame */
5156 if (tok1 == TOK_builtin_return_address) {
5157 // assume return address is just above frame pointer on stack
5158 vpushi(PTR_SIZE);
5159 gen_op('+');
5160 mk_pointer(&vtop->type);
5161 indir();
5164 break;
5165 #ifdef TCC_TARGET_X86_64
5166 #ifdef TCC_TARGET_PE
5167 case TOK_builtin_va_start:
5168 parse_builtin_params(0, "ee");
5169 r = vtop->r & VT_VALMASK;
5170 if (r == VT_LLOCAL)
5171 r = VT_LOCAL;
5172 if (r != VT_LOCAL)
5173 tcc_error("__builtin_va_start expects a local variable");
5174 vtop->r = r;
5175 vtop->type = char_pointer_type;
5176 vtop->c.i += 8;
5177 vstore();
5178 break;
5179 #else
5180 case TOK_builtin_va_arg_types:
5181 parse_builtin_params(0, "t");
5182 vpushi(classify_x86_64_va_arg(&vtop->type));
5183 vswap();
5184 vpop();
5185 break;
5186 #endif
5187 #endif
5189 #ifdef TCC_TARGET_ARM64
5190 case TOK___va_start: {
5191 parse_builtin_params(0, "ee");
5192 //xx check types
5193 gen_va_start();
5194 vpushi(0);
5195 vtop->type.t = VT_VOID;
5196 break;
5198 case TOK___va_arg: {
5199 parse_builtin_params(0, "et");
5200 type = vtop->type;
5201 vpop();
5202 //xx check types
5203 gen_va_arg(&type);
5204 vtop->type = type;
5205 break;
5207 case TOK___arm64_clear_cache: {
5208 parse_builtin_params(0, "ee");
5209 gen_clear_cache();
5210 vpushi(0);
5211 vtop->type.t = VT_VOID;
5212 break;
5214 #endif
5215 /* pre operations */
5216 case TOK_INC:
5217 case TOK_DEC:
5218 t = tok;
5219 next();
5220 unary();
5221 inc(0, t);
5222 break;
5223 case '-':
5224 next();
5225 unary();
5226 t = vtop->type.t & VT_BTYPE;
5227 if (is_float(t)) {
5228 /* In IEEE negate(x) isn't subtract(0,x), but rather
5229 subtract(-0, x). */
5230 vpush(&vtop->type);
5231 if (t == VT_FLOAT)
5232 vtop->c.f = -1.0 * 0.0;
5233 else if (t == VT_DOUBLE)
5234 vtop->c.d = -1.0 * 0.0;
5235 else
5236 vtop->c.ld = -1.0 * 0.0;
5237 } else
5238 vpushi(0);
5239 vswap();
5240 gen_op('-');
5241 break;
5242 case TOK_LAND:
5243 if (!gnu_ext)
5244 goto tok_identifier;
5245 next();
5246 /* allow to take the address of a label */
5247 if (tok < TOK_UIDENT)
5248 expect("label identifier");
5249 s = label_find(tok);
5250 if (!s) {
5251 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5252 } else {
5253 if (s->r == LABEL_DECLARED)
5254 s->r = LABEL_FORWARD;
5256 if (!s->type.t) {
5257 s->type.t = VT_VOID;
5258 mk_pointer(&s->type);
5259 s->type.t |= VT_STATIC;
5261 vpushsym(&s->type, s);
5262 next();
5263 break;
5265 case TOK_GENERIC:
5267 CType controlling_type;
5268 int has_default = 0;
5269 int has_match = 0;
5270 int learn = 0;
5271 TokenString *str = NULL;
5272 int saved_const_wanted = const_wanted;
5274 next();
5275 skip('(');
5276 const_wanted = 0;
5277 expr_type(&controlling_type, expr_eq);
5278 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5279 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5280 mk_pointer(&controlling_type);
5281 const_wanted = saved_const_wanted;
5282 for (;;) {
5283 learn = 0;
5284 skip(',');
5285 if (tok == TOK_DEFAULT) {
5286 if (has_default)
5287 tcc_error("too many 'default'");
5288 has_default = 1;
5289 if (!has_match)
5290 learn = 1;
5291 next();
5292 } else {
5293 AttributeDef ad_tmp;
5294 int itmp;
5295 CType cur_type;
5297 in_generic++;
5298 parse_btype(&cur_type, &ad_tmp);
5299 in_generic--;
5301 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5302 if (compare_types(&controlling_type, &cur_type, 0)) {
5303 if (has_match) {
5304 tcc_error("type match twice");
5306 has_match = 1;
5307 learn = 1;
5310 skip(':');
5311 if (learn) {
5312 if (str)
5313 tok_str_free(str);
5314 skip_or_save_block(&str);
5315 } else {
5316 skip_or_save_block(NULL);
5318 if (tok == ')')
5319 break;
5321 if (!str) {
5322 char buf[60];
5323 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5324 tcc_error("type '%s' does not match any association", buf);
5326 begin_macro(str, 1);
5327 next();
5328 expr_eq();
5329 if (tok != TOK_EOF)
5330 expect(",");
5331 end_macro();
5332 next();
5333 break;
5335 // special qnan , snan and infinity values
5336 case TOK___NAN__:
5337 n = 0x7fc00000;
5338 special_math_val:
5339 vpushi(n);
5340 vtop->type.t = VT_FLOAT;
5341 next();
5342 break;
5343 case TOK___SNAN__:
5344 n = 0x7f800001;
5345 goto special_math_val;
5346 case TOK___INF__:
5347 n = 0x7f800000;
5348 goto special_math_val;
5350 default:
5351 tok_identifier:
5352 t = tok;
5353 next();
5354 if (t < TOK_UIDENT)
5355 expect("identifier");
5356 s = sym_find(t);
5357 if (!s || IS_ASM_SYM(s)) {
5358 const char *name = get_tok_str(t, NULL);
5359 if (tok != '(')
5360 tcc_error("'%s' undeclared", name);
5361 /* for simple function calls, we tolerate undeclared
5362 external reference to int() function */
5363 if (tcc_state->warn_implicit_function_declaration
5364 #ifdef TCC_TARGET_PE
5365 /* people must be warned about using undeclared WINAPI functions
5366 (which usually start with uppercase letter) */
5367 || (name[0] >= 'A' && name[0] <= 'Z')
5368 #endif
5370 tcc_warning("implicit declaration of function '%s'", name);
5371 s = external_global_sym(t, &func_old_type);
5374 r = s->r;
5375 /* A symbol that has a register is a local register variable,
5376 which starts out as VT_LOCAL value. */
5377 if ((r & VT_VALMASK) < VT_CONST)
5378 r = (r & ~VT_VALMASK) | VT_LOCAL;
5380 vset(&s->type, r, s->c);
5381 /* Point to s as backpointer (even without r&VT_SYM).
5382 Will be used by at least the x86 inline asm parser for
5383 regvars. */
5384 vtop->sym = s;
5386 if (r & VT_SYM) {
5387 vtop->c.i = 0;
5388 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5389 vtop->c.i = s->enum_val;
5391 break;
5394 /* post operations */
5395 while (1) {
5396 if (tok == TOK_INC || tok == TOK_DEC) {
5397 inc(1, tok);
5398 next();
5399 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5400 int qualifiers, cumofs = 0;
5401 /* field */
5402 if (tok == TOK_ARROW)
5403 indir();
5404 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5405 test_lvalue();
5406 gaddrof();
5407 /* expect pointer on structure */
5408 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5409 expect("struct or union");
5410 if (tok == TOK_CDOUBLE)
5411 expect("field name");
5412 next();
5413 if (tok == TOK_CINT || tok == TOK_CUINT)
5414 expect("field name");
5415 s = find_field(&vtop->type, tok, &cumofs);
5416 if (!s)
5417 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5418 /* add field offset to pointer */
5419 vtop->type = char_pointer_type; /* change type to 'char *' */
5420 vpushi(cumofs + s->c);
5421 gen_op('+');
5422 /* change type to field type, and set to lvalue */
5423 vtop->type = s->type;
5424 vtop->type.t |= qualifiers;
5425 /* an array is never an lvalue */
5426 if (!(vtop->type.t & VT_ARRAY)) {
5427 vtop->r |= lvalue_type(vtop->type.t);
5428 #ifdef CONFIG_TCC_BCHECK
5429 /* if bound checking, the referenced pointer must be checked */
5430 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5431 vtop->r |= VT_MUSTBOUND;
5432 #endif
5434 next();
5435 } else if (tok == '[') {
5436 next();
5437 gexpr();
5438 gen_op('+');
5439 indir();
5440 skip(']');
5441 } else if (tok == '(') {
5442 SValue ret;
5443 Sym *sa;
5444 int nb_args, ret_nregs, ret_align, regsize, variadic;
5446 /* function call */
5447 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5448 /* pointer test (no array accepted) */
5449 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5450 vtop->type = *pointed_type(&vtop->type);
5451 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5452 goto error_func;
5453 } else {
5454 error_func:
5455 expect("function pointer");
5457 } else {
5458 vtop->r &= ~VT_LVAL; /* no lvalue */
5460 /* get return type */
5461 s = vtop->type.ref;
5462 next();
5463 sa = s->next; /* first parameter */
5464 nb_args = regsize = 0;
5465 ret.r2 = VT_CONST;
5466 /* compute first implicit argument if a structure is returned */
5467 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5468 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5469 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5470 &ret_align, &regsize);
5471 if (!ret_nregs) {
5472 /* get some space for the returned structure */
5473 size = type_size(&s->type, &align);
5474 #ifdef TCC_TARGET_ARM64
5475 /* On arm64, a small struct is return in registers.
5476 It is much easier to write it to memory if we know
5477 that we are allowed to write some extra bytes, so
5478 round the allocated space up to a power of 2: */
5479 if (size < 16)
5480 while (size & (size - 1))
5481 size = (size | (size - 1)) + 1;
5482 #endif
5483 loc = (loc - size) & -align;
5484 ret.type = s->type;
5485 ret.r = VT_LOCAL | VT_LVAL;
5486 /* pass it as 'int' to avoid structure arg passing
5487 problems */
5488 vseti(VT_LOCAL, loc);
5489 ret.c = vtop->c;
5490 nb_args++;
5492 } else {
5493 ret_nregs = 1;
5494 ret.type = s->type;
5497 if (ret_nregs) {
5498 /* return in register */
5499 if (is_float(ret.type.t)) {
5500 ret.r = reg_fret(ret.type.t);
5501 #ifdef TCC_TARGET_X86_64
5502 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5503 ret.r2 = REG_QRET;
5504 #endif
5505 } else {
5506 #ifndef TCC_TARGET_ARM64
5507 #ifndef TCC_TARGET_RISCV64
5508 #ifdef TCC_TARGET_X86_64
5509 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5510 #else
5511 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5512 #endif
5513 ret.r2 = REG_LRET;
5514 #endif
5515 #endif
5516 ret.r = REG_IRET;
5518 ret.c.i = 0;
5520 if (tok != ')') {
5521 for(;;) {
5522 expr_eq();
5523 gfunc_param_typed(s, sa);
5524 nb_args++;
5525 if (sa)
5526 sa = sa->next;
5527 if (tok == ')')
5528 break;
5529 skip(',');
5532 if (sa)
5533 tcc_error("too few arguments to function");
5534 skip(')');
5535 gfunc_call(nb_args);
5537 /* return value */
5538 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5539 vsetc(&ret.type, r, &ret.c);
5540 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5543 /* handle packed struct return */
5544 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5545 int addr, offset;
5547 size = type_size(&s->type, &align);
5548 /* We're writing whole regs often, make sure there's enough
5549 space. Assume register size is power of 2. */
5550 if (regsize > align)
5551 align = regsize;
5552 loc = (loc - size) & -align;
5553 addr = loc;
5554 offset = 0;
5555 for (;;) {
5556 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5557 vswap();
5558 vstore();
5559 vtop--;
5560 if (--ret_nregs == 0)
5561 break;
5562 offset += regsize;
5564 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5566 if (s->f.func_noreturn)
5567 CODE_OFF();
5568 } else {
5569 break;
5574 ST_FUNC void expr_prod(void)
5576 int t;
5578 unary();
5579 while (tok == '*' || tok == '/' || tok == '%') {
5580 t = tok;
5581 next();
5582 unary();
5583 gen_op(t);
5587 ST_FUNC void expr_sum(void)
5589 int t;
5591 expr_prod();
5592 while (tok == '+' || tok == '-') {
5593 t = tok;
5594 next();
5595 expr_prod();
5596 gen_op(t);
5600 static void expr_shift(void)
5602 int t;
5604 expr_sum();
5605 while (tok == TOK_SHL || tok == TOK_SAR) {
5606 t = tok;
5607 next();
5608 expr_sum();
5609 gen_op(t);
5613 static void expr_cmp(void)
5615 int t;
5617 expr_shift();
5618 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5619 tok == TOK_ULT || tok == TOK_UGE) {
5620 t = tok;
5621 next();
5622 expr_shift();
5623 gen_op(t);
5627 static void expr_cmpeq(void)
5629 int t;
5631 expr_cmp();
5632 while (tok == TOK_EQ || tok == TOK_NE) {
5633 t = tok;
5634 next();
5635 expr_cmp();
5636 gen_op(t);
5640 static void expr_and(void)
5642 expr_cmpeq();
5643 while (tok == '&') {
5644 next();
5645 expr_cmpeq();
5646 gen_op('&');
5650 static void expr_xor(void)
5652 expr_and();
5653 while (tok == '^') {
5654 next();
5655 expr_and();
5656 gen_op('^');
5660 static void expr_or(void)
5662 expr_xor();
5663 while (tok == '|') {
5664 next();
5665 expr_xor();
5666 gen_op('|');
5670 static int condition_3way(void);
5672 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5674 int t = 0, cc = 1, f = 0, c;
5675 for(;;) {
5676 c = f ? i : condition_3way();
5677 if (c < 0) {
5678 save_regs(1), cc = 0;
5679 } else if (c != i) {
5680 nocode_wanted++, f = 1;
5682 if (tok != e_op) {
5683 if (cc || f) {
5684 vpop();
5685 vpushi(i ^ f);
5686 gsym(t);
5687 nocode_wanted -= f;
5688 } else {
5689 gvtst_set(i, t);
5691 break;
5693 if (c < 0)
5694 t = gvtst(i, t);
5695 else
5696 vpop();
5697 next();
5698 e_fn();
5702 static void expr_land(void)
5704 expr_or();
5705 if (tok == TOK_LAND)
5706 expr_landor(expr_or, TOK_LAND, 1);
5709 static void expr_lor(void)
5711 expr_land();
5712 if (tok == TOK_LOR)
5713 expr_landor(expr_land, TOK_LOR, 0);
5716 /* Assuming vtop is a value used in a conditional context
5717 (i.e. compared with zero) return 0 if it's false, 1 if
5718 true and -1 if it can't be statically determined. */
5719 static int condition_3way(void)
5721 int c = -1;
5722 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5723 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5724 vdup();
5725 gen_cast_s(VT_BOOL);
5726 c = vtop->c.i;
5727 vpop();
5729 return c;
5732 static int is_cond_bool(SValue *sv)
5734 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5735 && (sv->type.t & VT_BTYPE) == VT_INT)
5736 return (unsigned)sv->c.i < 2;
5737 if (sv->r == VT_CMP)
5738 return 1;
5739 return 0;
5742 static void expr_cond(void)
5744 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5745 SValue sv;
5746 CType type, type1, type2;
5747 int ncw_prev;
5749 expr_lor();
5750 if (tok == '?') {
5751 next();
5752 c = condition_3way();
5753 g = (tok == ':' && gnu_ext);
5754 tt = 0;
5755 if (!g) {
5756 if (c < 0) {
5757 save_regs(1);
5758 tt = gvtst(1, 0);
5759 } else {
5760 vpop();
5762 } else if (c < 0) {
5763 /* needed to avoid having different registers saved in
5764 each branch */
5765 rc = RC_INT;
5766 if (is_float(vtop->type.t)) {
5767 rc = RC_FLOAT;
5768 #ifdef TCC_TARGET_X86_64
5769 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5770 rc = RC_ST0;
5772 #elif defined TCC_TARGET_RISCV64
5773 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)
5774 rc = RC_INT;
5775 #endif
5777 gv(rc);
5778 save_regs(1);
5779 gv_dup();
5780 tt = gvtst(0, 0);
5783 ncw_prev = nocode_wanted;
5784 if (1) {
5785 if (c == 0)
5786 nocode_wanted++;
5787 if (!g)
5788 gexpr();
5790 if (c < 0 && vtop->r == VT_CMP) {
5791 t1 = gvtst(0, 0);
5792 vpushi(0);
5793 gvtst_set(0, t1);
5796 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5797 mk_pointer(&vtop->type);
5798 type1 = vtop->type;
5799 sv = *vtop; /* save value to handle it later */
5800 vtop--; /* no vpop so that FP stack is not flushed */
5802 if (g) {
5803 u = tt;
5804 } else if (c < 0) {
5805 u = gjmp(0);
5806 gsym(tt);
5807 } else
5808 u = 0;
5810 nocode_wanted = ncw_prev;
5811 if (c == 1)
5812 nocode_wanted++;
5813 skip(':');
5814 expr_cond();
5816 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5817 if (sv.r == VT_CMP) {
5818 t1 = sv.jtrue;
5819 t2 = u;
5820 } else {
5821 t1 = gvtst(0, 0);
5822 t2 = gjmp(0);
5823 gsym(u);
5824 vpushv(&sv);
5826 gvtst_set(0, t1);
5827 gvtst_set(1, t2);
5828 nocode_wanted = ncw_prev;
5829 // tcc_warning("two conditions expr_cond");
5830 return;
5833 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5834 mk_pointer(&vtop->type);
5835 type2=vtop->type;
5836 t1 = type1.t;
5837 bt1 = t1 & VT_BTYPE;
5838 t2 = type2.t;
5839 bt2 = t2 & VT_BTYPE;
5840 type.ref = NULL;
5842 /* cast operands to correct type according to ISOC rules */
5843 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5844 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5845 } else if (is_float(bt1) || is_float(bt2)) {
5846 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5847 type.t = VT_LDOUBLE;
5849 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5850 type.t = VT_DOUBLE;
5851 } else {
5852 type.t = VT_FLOAT;
5854 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5855 /* cast to biggest op */
5856 type.t = VT_LLONG | VT_LONG;
5857 if (bt1 == VT_LLONG)
5858 type.t &= t1;
5859 if (bt2 == VT_LLONG)
5860 type.t &= t2;
5861 /* convert to unsigned if it does not fit in a long long */
5862 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5863 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5864 type.t |= VT_UNSIGNED;
5865 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5866 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5867 /* If one is a null ptr constant the result type
5868 is the other. */
5869 if (is_null_pointer (vtop)) type = type1;
5870 else if (is_null_pointer (&sv)) type = type2;
5871 else if (bt1 != bt2)
5872 tcc_error("incompatible types in conditional expressions");
5873 else {
5874 CType *pt1 = pointed_type(&type1);
5875 CType *pt2 = pointed_type(&type2);
5876 int pbt1 = pt1->t & VT_BTYPE;
5877 int pbt2 = pt2->t & VT_BTYPE;
5878 int newquals, copied = 0;
5879 /* pointers to void get preferred, otherwise the
5880 pointed to types minus qualifs should be compatible */
5881 type = (pbt1 == VT_VOID) ? type1 : type2;
5882 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5883 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5884 tcc_warning("pointer type mismatch in conditional expression\n");
5886 /* combine qualifs */
5887 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5888 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5889 & newquals)
5891 /* copy the pointer target symbol */
5892 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5893 0, type.ref->c);
5894 copied = 1;
5895 pointed_type(&type)->t |= newquals;
5897 /* pointers to incomplete arrays get converted to
5898 pointers to completed ones if possible */
5899 if (pt1->t & VT_ARRAY
5900 && pt2->t & VT_ARRAY
5901 && pointed_type(&type)->ref->c < 0
5902 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5904 if (!copied)
5905 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5906 0, type.ref->c);
5907 pointed_type(&type)->ref =
5908 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5909 0, pointed_type(&type)->ref->c);
5910 pointed_type(&type)->ref->c =
5911 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5914 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5915 /* XXX: test structure compatibility */
5916 type = bt1 == VT_STRUCT ? type1 : type2;
5917 } else {
5918 /* integer operations */
5919 type.t = VT_INT | (VT_LONG & (t1 | t2));
5920 /* convert to unsigned if it does not fit in an integer */
5921 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5922 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5923 type.t |= VT_UNSIGNED;
5925 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5926 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5927 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5929 /* now we convert second operand */
5930 if (c != 1) {
5931 gen_cast(&type);
5932 if (islv) {
5933 mk_pointer(&vtop->type);
5934 gaddrof();
5935 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5936 gaddrof();
5939 rc = RC_INT;
5940 if (is_float(type.t)) {
5941 rc = RC_FLOAT;
5942 #ifdef TCC_TARGET_X86_64
5943 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5944 rc = RC_ST0;
5946 #elif defined TCC_TARGET_RISCV64
5947 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)
5948 rc = RC_INT;
5949 #endif
5950 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5951 /* for long longs, we use fixed registers to avoid having
5952 to handle a complicated move */
5953 rc = RC_IRET;
5956 tt = r2 = 0;
5957 if (c < 0) {
5958 r2 = gv(rc);
5959 tt = gjmp(0);
5961 gsym(u);
5962 nocode_wanted = ncw_prev;
5964 /* this is horrible, but we must also convert first
5965 operand */
5966 if (c != 0) {
5967 *vtop = sv;
5968 gen_cast(&type);
5969 if (islv) {
5970 mk_pointer(&vtop->type);
5971 gaddrof();
5972 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5973 gaddrof();
5976 if (c < 0) {
5977 r1 = gv(rc);
5978 move_reg(r2, r1, type.t);
5979 vtop->r = r2;
5980 gsym(tt);
5983 if (islv)
5984 indir();
5989 static void expr_eq(void)
5991 int t;
5993 expr_cond();
5994 if (tok == '=' ||
5995 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5996 tok == TOK_A_XOR || tok == TOK_A_OR ||
5997 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5998 test_lvalue();
5999 t = tok;
6000 next();
6001 if (t == '=') {
6002 expr_eq();
6003 } else {
6004 vdup();
6005 expr_eq();
6006 gen_op(t & 0x7f);
6008 vstore();
6012 ST_FUNC void gexpr(void)
6014 while (1) {
6015 expr_eq();
6016 if (tok != ',')
6017 break;
6018 vpop();
6019 next();
6023 /* parse a constant expression and return value in vtop. */
6024 static void expr_const1(void)
6026 const_wanted++;
6027 nocode_wanted++;
6028 expr_cond();
6029 nocode_wanted--;
6030 const_wanted--;
6033 /* parse an integer constant and return its value. */
6034 static inline int64_t expr_const64(void)
6036 int64_t c;
6037 expr_const1();
6038 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6039 expect("constant expression");
6040 c = vtop->c.i;
6041 vpop();
6042 return c;
6045 /* parse an integer constant and return its value.
6046 Complain if it doesn't fit 32bit (signed or unsigned). */
6047 ST_FUNC int expr_const(void)
6049 int c;
6050 int64_t wc = expr_const64();
6051 c = wc;
6052 if (c != wc && (unsigned)c != wc)
6053 tcc_error("constant exceeds 32 bit");
6054 return c;
6057 /* ------------------------------------------------------------------------- */
6058 /* return from function */
6060 #ifndef TCC_TARGET_ARM64
6061 #ifndef TCC_TARGET_RISCV64
6062 static void gfunc_return(CType *func_type)
6064 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6065 CType type, ret_type;
6066 int ret_align, ret_nregs, regsize;
6067 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6068 &ret_align, &regsize);
6069 if (0 == ret_nregs) {
6070 /* if returning structure, must copy it to implicit
6071 first pointer arg location */
6072 type = *func_type;
6073 mk_pointer(&type);
6074 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6075 indir();
6076 vswap();
6077 /* copy structure value to pointer */
6078 vstore();
6079 } else {
6080 /* returning structure packed into registers */
6081 int r, size, addr, align;
6082 size = type_size(func_type,&align);
6083 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6084 (vtop->c.i & (ret_align-1)))
6085 && (align & (ret_align-1))) {
6086 loc = (loc - size) & -ret_align;
6087 addr = loc;
6088 type = *func_type;
6089 vset(&type, VT_LOCAL | VT_LVAL, addr);
6090 vswap();
6091 vstore();
6092 vpop();
6093 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6095 vtop->type = ret_type;
6096 if (is_float(ret_type.t))
6097 r = rc_fret(ret_type.t);
6098 else
6099 r = RC_IRET;
6101 if (ret_nregs == 1)
6102 gv(r);
6103 else {
6104 for (;;) {
6105 vdup();
6106 gv(r);
6107 vpop();
6108 if (--ret_nregs == 0)
6109 break;
6110 /* We assume that when a structure is returned in multiple
6111 registers, their classes are consecutive values of the
6112 suite s(n) = 2^n */
6113 r <<= 1;
6114 vtop->c.i += regsize;
6118 } else if (is_float(func_type->t)) {
6119 gv(rc_fret(func_type->t));
6120 } else {
6121 gv(RC_IRET);
6123 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6125 #endif
6126 #endif
6128 static void check_func_return(void)
6130 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6131 return;
6132 if (!strcmp (funcname, "main")
6133 && (func_vt.t & VT_BTYPE) == VT_INT) {
6134 /* main returns 0 by default */
6135 vpushi(0);
6136 gen_assign_cast(&func_vt);
6137 gfunc_return(&func_vt);
6138 } else {
6139 tcc_warning("function might return no value: '%s'", funcname);
6143 /* ------------------------------------------------------------------------- */
6144 /* switch/case */
6146 static int case_cmp(const void *pa, const void *pb)
6148 int64_t a = (*(struct case_t**) pa)->v1;
6149 int64_t b = (*(struct case_t**) pb)->v1;
6150 return a < b ? -1 : a > b;
/* Emit a conditional jump to the known address 'a' (true branch),
   chaining the false branch into 't'. */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
6158 static void gcase(struct case_t **base, int len, int *bsym)
6160 struct case_t *p;
6161 int e;
6162 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6163 while (len > 8) {
6164 /* binary search */
6165 p = base[len/2];
6166 vdup();
6167 if (ll)
6168 vpushll(p->v2);
6169 else
6170 vpushi(p->v2);
6171 gen_op(TOK_LE);
6172 e = gvtst(1, 0);
6173 vdup();
6174 if (ll)
6175 vpushll(p->v1);
6176 else
6177 vpushi(p->v1);
6178 gen_op(TOK_GE);
6179 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6180 /* x < v1 */
6181 gcase(base, len/2, bsym);
6182 /* x > v2 */
6183 gsym(e);
6184 e = len/2 + 1;
6185 base += e; len -= e;
6187 /* linear scan */
6188 while (len--) {
6189 p = *base++;
6190 vdup();
6191 if (ll)
6192 vpushll(p->v2);
6193 else
6194 vpushi(p->v2);
6195 if (p->v1 == p->v2) {
6196 gen_op(TOK_EQ);
6197 gtst_addr(0, p->sym);
6198 } else {
6199 gen_op(TOK_LE);
6200 e = gvtst(1, 0);
6201 vdup();
6202 if (ll)
6203 vpushll(p->v1);
6204 else
6205 vpushi(p->v1);
6206 gen_op(TOK_GE);
6207 gtst_addr(0, p->sym);
6208 gsym(e);
6211 *bsym = gjmp(*bsym);
6214 /* ------------------------------------------------------------------------- */
6215 /* __attribute__((cleanup(fn))) */
/* Emit calls to the __attribute__((cleanup)) handlers registered on
   the current scope's chain, innermost first, stopping (exclusively)
   at 'stop'.  Each handler is called with the address of the variable
   it protects. */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;     /* the cleanup function */
        Sym *vs = cls->prev_tok; /* the variable it protects */

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        /* pass &variable as the single argument */
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* Emit the cleanups required for a backward goto whose target label
   was recorded with cleanup state 'cleanupstate': run every handler
   registered below the nearest common ancestor of the label's chain
   and the current chain. */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* first bring both chains to equal depth ... */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
        ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
        ;
    /* ... then walk both up in lockstep until they meet */
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
        ;

    try_call_scope_cleanup(cc);
}
6254 /* call 'func' for each __attribute__((cleanup(func))) */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    /* Walk the pending (forward) gotos that jumped out of scopes
       deeper than 'o': give each one its share of cleanups now. */
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            /* jump over the cleanup code for the normal fall-through path */
            if (!jmp)
                jmp = gjmp(0);
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            if (!o->cl.n)
                goto remove_pending; /* no cleanups left: goto is resolved */
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    /* normal scope exit: run this scope's own cleanups */
    try_call_scope_cleanup(o->cl.s);
}
6281 /* ------------------------------------------------------------------------- */
6282 /* VLA */
/* Restore the stack pointer from the VLA save slot 'loc'
   (0 means no VLA was active, so nothing to restore). */
static void vla_restore(int loc)
{
    if (!loc)
        return;
    gen_vla_sp_restore(loc);
}
6290 static void vla_leave(struct scope *o)
6292 if (o->vla.num < cur_scope->vla.num)
6293 vla_restore(o->vla.loc);
6296 /* ------------------------------------------------------------------------- */
6297 /* local scopes */
6299 void new_scope(struct scope *o)
6301 /* copy and link previous scope */
6302 *o = *cur_scope;
6303 o->prev = cur_scope;
6304 cur_scope = o;
6306 /* record local declaration stack position */
6307 o->lstk = local_stack;
6308 o->llstk = local_label_stack;
6310 ++local_scope;
/* Close scope 'o' and return to its parent: pop VLAs, run pending
   cleanups, and unwind the local label and symbol stacks back to the
   marks recorded by new_scope(). */
void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    /* run cleanups registered in this scope (and resolve gotos out of it) */
    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that. */

    /* pop locally defined symbols */
    sym_pop(&local_stack, o->lstk, is_expr);

    cur_scope = o->prev;
    --local_scope;
}
6338 /* leave a scope via break/continue(/goto) */
6339 void leave_scope(struct scope *o)
6341 if (!o)
6342 return;
6343 try_call_scope_cleanup(o->cl.s);
6344 vla_leave(o);
6347 /* ------------------------------------------------------------------------- */
6348 /* call block from 'for do while' loops */
/* Generate a loop (or switch) body: temporarily install 'bsym' as the
   break jump chain and, when non-NULL, 'csym' as the continue chain
   (which also marks this scope as the innermost loop), then restore
   the previous targets afterwards. */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* Parse one statement and generate its code.  When 'is_expr' is set
   (GNU statement expression), the value of the last expression
   statement is kept on the value stack instead of being popped. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok, next();

    if (t == TOK_IF) {
        /* if (expr) stmt [else stmt] */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }

    } else if (t == TOK_WHILE) {
        d = gind();
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* exit chain */
        b = 0;           /* continue chain */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);

    } else if (t == '{') {
        /* compound statement: opens a new scope */
        struct scope o;
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (is_expr)
                    vpop();
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);

        /* closing the function's outermost block: check fall-off-end */
        if (0 == local_scope && !nocode_wanted)
            check_func_return();
        next();

    } else if (t == TOK_RETURN) {
        a = tok != ';';                          /* has a value? */
        b = (func_vt.t & VT_BTYPE) != VT_VOID;   /* expects a value? */
        if (a)
            gexpr(), gen_assign_cast(&func_vt);
        leave_scope(root_scope);
        if (a && b)
            gfunc_return(&func_vt);
        else if (a)
            vtop--;
        else if (b)
            tcc_warning("'return' with no value.");
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
            leave_scope(loop_scope);
        else
            leave_scope(cur_switch->scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        struct scope o;
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl0(VT_LOCAL, 1, NULL)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind();     /* c: condition addr, d: loop-back addr */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted out of line, looped back to c */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0);
        gsym_addr(c, d);
        gsym(a);

    } else if (t == TOK_SWITCH) {
        struct switch_t *saved, sw;
        SValue switchval;

        sw.p = NULL;
        sw.n = 0;
        sw.def_sym = 0;
        sw.bsym = &a;
        sw.scope = cur_scope;

        saved = cur_switch;
        cur_switch = &sw;

        skip('(');
        gexpr();
        skip(')');
        switchval = *vtop--;

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);

        qsort(sw.p, sw.n, sizeof(void*), case_cmp);
        for (b = 1; b < sw.n; b++)
            if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
                tcc_error("duplicate case value");

        /* Our switch table sorting is signed, so the compared
           value needs to be as well when it's 64bit. */
        if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
            switchval.type.t &= ~VT_UNSIGNED;
        vpushv(&switchval);
        gv(RC_INT);
        d = 0, gcase(sw.p, sw.n, &d);
        vpop();
        if (sw.def_sym)
            gsym_addr(d, sw.def_sym);
        else
            gsym(d);
        /* break label */
        gsym(a);

        dynarray_reset(&sw.p, &sw.n);
        cur_switch = saved;

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: case lo ... hi: */
            next();
            cr->v2 = expr_const64();
            if (cr->v2 < cr->v1)
                tcc_warning("empty case range");
        }
        cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(root_scope->vla.loc);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: run cleanups of the scopes being left */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    /* resolve the pending cleanup gotos targeting this label */
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
            vla_restore(cur_scope->vla.loc);
            /* we accept this, but it is a mistake */
            if (tok == '}') {
                tcc_warning("deprecated use of label at end of compound statement");
            } else {
                goto again;
            }

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
                if (is_expr) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }
}
6691 /* This skips over a stream of tokens containing balanced {} and ()
6692 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6693 with a '{'). If STR then allocates and stores the skipped tokens
6694 in *STR. This doesn't check if () and {} are nested correctly,
6695 i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        int t;
        if (tok == TOK_EOF) {
            /* EOF is only an error when saving or inside unbalanced pairs */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        t = tok;
        next();
        if (t == '{' || t == '(') {
            level++;
        } else if (t == '}' || t == ')') {
            level--;
            /* stop after the matching '}' when we started on a '{' */
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the saved token string */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
6729 #define EXPR_CONST 1
6730 #define EXPR_ANY 2
/* Parse one initializer element.  EXPR_CONST requires a link-time
   constant expression (static storage); EXPR_ANY accepts any
   assignment expression (automatic storage). */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
6759 /* put zeros for variable based init */
6760 static void init_putz(Section *sec, unsigned long c, int size)
6762 if (sec) {
6763 /* nothing to do because globals are already set to zero */
6764 } else {
6765 vpush_global_sym(&func_old_type, TOK_memset);
6766 vseti(VT_LOCAL, c);
6767 #ifdef TCC_TARGET_ARM
6768 vpushs(size);
6769 vpushi(0);
6770 #else
6771 vpushi(0);
6772 vpushs(size);
6773 #endif
6774 gfunc_call(3);
6778 #define DIF_FIRST 1
6779 #define DIF_SIZE_ONLY 2
6780 #define DIF_HAVE_ELEM 4
6782 /* t is the array or struct type. c is the array or struct
6783 address. cur_field is the pointer to the current
6784 field, for arrays the 'c' member contains the current start
6785 index. 'flags' is as in decl_initializer.
6786 'al' contains the already initialized length of the
6787 current container (starting at c). This returns the new length of that. */
static int decl_designator(CType *type, Section *sec, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    if (gnu_ext && tok >= TOK_UIDENT) {
        /* GNU 'fieldname:' designator syntax */
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            /* array designator: [index] or GNU [lo ... hi] */
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
                index_last < index)
                tcc_error("invalid index");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            /* struct/union member designator: .name */
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (!f)
                expect("field");
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs + f->c;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* no designator: advance to the next positional element */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            if (type->ref->c >= 0 && index >= type->ref->c)
                tcc_error("index too large");
            type = pointed_type(type);
            c += index * type_size(type, &align);
        } else {
            f = *cur_field;
            /* skip unnamed bitfield padding members */
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many field init");
            type = &f->type;
            c += f->c;
        }
    }
    /* must put zero in holes (note that doing it that way
       ensures that it even works with designators) */
    if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
        init_putz(sec, corig + al, c - corig - al);
    decl_initializer(type, sec, c, flags & ~DIF_FIRST);

    /* XXX: make it more general */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        /* GNU range designator: replicate the first element's init
           over the remaining nb_elems-1 slots */
        unsigned long c_end;
        uint8_t *src, *dst;
        int i;

        if (!sec) {
            vset(type, VT_LOCAL|VT_LVAL, c);
            for (i = 1; i < nb_elems; i++) {
                vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
                vswap();
                vstore();
            }
            vpop();
        } else if (!NODATA_WANTED) {
            c_end = c + nb_elems * elem_size;
            if (c_end > sec->data_allocated)
                section_realloc(sec, c_end);
            src = sec->data + c;
            dst = src;
            for(i = 1; i < nb_elems; i++) {
                dst += elem_size;
                memcpy(dst, src, elem_size);
            }
        }
    }
    c += nb_elems * type_size(type, &align);
    if (c - corig > al)
        al = c - corig;
    return al;
}
6911 /* store a value or an expression directly in global data or in local array */
static void init_putv(CType *type, Section *sec, unsigned long c)
{
    int bt;
    void *ptr;
    CType dtype;

    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    if (sec) {
        /* static storage: patch the value directly into section data */
        int size, align;
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;

        if ((vtop->r & VT_SYM)
            && bt != VT_PTR
            && bt != VT_FUNC
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            )
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            vtop--;
            return;
        }

        size = type_size(type, &align);
        section_reserve(sec, c + size);
        ptr = sec->data + c;

        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'.  The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one.  That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'.  Ignore
               pointer typed entities here.  Hopefully no real code
               will every use compound literals with scalar type.  */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over.  */
            Section *ssec;
            ElfSym *esym;
            ElfW_Rel *rel;
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations.  Use the fact that relocs are
                   created it order, so look from the end of relocs
                   until we hit one before the copied region.  */
                int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
                rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
                while (num_relocs--) {
                    rel--;
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    /* Note: if the same fields are initialized multiple
                       times (possible with designators) then we possibly
                       add multiple relocations for the same offset here.
                       That would lead to wrong code, the last reloc needs
                       to win.  We clean this up later after the whole
                       initializer is parsed.  */
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
#if PTR_SIZE == 8
                                   rel->r_addend
#else
                                   0
#endif
                                  );
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                /* merge the constant into the containing bytes,
                   8 bits at a time, preserving neighbouring bits */
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    n = 8 - bit_pos;
                    if (n > bit_size)
                        n = bit_size;
                    v = vtop->c.i >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                }
            } else
            switch(bt) {
                /* XXX: when cross-compiling we assume that each type has the
                   same representation on host and target, which is likely to
                   be wrong in the case of long double */
            case VT_BOOL:
                vtop->c.i = vtop->c.i != 0;
                /* fall through */
            case VT_BYTE:
                *(char *)ptr |= vtop->c.i;
                break;
            case VT_SHORT:
                *(short *)ptr |= vtop->c.i;
                break;
            case VT_FLOAT:
                *(float*)ptr = vtop->c.f;
                break;
            case VT_DOUBLE:
                *(double *)ptr = vtop->c.d;
                break;
            case VT_LDOUBLE:
#if defined TCC_IS_NATIVE_387
                if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
                    memcpy(ptr, &vtop->c.ld, 10);
#ifdef __TINYC__
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
#endif
                else if (vtop->c.ld == 0.0)
                    ;
                else
#endif
                if (sizeof(long double) == LDOUBLE_SIZE)
                    *(long double*)ptr = vtop->c.ld;
                else if (sizeof(double) == LDOUBLE_SIZE)
                    *(double *)ptr = (double)vtop->c.ld;
                else
                    tcc_error("can't cross compile long double constants");
                break;
#if PTR_SIZE != 8
            case VT_LLONG:
                *(long long *)ptr |= vtop->c.i;
                break;
#else
            case VT_LLONG:
                /* fall through: same width as a pointer on 64-bit */
#endif
            case VT_PTR:
                {
                    addr_t val = vtop->c.i;
#if PTR_SIZE == 8
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    else
                        *(addr_t *)ptr |= val;
#else
                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    *(addr_t *)ptr |= val;
#endif
                    break;
                }
            default:
                {
                    int val = vtop->c.i;
#if PTR_SIZE == 8
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    else
                        *(int *)ptr |= val;
#else
                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    *(int *)ptr |= val;
#endif
                    break;
                }
            }
        }
        vtop--;
    } else {
        /* automatic storage: emit an ordinary store to the local slot */
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        vswap();
        vstore();
        vpop();
    }
}
7101 /* 't' contains the type and storage info. 'c' is the offset of the
7102 object in section 'sec'. If 'sec' is NULL, it means stack based
7103 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7104 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7105 size only evaluation is wanted (only for arrays). */
static void decl_initializer(CType *type, Section *sec, unsigned long c,
                             int flags)
{
    int len, n, no_oblock, nb, i;
    int size1, align1;
    Sym *s, *f;
    Sym indexsym; /* fake field used as running index for array init */
    CType *t1;

    if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        !(flags & DIF_SIZE_ONLY)) {
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        flags |= DIF_HAVE_ELEM;
    }

    if ((flags & DIF_HAVE_ELEM) &&
        !(type->t & VT_ARRAY) &&
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements.  */
        is_compatible_unqualified_types(type, &vtop->type)) {
        /* scalar (or whole-struct) value: store it directly */
        init_putv(type, sec, c);
    } else if (type->t & VT_ARRAY) {
        s = type->ref;
        n = s->c; /* declared element count, or < 0 if unknown */
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);

        no_oblock = 1;
        if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            if (tok != '{')
                tcc_error("character array initializer must be a literal,"
                    " optionally enclosed in braces");
            skip('{');
            no_oblock = 0;
        }

        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            len = 0;
            while (tok == TOK_STR || tok == TOK_LSTR) {
                int cstr_len, ch;

                /* compute maximum number of chars wanted */
                if (tok == TOK_STR)
                    cstr_len = tokc.str.size;
                else
                    cstr_len = tokc.str.size / sizeof(nwchar_t);
                cstr_len--; /* don't count the implicit terminator yet */
                nb = cstr_len;
                if (n >= 0 && nb > (n - len))
                    nb = n - len;
                if (!(flags & DIF_SIZE_ONLY)) {
                    if (cstr_len > nb)
                        tcc_warning("initializer-string for array is too long");
                    /* in order to go faster for common case (char
                       string in global variable, we handle it
                       specifically */
                    if (sec && tok == TOK_STR && size1 == 1) {
                        if (!NODATA_WANTED)
                            memcpy(sec->data + c + len, tokc.str.data, nb);
                    } else {
                        for(i=0;i<nb;i++) {
                            if (tok == TOK_STR)
                                ch = ((unsigned char *)tokc.str.data)[i];
                            else
                                ch = ((nwchar_t *)tokc.str.data)[i];
                            vpushi(ch);
                            init_putv(t1, sec, c + (len + i) * size1);
                        }
                    }
                }
                len += nb;
                next();
            }
            /* only add trailing zero if enough storage (no
               warning in this case since it is standard) */
            if (n < 0 || len < n) {
                if (!(flags & DIF_SIZE_ONLY)) {
                    vpushi(0);
                    init_putv(t1, sec, c + (len * size1));
                }
                len++;
            }
            len *= size1;
        } else {
            indexsym.c = 0;
            f = &indexsym;

          do_init_list:
            /* brace-enclosed (or brace-less) element list, shared by
               arrays and structs (structs jump here with f = first field) */
            len = 0;
            while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
                len = decl_designator(type, sec, c, &f, flags, len);
                flags &= ~DIF_HAVE_ELEM;
                if (type->t & VT_ARRAY) {
                    ++indexsym.c;
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL; /* only one union member may be initialized */
                    else
                        f = f->next;
                    if (no_oblock && f == NULL)
                        break;
                }

                if (tok == '}')
                    break;
                skip(',');
            }
        }
        /* put zeros at the end */
        if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
            init_putz(sec, c + len, n*size1 - len);
        if (!no_oblock)
            skip('}');
        /* patch type size if needed, which happens only for array types */
        if (n < 0)
            s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        size1 = 1;
        no_oblock = 1;
        if ((flags & DIF_FIRST) || tok == '{') {
            skip('{');
            no_oblock = 0;
        }
        s = type->ref;
        f = s->next;
        n = s->c;
        goto do_init_list;
    } else if (tok == '{') {
        /* scalar initialized with a braced value after an element was
           already parsed: report "';' expected" via skip */
        if (flags & DIF_HAVE_ELEM)
            skip(';');
        next();
        decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
        skip('}');
    } else if ((flags & DIF_SIZE_ONLY)) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything than an array if DIF_SIZE_ONLY (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when DIF_SIZE_ONLY is set.  */
        /* just skip expression */
        skip_or_save_block(NULL);
    } else {
        if (!(flags & DIF_HAVE_ELEM)) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early.  */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        }
        init_putv(type, sec, c);
    }
}
7280 /* parse an initializer for type 't' if 'has_init' is non zero, and
7281 allocate space in local or global data space ('r' is either
7282 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7283 variable 'v' of scope 'scope' is declared before initializers
7284 are parsed. If 'v' is zero, then a reference to the new object
7285 is put in the value stack. If 'has_init' is 2, a special parsing
7286 is done to handle string constants. */
7287 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7288 int has_init, int v, int scope)
7290 int size, align, addr;
7291 TokenString *init_str = NULL;
7293 Section *sec;
7294 Sym *flexible_array;
7295 Sym *sym = NULL;
7296 int saved_nocode_wanted = nocode_wanted;
7297 #ifdef CONFIG_TCC_BCHECK
7298 int bcheck;
7299 #endif
7301 /* Always allocate static or global variables */
7302 if (v && (r & VT_VALMASK) == VT_CONST)
7303 nocode_wanted |= 0x80000000;
7305 #ifdef CONFIG_TCC_BCHECK
7306 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7307 #endif
7309 flexible_array = NULL;
7310 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7311 Sym *field = type->ref->next;
7312 if (field) {
7313 while (field->next)
7314 field = field->next;
7315 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7316 flexible_array = field;
7320 size = type_size(type, &align);
7321 /* If unknown size, we must evaluate it before
7322 evaluating initializers because
7323 initializers can generate global data too
7324 (e.g. string pointers or ISOC99 compound
7325 literals). It also simplifies local
7326 initializers handling */
7327 if (size < 0 || (flexible_array && has_init)) {
7328 if (!has_init)
7329 tcc_error("unknown type size");
7330 /* get all init string */
7331 if (has_init == 2) {
7332 init_str = tok_str_alloc();
7333 /* only get strings */
7334 while (tok == TOK_STR || tok == TOK_LSTR) {
7335 tok_str_add_tok(init_str);
7336 next();
7338 tok_str_add(init_str, -1);
7339 tok_str_add(init_str, 0);
7340 } else {
7341 skip_or_save_block(&init_str);
7343 unget_tok(0);
7345 /* compute size */
7346 begin_macro(init_str, 1);
7347 next();
7348 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7349 /* prepare second initializer parsing */
7350 macro_ptr = init_str->str;
7351 next();
7353 /* if still unknown size, error */
7354 size = type_size(type, &align);
7355 if (size < 0)
7356 tcc_error("unknown type size");
7358 /* If there's a flex member and it was used in the initializer
7359 adjust size. */
7360 if (flexible_array &&
7361 flexible_array->type.ref->c > 0)
7362 size += flexible_array->type.ref->c
7363 * pointed_size(&flexible_array->type);
7364 /* take into account specified alignment if bigger */
7365 if (ad->a.aligned) {
7366 int speca = 1 << (ad->a.aligned - 1);
7367 if (speca > align)
7368 align = speca;
7369 } else if (ad->a.packed) {
7370 align = 1;
7373 if (!v && NODATA_WANTED)
7374 size = 0, align = 1;
7376 if ((r & VT_VALMASK) == VT_LOCAL) {
7377 sec = NULL;
7378 #ifdef CONFIG_TCC_BCHECK
7379 if (bcheck && (type->t & VT_ARRAY)) {
7380 loc--;
7382 #endif
7383 loc = (loc - size) & -align;
7384 addr = loc;
7385 #ifdef CONFIG_TCC_BCHECK
7386 /* handles bounds */
7387 /* XXX: currently, since we do only one pass, we cannot track
7388 '&' operators, so we add only arrays */
7389 if (bcheck && (type->t & VT_ARRAY)) {
7390 addr_t *bounds_ptr;
7391 /* add padding between regions */
7392 loc--;
7393 /* then add local bound info */
7394 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7395 bounds_ptr[0] = addr;
7396 bounds_ptr[1] = size;
7398 #endif
7399 if (v) {
7400 /* local variable */
7401 #ifdef CONFIG_TCC_ASM
7402 if (ad->asm_label) {
7403 int reg = asm_parse_regvar(ad->asm_label);
7404 if (reg >= 0)
7405 r = (r & ~VT_VALMASK) | reg;
7407 #endif
7408 sym = sym_push(v, type, r, addr);
7409 if (ad->cleanup_func) {
7410 Sym *cls = sym_push2(&all_cleanups,
7411 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7412 cls->prev_tok = sym;
7413 cls->next = ad->cleanup_func;
7414 cls->ncl = cur_scope->cl.s;
7415 cur_scope->cl.s = cls;
7418 sym->a = ad->a;
7419 } else {
7420 /* push local reference */
7421 vset(type, r, addr);
7423 } else {
7424 if (v && scope == VT_CONST) {
7425 /* see if the symbol was already defined */
7426 sym = sym_find(v);
7427 if (sym) {
7428 patch_storage(sym, ad, type);
7429 /* we accept several definitions of the same global variable. */
7430 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7431 goto no_alloc;
7435 /* allocate symbol in corresponding section */
7436 sec = ad->section;
7437 if (!sec) {
7438 if (has_init)
7439 sec = data_section;
7440 else if (tcc_state->nocommon)
7441 sec = bss_section;
7444 if (sec) {
7445 addr = section_add(sec, size, align);
7446 #ifdef CONFIG_TCC_BCHECK
7447 /* add padding if bound check */
7448 if (bcheck)
7449 section_add(sec, 1, 1);
7450 #endif
7451 } else {
7452 addr = align; /* SHN_COMMON is special, symbol value is align */
7453 sec = common_section;
7456 if (v) {
7457 if (!sym) {
7458 sym = sym_push(v, type, r | VT_SYM, 0);
7459 patch_storage(sym, ad, NULL);
7461 /* update symbol definition */
7462 put_extern_sym(sym, sec, addr, size);
7463 } else {
7464 /* push global reference */
7465 vpush_ref(type, sec, addr, size);
7466 sym = vtop->sym;
7467 vtop->r |= r;
7470 #ifdef CONFIG_TCC_BCHECK
7471 /* handles bounds now because the symbol must be defined
7472 before for the relocation */
7473 if (bcheck) {
7474 addr_t *bounds_ptr;
7476 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7477 /* then add global bound info */
7478 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7479 bounds_ptr[0] = 0; /* relocated */
7480 bounds_ptr[1] = size;
7482 #endif
7485 if (type->t & VT_VLA) {
7486 int a;
7488 if (NODATA_WANTED)
7489 goto no_alloc;
7491 /* save current stack pointer */
7492 if (root_scope->vla.loc == 0) {
7493 struct scope *v = cur_scope;
7494 gen_vla_sp_save(loc -= PTR_SIZE);
7495 do v->vla.loc = loc; while ((v = v->prev));
7498 vla_runtime_type_size(type, &a);
7499 gen_vla_alloc(type, a);
7500 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7501 /* on _WIN64, because of the function args scratch area, the
7502 result of alloca differs from RSP and is returned in RAX. */
7503 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7504 #endif
7505 gen_vla_sp_save(addr);
7506 cur_scope->vla.loc = addr;
7507 cur_scope->vla.num++;
7509 } else if (has_init) {
7510 size_t oldreloc_offset = 0;
7511 if (sec && sec->reloc)
7512 oldreloc_offset = sec->reloc->data_offset;
7513 decl_initializer(type, sec, addr, DIF_FIRST);
7514 if (sec && sec->reloc)
7515 squeeze_multi_relocs(sec, oldreloc_offset);
7516 /* patch flexible array member size back to -1, */
7517 /* for possible subsequent similar declarations */
7518 if (flexible_array)
7519 flexible_array->type.ref->c = -1;
7522 no_alloc:
7523 /* restore parse state if needed */
7524 if (init_str) {
7525 end_macro();
7526 next();
7529 nocode_wanted = saved_nocode_wanted;
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
    /* Initialize VLA state */
    struct scope f = { 0 };
    cur_scope = root_scope = &f;
    /* re-enable code generation (may have been disabled at file scope) */
    nocode_wanted = 0;
    ind = cur_text_section->data_offset;
    if (sym->a.aligned) {
        /* honor __attribute__((aligned(n))): pad up to the boundary with nops */
        size_t newoff = section_add(cur_text_section, 0,
                                    1 << (sym->a.aligned - 1));
        gen_fill_nops(newoff - ind);
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(&sym->type);
    local_scope = 0;
    rsym = 0; /* return symbol: patched by 'return' statements, resolved below */
    clear_temp_local_var_list();
    /* compile the function body */
    block(0);
    gsym(rsym);
    nocode_wanted = 0;
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    /* reset local stack */
    sym_pop(&local_stack, NULL, 0);
    local_scope = 0;
    label_pop(&global_label_stack, NULL, 0);
    sym_pop(&all_cleanups, NULL, 0);
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    /* end of function */
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    /* outside a function only static data output is wanted
       (see STATIC_DATA_WANTED / nocode_wanted bit usage) */
    nocode_wanted = 0x80000000;
    check_vstack();
/* Emit code for all 'static inline' functions that were recorded as
   token strings during parsing and later turned out to be referenced.
   Iterates to a fixed point because generating one inline function can
   itself reference another one. */
static void gen_inline_functions(TCCState *s)
    Sym *sym;
    int inline_generated, i;
    struct InlineFunc *fn;
    /* pseudo buffer file so diagnostics have a name while re-parsing */
    tcc_open_bf(s, ":inline:", 0);
    /* iterate while inline function are referenced */
    do {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
                /* the function was used or forced (and then not internal):
                   generate its code and convert it to a normal function */
                fn->sym = NULL; /* mark as handled so we don't generate it twice */
                /* restore the filename recorded at declaration time for
                   correct diagnostics/debug info */
                if (file)
                    pstrcpy(file->filename, sizeof file->filename, fn->filename);
                /* replay the saved token string of the function body */
                begin_macro(fn->func_str, 1);
                next();
                cur_text_section = text_section;
                gen_function(sym);
                end_macro();
                inline_generated = 1;
    } while (inline_generated);
    tcc_close();
/* Release the saved token strings of inline functions that were never
   referenced (their 'sym' is still set, i.e. gen_inline_functions did
   not consume them), then free the whole inline function array. */
ST_FUNC void free_inline_functions(TCCState *s)
    int i;
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        if (fn->sym)
            tok_str_free(fn->func_str);
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing old style parameter decl list (and FUNC_SYM is set then)

   Parse a sequence of declarations (and function definitions at file
   scope).  Returns 1 when called with is_for_loop_init != 0 and a
   declaration was actually parsed, 0 otherwise. */
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
    int v, has_init, r;
    CType type, btype;
    Sym *sym;
    AttributeDef ad, adbase;
    while (1) {
        if (tok == TOK_STATIC_ASSERT) {
            /* C11 _Static_assert(constant-expression, message) */
            int c;
            next();
            skip('(');
            c = expr_const();
            skip(',');
            if (c == 0)
                tcc_error("%s", get_tok_str(tok, &tokc));
            next();
            skip(')');
            skip(';');
            continue;
        if (!parse_btype(&btype, &adbase)) {
            /* no base type could be parsed */
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                next();
                continue;
            if (l != VT_CONST)
                break;
            if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
                /* global asm block */
                asm_global_instr();
                continue;
            if (tok >= TOK_UIDENT) {
               /* special test for old K&R protos without explicit int
                  type. Only accepted when defining global data */
                btype.t = VT_INT;
            } else {
                if (tok != TOK_EOF)
                    expect("declaration");
                break;
        if (tok == ';') {
            /* base type with no declarator: only meaningful for
               struct/union/enum tag declarations */
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                int v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
                next();
                continue;
            if (IS_ENUM(btype.t)) {
                next();
                continue;
        while (1) { /* iterate thru each declaration */
            type = btype;
            /* If the base type itself was an array type of unspecified
               size (like in 'typedef int arr[]; arr x = {1};') then
               we will overwrite the unknown size by the real one for
               this decl.  We need to unshare the ref symbol holding
               that size. */
            if ((type.t & VT_ARRAY) && type.ref->c < 0) {
                type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
            ad = adbase;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
                /* always compile 'extern inline' */
                if (type.t & VT_EXTERN)
                    type.t &= ~VT_INLINE;
            /* GNU extension: asm("label") renames the symbol */
            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
#if 0
                /* gcc does not allow __asm__("label") with function definition,
                   but why not ... */
                if (tok == '{')
                    expect(";");
#endif
#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & VT_STATIC)
                    tcc_error("cannot have dll linkage with static");
                if (type.t & VT_TYPEDEF) {
                    /* note: also clears the offending attribute flag */
                    tcc_warning("'%s' attribute ignored for typedef",
                        ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                        (ad.a.dllexport = 0, "dllexport"));
                } else if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
#endif
            if (tok == '{') {
                /* function definition */
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");
                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                /* put function symbol */
                type.t &= ~VT_EXTERN;
                sym = external_sym(v, &type, 0, &ad);
                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if (sym->type.t & VT_INLINE) {
                    struct InlineFunc *fn;
                    const char *filename;
                    filename = file ? file->filename : "";
                    /* InlineFunc ends with a flexible filename buffer */
                    fn = tcc_malloc(sizeof *fn + strlen(filename));
                    strcpy(fn->filename, filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                break;
            } else {
                if (l == VT_CMP) {
                    /* old style parameter declaration list:
                       find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    /* VT_VOID here means "not yet declared" (set above) */
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        /* redefinition in the same scope: must match */
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    sym->a = ad.a;
                    sym->f = ad.f;
                } else if ((type.t & VT_BTYPE) == VT_VOID
                           && !(type.t & VT_EXTERN)) {
                    tcc_error("declaration of void object");
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= lvalue_type(type.t);
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                        || (type.t & VT_BTYPE) == VT_FUNC
                        /* as with GCC, uninitialized global arrays with no size
                           are considered extern: */
                        || ((type.t & VT_ARRAY) && !has_init
                            && l == VT_CONST && type.ref->c < 0)
                        /* external variable or function */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* GNU __attribute__((alias("target"))):
                               alias must already be defined */
                            ElfSym *esym;
                            Sym *alias_target;
                            alias_target = sym_find(ad.alias_target);
                            esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
            if (tok != ',') {
                if (is_for_loop_init)
                    return 1;
                skip(';');
                break;
            next();
    return 0;
/* parse declarations with default storage 'l' (VT_CONST at file scope,
   VT_LOCAL inside a function); thin wrapper around decl0 */
static void decl(int l)
    decl0(l, 0, NULL);
7889 /* ------------------------------------------------------------------------- */
7890 #undef gjmp_addr
7891 #undef gjmp
7892 /* ------------------------------------------------------------------------- */