riscv: Make 32_leds work
[tinycc.git] / tccgen.c
blob745df99084a6363f035402e2774882240b36d6d2
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
/* Clear 'nocode_wanted' at label if it was used */
/* Resolve forward jump chain 't' to the current code index; a used
   label means control can reach here, so re-enable code generation. */
ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
/* Return the current code index as a (reachable) label position. */
static int gind(void) { CODE_ON(); return ind; }

/* Set 'nocode_wanted' after unconditional jumps */
/* After an unconditional jump, the following code is unreachable
   until the next label, so suppress generation. */
static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
/* <---- */
72 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA struct temp_local_variable {
95 int location; //offset on stack. Svalue.c.i
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
/* Returns non-zero when 'd' is finite (neither Inf nor NaN): setting
   all non-exponent bits of the high word and adding 1 carries into
   bit 31 only when the exponent field is not all-ones.
   NOTE(review): assumes the high word of the IEEE-754 double lands in
   p[1] (little-endian layout) -- see the XXX above. */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
158 #endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
#if 0
/* Disabled debugging helper: dump 'b' vstack entries starting at
   depth 'a', each line tagged with 'lbl'. */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
/* put function symbol */
/* Emit the N_FUN stab that opens debug info for function 'sym', plus
   an initial line stab, and reset the line-tracking state. */
ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
{
    char buf[512];

    if (!s1->do_debug)
        return;

    /* stabs info */
    /* XXX: we put here a dummy type */
    /* 'f' marks a file-local (static) function, 'F' a global one */
    snprintf(buf, sizeof(buf), "%s:%c1",
             funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
    put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
                cur_text_section, sym->c);
    /* //gr gdb wants a line at the function */
    put_stabn(N_SLINE, 0, file->line_num, 0);

    /* force tcc_debug_line() to emit a stab for the first real line */
    last_ind = 0;
    last_line_num = 0;
}
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
/* Compile the whole input of state 's1'.  Initializes per-unit
   globals and basic types, then drives the parser over all external
   declarations.  Returns 0 (errors are reported via tcc_error). */
ST_FUNC int tccgen_compile(TCCState *s1)
{
    cur_text_section = NULL;
    funcname = "";
    anon_sym = SYM_FIRST_ANOM;
    section_sym = 0;
    const_wanted = 0;
    nocode_wanted = 0x80000000; /* outside any function: no code */
    local_scope = 0;

    /* define some often used types */
    int_type.t = VT_INT;
    char_pointer_type.t = VT_BYTE;
    mk_pointer(&char_pointer_type);
#if PTR_SIZE == 4
    size_type.t = VT_INT | VT_UNSIGNED;
    ptrdiff_type.t = VT_INT;
#elif LONG_SIZE == 4
    size_type.t = VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LLONG;
#else
    size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LONG | VT_LLONG;
#endif
    /* K&R-style "()" function type used for implicit declarations */
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;

    tcc_debug_start(s1);

#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif

#ifdef INC_DEBUG
    printf("%s: **** new file\n", file->filename);
#endif

    parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
    next();
    decl(VT_CONST);          /* parse all top-level declarations */
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
    return 0;
}
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
/* apply storage attributes to Elf symbol */
/* Propagate visibility, binding (local/weak/global) and PE dll
   import/export flags from 'sym' onto its ELF symtab entry, if any. */
ST_FUNC void update_storage(Sym *sym)
{
    ElfSym *esym;
    int sym_bind, old_sym_bind;

    esym = elfsym(sym);
    if (!esym)
        return;

    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;

    /* static/inline => local binding; weak beats plain global */
    if (sym->type.t & (VT_STATIC | VT_INLINE))
        sym_bind = STB_LOCAL;
    else if (sym->a.weak)
        sym_bind = STB_WEAK;
    else
        sym_bind = STB_GLOBAL;
    old_sym_bind = ELFW(ST_BIND)(esym->st_info);
    if (sym_bind != old_sym_bind) {
        /* keep the symbol type, change only the binding */
        esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
    }

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif

#if 0
    printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
        get_tok_str(sym->v, NULL),
        sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
        sym->a.visibility,
        sym->a.dllexport,
        sym->a.dllimport
        );
#endif
}
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
375 addr_t value, unsigned long size,
376 int can_add_underscore)
378 int sym_type, sym_bind, info, other, t;
379 ElfSym *esym;
380 const char *name;
381 char buf1[256];
382 #ifdef CONFIG_TCC_BCHECK
383 char buf[32];
384 #endif
386 if (!sym->c) {
387 name = get_tok_str(sym->v, NULL);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state->do_bounds_check) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
393 switch(sym->v) {
394 #ifdef TCC_TARGET_PE
395 /* XXX: we rely only on malloc hooks */
396 case TOK_malloc:
397 case TOK_free:
398 case TOK_realloc:
399 case TOK_memalign:
400 case TOK_calloc:
401 #endif
402 case TOK_memcpy:
403 case TOK_memmove:
404 case TOK_memset:
405 case TOK_strlen:
406 case TOK_strcpy:
407 case TOK_alloca:
408 strcpy(buf, "__bound_");
409 strcat(buf, name);
410 name = buf;
411 break;
414 #endif
415 t = sym->type.t;
416 if ((t & VT_BTYPE) == VT_FUNC) {
417 sym_type = STT_FUNC;
418 } else if ((t & VT_BTYPE) == VT_VOID) {
419 sym_type = STT_NOTYPE;
420 } else {
421 sym_type = STT_OBJECT;
423 if (t & (VT_STATIC | VT_INLINE))
424 sym_bind = STB_LOCAL;
425 else
426 sym_bind = STB_GLOBAL;
427 other = 0;
428 #ifdef TCC_TARGET_PE
429 if (sym_type == STT_FUNC && sym->type.ref) {
430 Sym *ref = sym->type.ref;
431 if (ref->a.nodecorate) {
432 can_add_underscore = 0;
434 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
435 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
436 name = buf1;
437 other |= ST_PE_STDCALL;
438 can_add_underscore = 0;
441 #endif
442 if (tcc_state->leading_underscore && can_add_underscore) {
443 buf1[0] = '_';
444 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
445 name = buf1;
447 if (sym->asm_label)
448 name = get_tok_str(sym->asm_label, NULL);
449 info = ELFW(ST_INFO)(sym_bind, sym_type);
450 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
451 } else {
452 esym = elfsym(sym);
453 esym->st_value = value;
454 esym->st_size = size;
455 esym->st_shndx = sh_num;
457 update_storage(sym);
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* Relocation with zero addend (32-bit pointer targets only). */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
493 /* ------------------------------------------------------------------------- */
/* symbol allocator */
/* Allocate a fresh pool of SYM_POOL_NB Sym objects, thread them all
   onto the free list and return its new head.  The pool itself is
   recorded in 'sym_pools' so it can be released at shutdown. */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}
/* Get a Sym from the pool free list, refilling from a new pool when
   empty.  With SYM_DEBUG every Sym is malloc'd individually so memory
   tools can track each one. */
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}
/* Return a Sym to the pool free list (or free it outright when
   SYM_DEBUG individual allocation is in use). */
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
/* push a given symbol on the symbol stack */
/* Push symbol 'v' with 'type', register 'r' and value 'c' on the
   current (local if inside a function, else global) symbol stack and
   record it in the token table for fast lookup. */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        /* same name declared twice in the same scope is an error */
        if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
/* Push symbol 'v' on the global stack and hook it *below* any local
   declarations of the same name in the token chain. */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    s->r = VT_CONST | VT_SYM;
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that sym_identifier will
           point to 's' when popped; happens when called from inline asm */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while(s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            /* unhook from the token chain so lookup no longer sees it */
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
676 /* ------------------------------------------------------------------------- */
/* Materialize a pending comparison result before other code is
   generated, so CPU flags are not clobbered. */
static void vcheck_cmp(void)
{
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real.  All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again. */

    if (vtop->r == VT_CMP && !nocode_wanted)
        gv(RC_INT);
}
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
/* pop stack value */
/* Discard the top value, cleaning up target state it may hold:
   the x87 FP stack on x86, or pending jump chains of a VT_CMP. */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_CMP) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->jtrue);
        gsym(vtop->jfalse);
    }
    vtop--;
}
737 /* push constant of type "type" with useless value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    /* save the entry n-1 below the top, shift the rest down one slot */
    tmp = vtop[-n + 1];
    for(i=-n+1;i!=0;i++)
        vtop[i] = vtop[i+1];
    vtop[0] = tmp;
}

/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    /* save 'e', shift the n-1 entries below it up one slot */
    tmp = *e;
    for(i = 0;i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}

/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1)  [top is right]
*/
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}
843 /* ------------------------------------------------------------------------- */
/* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */

/* called from generators to set the result from relational ops  */
ST_FUNC void vset_VT_CMP(int op)
{
    vtop->r = VT_CMP;
    vtop->cmp_op = op;  /* comparison token, or constant 0/1 */
    vtop->jfalse = 0;   /* no pending jump chains yet */
    vtop->jtrue = 0;
}

/* called once before asking generators to load VT_CMP to a register */
static void vset_VT_JMP(void)
{
    int op = vtop->cmp_op;

    if (vtop->jtrue || vtop->jfalse) {
        /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
        int inv = op & (op < 2); /* small optimization */
        vseti(VT_JMP+inv, gvtst(inv, 0));
    } else {
        /* otherwise convert flags (rsp. 0/1) to register */
        vtop->c.i = op;
        if (op < 2) /* doesn't seem to happen */
            vtop->r = VT_CONST;
    }
}
/* Set CPU Flags, doesn't yet jump */
/* Ensure vtop is a VT_CMP value (comparing against zero when it is
   not), then append jump target 't' on the true chain, or the false
   chain when 'inv' is set. */
static void gvtst_set(int inv, int t)
{
    int *p;

    if (vtop->r != VT_CMP) {
        vpushi(0);
        gen_op(TOK_NE);
        if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
            ;
        else if (vtop->r == VT_CONST)
            vset_VT_CMP(vtop->c.i != 0);
        else
            tcc_error("ICE");
    }
    p = inv ? &vtop->jfalse : &vtop->jtrue;
    *p = gjmp_append(*p, t);
}
/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
/* Pops vtop and returns the (possibly extended) jump chain taken when
   the value is false ('inv' = 0) or true ('inv' = 1). */
static int gvtst(int inv, int t)
{
    int op, u, x;

    gvtst_set(inv, t);
    t = vtop->jtrue, u = vtop->jfalse;
    if (inv)
        x = u, u = t, t = x;  /* swap the two chains */
    op = vtop->cmp_op;

    /* jump to the wanted target */
    if (op > 1)
        t = gjmp_cond(op ^ inv, t);     /* real comparison: cond jump */
    else if (op != inv)
        t = gjmp(t);                    /* known constant: uncond jump */
    /* resolve complementary jumps to here */
    gsym(u);

    vtop--;
    return t;
}
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType *type, Sym *sym)
919 CValue cval;
920 cval.i = 0;
921 vsetc(type, VT_CONST | VT_SYM, &cval);
922 vtop->sym = sym;
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
928 int v;
929 Sym *sym;
931 v = anon_sym++;
932 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
933 sym->type.t |= VT_STATIC;
934 put_extern_sym(sym, sec, offset, size);
935 return sym;
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
941 vpushsym(type, get_sym_ref(type, sec, offset, size));
/* define a new external reference to a symbol 'v' of type 'u' */
/* Find or create the global symbol for token 'v'; a symbol first seen
   in inline asm gets its C type filled in here. */
ST_FUNC Sym *external_global_sym(int v, CType *type)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
    } else if (IS_ASM_SYM(s)) {
        /* keep the extern bit the asm declaration established */
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
965 if (sa1->aligned && !sa->aligned)
966 sa->aligned = sa1->aligned;
967 sa->packed |= sa1->packed;
968 sa->weak |= sa1->weak;
969 if (sa1->visibility != STV_DEFAULT) {
970 int vis = sa->visibility;
971 if (vis == STV_DEFAULT
972 || vis > sa1->visibility)
973 vis = sa1->visibility;
974 sa->visibility = vis;
976 sa->dllexport |= sa1->dllexport;
977 sa->nodecorate |= sa1->nodecorate;
978 sa->dllimport |= sa1->dllimport;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
984 if (fa1->func_call && !fa->func_call)
985 fa->func_call = fa1->func_call;
986 if (fa1->func_type && !fa->func_type)
987 fa->func_type = fa1->func_type;
988 if (fa1->func_args && !fa->func_args)
989 fa->func_args = fa1->func_args;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
995 merge_symattr(&ad->a, &ad1->a);
996 merge_funcattr(&ad->f, &ad1->f);
998 if (ad1->section)
999 ad->section = ad1->section;
1000 if (ad1->alias_target)
1001 ad->alias_target = ad1->alias_target;
1002 if (ad1->asm_label)
1003 ad->asm_label = ad1->asm_label;
1004 if (ad1->attr_mode)
1005 ad->attr_mode = ad1->attr_mode;
/* Merge some type attributes.  */
/* Reconcile a new declaration 'type' with an existing symbol 'sym':
   diagnoses redefinitions/incompatibilities and merges extern,
   static, inline and array-size information. */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        /* a defining declaration: the old one must have been extern */
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds.  */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
             || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
        } else {
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* a prototype with parameters supersedes an old-style one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes.  */
/* Apply a redeclaration's type and attributes to existing symbol
   'sym', then refresh its ELF symbol. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
1086 /* copy sym to other stack */
1087 static Sym *sym_copy(Sym *s0, Sym **ps)
1089 Sym *s;
1090 s = sym_malloc(), *s = *s0;
1091 s->prev = *ps, *ps = s;
1092 if (s->v < SYM_FIRST_ANOM) {
1093 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1094 s->prev_tok = *ps, *ps = s;
1096 return s;
/* copy a list of syms */
/* Replace s0's parameter/field list with copies pushed on '*ps',
   re-linking the copies through their 'next' pointers. */
static void sym_copy_ref(Sym *s0, Sym **ps)
{
    Sym *s, **sp = &s0->type.ref;
    for (s = *sp, *sp = NULL; s; s = s->next)
        sp = &(*sp = sym_copy(s, ps))->next;
}
/* define a new external reference to a symbol 'v' */
/* Find (or create) the global declaration of 'v', merge the new
   declaration's type/attributes into it, and shadow it on the local
   stack when declared inside a function. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s; int bt;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;   /* skip local shadows */

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        bt = s->type.t & (VT_BTYPE|VT_ARRAY);
        /* copy type to the global stack also */
        if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
        bt = s->type.t & VT_BTYPE;
    }
    /* push variables to local scope if any */
    if (local_stack && bt != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1138 /* push a reference to global symbol v */
1139 ST_FUNC void vpush_global_sym(CType *type, int v)
1141 vpushsym(type, external_global_sym(v, type));
1144 /* save registers up to (vtop - n) stack entry */
1145 ST_FUNC void save_regs(int n)
1147 SValue *p, *p1;
1148 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1149 save_reg(p->r);
1152 /* save r to the memory stack, and mark it as being free */
1153 ST_FUNC void save_reg(int r)
1155 save_reg_upstack(r, 0);
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, saved, size, align;
    SValue *p, *p1, sv;
    CType *type;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;   /* not a register */
    if (nocode_wanted)
        return;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
                    /* the register holds an address or a word-sized
                       integer: spill it as a pointer/int */
#if PTR_SIZE == 8
                    type = &char_pointer_type;
#else
                    type = &int_type;
#endif
                size = type_size(type, &align);
                l=get_temp_local_var(size,align);
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
#if PTR_SIZE == 4
                /* special long long case */
                if ((type->t & VT_BTYPE) == VT_LLONG) {
                    sv.c.i += 4;
                    store(p->r2, &sv);
                }
#endif
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int reg;
    SValue *sv;

    for (reg = 0; reg < NB_REGS; reg++) {
        int refs;
        if (!(reg_classes[reg] & rc2))
            continue;
        /* count how many value stack entries reference this register
           (either as primary register or as r2 of a long long) */
        refs = 0;
        for (sv = vstack; sv <= vtop; sv++) {
            if ((sv->r & VT_VALMASK) == reg ||
                (sv->r2 & VT_VALMASK) == reg)
                refs++;
        }
        if (refs <= 1)
            return reg;
    }
    /* no lightly-used register in rc2: fall back to generic allocation */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when no code is generated nothing can be live,
               so any register of the class will do */
            if (nocode_wanted)
                return r;
            /* a register is free only if no stack value uses it,
               either directly or as the second (r2) register */
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1291 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1292 static int get_temp_local_var(int size,int align){
1293 int i;
1294 struct temp_local_variable *temp_var;
1295 int found_var;
1296 SValue *p;
1297 int r;
1298 char free;
1299 char found;
1300 found=0;
1301 for(i=0;i<nb_temp_local_vars;i++){
1302 temp_var=&arr_temp_local_vars[i];
1303 if(temp_var->size<size||align!=temp_var->align){
1304 continue;
1306 /*check if temp_var is free*/
1307 free=1;
1308 for(p=vstack;p<=vtop;p++) {
1309 r=p->r&VT_VALMASK;
1310 if(r==VT_LOCAL||r==VT_LLOCAL){
1311 if(p->c.i==temp_var->location){
1312 free=0;
1313 break;
1317 if(free){
1318 found_var=temp_var->location;
1319 found=1;
1320 break;
1323 if(!found){
1324 loc = (loc - size) & -align;
1325 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1326 temp_var=&arr_temp_local_vars[i];
1327 temp_var->location=loc;
1328 temp_var->size=size;
1329 temp_var->align=align;
1330 nb_temp_local_vars++;
1332 found_var=loc;
1334 return found_var;
1337 static void clear_temp_local_var_list(){
1338 nb_temp_local_vars=0;
1341 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1342 if needed */
1343 static void move_reg(int r, int s, int t)
1345 SValue sv;
1347 if (r != s) {
1348 save_reg(r);
1349 sv.type.t = t;
1350 sv.type.ref = NULL;
1351 sv.r = s;
1352 sv.c.i = 0;
1353 load(r, &sv);
1357 /* get address of vtop (vtop MUST BE an lvalue) */
1358 ST_FUNC void gaddrof(void)
1360 vtop->r &= ~VT_LVAL;
1361 /* tricky: if saved lvalue, then we can go back to lvalue */
1362 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1363 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* remember the lvalue access flags so they can be
               restored after the bounded-pointer rewrite below */
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            /* add offset 0 through the bound-checked add to tag the
               pointer as bounded */
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
/* advance the address on top of the value stack by 'o' bytes and
   retype it as an unsigned byte lvalue; helper for the packed
   bitfield load/store routines which access memory byte by byte */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushi(o);
    gen_op('+');
    /* the resulting lvalue is accessed as an unsigned char */
    vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
        | (VT_BYTE|VT_UNSIGNED);
    vtop->r = (vtop->r & ~VT_LVAL_TYPE)
        | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
}
/* single-byte load mode for packed or otherwise unaligned bitfields */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    /* the address may be used again below it on the stack */
    save_reg_upstack(vtop->r, 1);
    /* B = accumulated result (starts at 0), X = byte address */
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    /* read the field one byte at a time, ORing each byte's
       contribution into the accumulator at the right position */
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        /* n = number of field bits available in this byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        /* after the first byte, advance one byte at a time */
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    /* for signed fields, sign-extend via shift left then
       arithmetic shift right */
    if (!(type->t & VT_UNSIGNED)) {
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;

    /* c != 0 when the value to store is a plain constant (no need
       to materialize it in a register for duplication) */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    /* write the field one byte at a time with a read-modify-write
       of each affected byte */
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        /* n = number of field bits landing in this byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* partial byte: mask the new bits in and keep the
               surrounding bits of the existing byte */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            /* avoid sign-extension surprises when the mask's top
               bit is set */
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1477 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1479 int t;
1480 if (0 == sv->type.ref)
1481 return 0;
1482 t = sv->type.ref->auxtype;
1483 if (t != -1 && t != VT_STRUCT) {
1484 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1485 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1487 return t;
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
ST_FUNC int gv(int rc)
{
    int r, bit_pos, bit_size, size, align, rc2;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        /* _Bool bitfields are always treated as unsigned */
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* packed/unaligned bitfield: byte-wise load */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        /* recurse now that the bitfield has become a plain value */
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            unsigned long offset;
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(data_section, size, align);
            vpush_ref(&vtop->type, data_section, offset, size);
            vswap();
            init_putv(&vtop->type, data_section, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        r = vtop->r & VT_VALMASK;
        /* rc2 is the class of the second register of a two-register
           value (long long on 32-bit, qlong/qfloat on 64-bit) */
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
#ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif
#endif
        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
         || (vtop->r & VT_LVAL)
         || !(reg_classes[r] & rc)
#if PTR_SIZE == 8
         || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
         || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
         || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            )
        {
            r = get_reg(rc);
#if PTR_SIZE == 8
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if PTR_SIZE == 4
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
#if 0
                    save_regs(1);
#else
                    /* lvalue_save: save only if used further down the stack */
                    save_reg_upstack(vtop->r, 1);
#endif
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                /* materialize a pending comparison as a jump-based
                   value before loading it */
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            /* gv(rc2) may have spilled the first value: redo it */
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
1703 #ifndef TCC_TARGET_ARM64
1704 /* wrapper around RC_FRET to return a register by type */
1705 static int rc_fret(int t)
1707 #ifdef TCC_TARGET_X86_64
1708 if (t == VT_LDOUBLE) {
1709 return RC_ST0;
1711 #endif
1712 return RC_FRET;
1714 #endif
1716 /* wrapper around REG_FRET to return a register by type */
1717 static int reg_fret(int t)
1719 #ifdef TCC_TARGET_X86_64
1720 if (t == VT_LDOUBLE) {
1721 return TREG_ST0;
1723 #endif
1724 return REG_FRET;
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
ST_FUNC void lexpand(void)
{
    int u, v;
    /* keep the sign flags so both halves inherit them */
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: the duplicate keeps the high 32 bits */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* simple addressable lvalue: the duplicate addresses the
           high word 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* general case: force into a register pair and split it */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    /* both halves become plain ints (with the saved signedness) */
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
static void lbuild(int t)
{
    /* low word in vtop[-1], high word in vtop[0]; pair them into
       one SValue of type 't' using r/r2 */
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int rc, t, r, r1;
    SValue sv;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* extract the bitfield first so we duplicate a plain value */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* split into two ints, duplicate each half, then rebuild
           both long longs */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
    } else
#endif
    {
        /* duplicate value */
        rc = RC_INT;
        sv.type.t = VT_INT;
        if (is_float(t)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((t & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            }
#endif
            sv.type.t = t;
        }
        r = gv(rc);
        r1 = get_reg(rc);
        sv.r = r;
        sv.c.i = 0;
        load(r1, &sv); /* move r to r1 */
        vdup();
        /* duplicates value */
        if (r != r1)
            vtop->r = r1;
    }
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_LRET;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        /* division/modulo are delegated to libgcc-style helpers */
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod returns the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_global_sym(&func_old_type, func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply from 32-bit parts:
               result = (L1*L2) + ((H1*L2 + H2*L1) << 32) */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops operate on the halves independently */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift fills the other word with
                       copies of the sign bit */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* signed 64-bit division implemented on unsigned operands: divide
   the magnitudes, then negate the quotient when the operand signs
   differ (avoids signed-overflow UB during constant folding) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;
    uint64_t ub = (b >> 63) ? -b : b;
    uint64_t q = ua / ub;

    return ((a ^ b) >> 63) ? -q : q;
}
/* signed 64-bit "<" on unsigned representations: XORing the sign bit
   maps signed order onto unsigned order */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;

    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    /* c1/c2: operand is a plain compile-time constant */
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    /* normalize 32-bit operands: truncate and sign-extend according
       to the declared signedness */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic right shift without relying on
               implementation-defined signed shifts */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded result to the operand width */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*')) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                /* power of two: compute log2(l2) */
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31. Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
}
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        /* read the operands at the declared precision */
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }

        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        if (!ieee_finite(f1) || !ieee_finite(f2))
            goto general_case;

        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold. */
                if (!const_wanted)
                    goto general_case;
            }
            f1 /= f2;
            break;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        /* XXX: overflow test ? */
        /* write the folded result back at the declared precision */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
        vtop--;
    } else {
    general_case:
        gen_opf(op);
    }
}
2275 static int pointed_size(CType *type)
2277 int align;
2278 return type_size(pointed_type(type), &align);
/* push the runtime size of the type 'type' points to; used for VLA
   element types whose size is only known at run time */
static void vla_runtime_pointed_size(CType *type)
{
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
}
2287 static inline int is_null_pointer(SValue *p)
2289 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2290 return 0;
2291 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2292 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2293 ((p->type.t & VT_BTYPE) == VT_PTR &&
2294 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2295 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2296 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2299 static inline int is_integer_btype(int bt)
2301 return (bt == VT_BYTE || bt == VT_SHORT ||
2302 bt == VT_INT || bt == VT_LLONG);
/* check types for comparison or subtraction of pointers */
static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
{
    CType *type1, *type2, tmp_type1, tmp_type2;
    int bt1, bt2;

    /* null pointers are accepted for all comparisons as gcc */
    if (is_null_pointer(p1) || is_null_pointer(p2))
        return;
    type1 = &p1->type;
    type2 = &p2->type;
    bt1 = type1->t & VT_BTYPE;
    bt2 = type2->t & VT_BTYPE;
    /* accept comparison between pointer and integer with a warning */
    if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
        /* no warning for && and ||: operands are tested, not compared */
        if (op != TOK_LOR && op != TOK_LAND )
            tcc_warning("comparison between pointer and integer");
        return;
    }

    /* both must be pointers or implicit function pointers */
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
    } else if (bt1 != VT_FUNC)
        goto invalid_operands;

    if (bt2 == VT_PTR) {
        type2 = pointed_type(type2);
    } else if (bt2 != VT_FUNC) {
    invalid_operands:
        tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
    }
    /* void * is compatible with any object pointer */
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID)
        return;
    tmp_type1 = *type1;
    tmp_type2 = *type2;
    /* ignore signedness and qualifiers for the compatibility test */
    tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        /* gcc-like error if '-' is used */
        if (op == '-')
            goto invalid_operands;
        else
            tcc_warning("comparison of distinct pointer types lacks a cast");
    }
}
2353 /* generic gen_op: handles types problems */
2354 ST_FUNC void gen_op(int op)
2356 int u, t1, t2, bt1, bt2, t;
2357 CType type1;
2359 redo:
2360 t1 = vtop[-1].type.t;
2361 t2 = vtop[0].type.t;
2362 bt1 = t1 & VT_BTYPE;
2363 bt2 = t2 & VT_BTYPE;
2365 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2366 tcc_error("operation on a struct");
2367 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2368 if (bt2 == VT_FUNC) {
2369 mk_pointer(&vtop->type);
2370 gaddrof();
2372 if (bt1 == VT_FUNC) {
2373 vswap();
2374 mk_pointer(&vtop->type);
2375 gaddrof();
2376 vswap();
2378 goto redo;
2379 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2380 /* at least one operand is a pointer */
2381 /* relational op: must be both pointers */
2382 if (op >= TOK_ULT && op <= TOK_LOR) {
2383 check_comparison_pointer_types(vtop - 1, vtop, op);
2384 /* pointers are handled are unsigned */
2385 #if PTR_SIZE == 8
2386 t = VT_LLONG | VT_UNSIGNED;
2387 #else
2388 t = VT_INT | VT_UNSIGNED;
2389 #endif
2390 goto std_op;
2392 /* if both pointers, then it must be the '-' op */
2393 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2394 if (op != '-')
2395 tcc_error("cannot use pointers here");
2396 check_comparison_pointer_types(vtop - 1, vtop, op);
2397 /* XXX: check that types are compatible */
2398 if (vtop[-1].type.t & VT_VLA) {
2399 vla_runtime_pointed_size(&vtop[-1].type);
2400 } else {
2401 vpushi(pointed_size(&vtop[-1].type));
2403 vrott(3);
2404 gen_opic(op);
2405 vtop->type.t = ptrdiff_type.t;
2406 vswap();
2407 gen_op(TOK_PDIV);
2408 } else {
2409 /* exactly one pointer : must be '+' or '-'. */
2410 if (op != '-' && op != '+')
2411 tcc_error("cannot use pointers here");
2412 /* Put pointer as first operand */
2413 if (bt2 == VT_PTR) {
2414 vswap();
2415 t = t1, t1 = t2, t2 = t;
2417 #if PTR_SIZE == 4
2418 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2419 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2420 gen_cast_s(VT_INT);
2421 #endif
2422 type1 = vtop[-1].type;
2423 type1.t &= ~VT_ARRAY;
2424 if (vtop[-1].type.t & VT_VLA)
2425 vla_runtime_pointed_size(&vtop[-1].type);
2426 else {
2427 u = pointed_size(&vtop[-1].type);
2428 if (u < 0)
2429 tcc_error("unknown array element size");
2430 #if PTR_SIZE == 8
2431 vpushll(u);
2432 #else
2433 /* XXX: cast to int ? (long long case) */
2434 vpushi(u);
2435 #endif
2437 gen_op('*');
2438 #if 0
2439 /* #ifdef CONFIG_TCC_BCHECK
2440 The main reason to removing this code:
2441 #include <stdio.h>
2442 int main ()
2444 int v[10];
2445 int i = 10;
2446 int j = 9;
2447 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2448 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2450 When this code is on. then the output looks like
2451 v+i-j = 0xfffffffe
2452 v+(i-j) = 0xbff84000
2454 /* if evaluating constant expression, no code should be
2455 generated, so no bound check */
2456 if (tcc_state->do_bounds_check && !const_wanted) {
2457 /* if bounded pointers, we generate a special code to
2458 test bounds */
2459 if (op == '-') {
2460 vpushi(0);
2461 vswap();
2462 gen_op('-');
2464 gen_bounded_ptr_add();
2465 } else
2466 #endif
2468 gen_opic(op);
2470 /* put again type if gen_opic() swaped operands */
2471 vtop->type = type1;
2473 } else if (is_float(bt1) || is_float(bt2)) {
2474 /* compute bigger type and do implicit casts */
2475 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2476 t = VT_LDOUBLE;
2477 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2478 t = VT_DOUBLE;
2479 } else {
2480 t = VT_FLOAT;
2482 /* floats can only be used for a few operations */
2483 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2484 (op < TOK_ULT || op > TOK_GT))
2485 tcc_error("invalid operands for binary operation");
2486 goto std_op;
2487 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2488 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2489 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2490 t |= VT_UNSIGNED;
2491 t |= (VT_LONG & t1);
2492 goto std_op;
2493 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2494 /* cast to biggest op */
2495 t = VT_LLONG | VT_LONG;
2496 if (bt1 == VT_LLONG)
2497 t &= t1;
2498 if (bt2 == VT_LLONG)
2499 t &= t2;
2500 /* convert to unsigned if it does not fit in a long long */
2501 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2502 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2503 t |= VT_UNSIGNED;
2504 goto std_op;
2505 } else {
2506 /* integer operations */
2507 t = VT_INT | (VT_LONG & (t1 | t2));
2508 /* convert to unsigned if it does not fit in an integer */
2509 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2510 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2511 t |= VT_UNSIGNED;
2512 std_op:
2513 /* XXX: currently, some unsigned operations are explicit, so
2514 we modify them here */
2515 if (t & VT_UNSIGNED) {
2516 if (op == TOK_SAR)
2517 op = TOK_SHR;
2518 else if (op == '/')
2519 op = TOK_UDIV;
2520 else if (op == '%')
2521 op = TOK_UMOD;
2522 else if (op == TOK_LT)
2523 op = TOK_ULT;
2524 else if (op == TOK_GT)
2525 op = TOK_UGT;
2526 else if (op == TOK_LE)
2527 op = TOK_ULE;
2528 else if (op == TOK_GE)
2529 op = TOK_UGE;
2531 vswap();
2532 type1.t = t;
2533 type1.ref = NULL;
2534 gen_cast(&type1);
2535 vswap();
2536 /* special case for shifts and long long: we keep the shift as
2537 an integer */
2538 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2539 type1.t = VT_INT;
2540 gen_cast(&type1);
2541 if (is_float(t))
2542 gen_opif(op);
2543 else
2544 gen_opic(op);
2545 if (op >= TOK_ULT && op <= TOK_GT) {
2546 /* relational op: the result is an int */
2547 vtop->type.t = VT_INT;
2548 } else {
2549 vtop->type.t = t;
2552 // Make sure that we have converted to an rvalue:
2553 if (vtop->r & VT_LVAL)
2554 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2557 #ifndef TCC_TARGET_ARM
2558 /* generic itof for unsigned long long case */
/* Convert the integer on top of the value stack to float type 't'.
   On arm64/riscv64 the backend handles every case natively; elsewhere
   the unsigned-long-long source case is routed through the runtime
   helpers __floatundisf/__floatundidf/__floatundixf, since most 32-bit
   targets have no native u64 -> float instruction. */
2559 static void gen_cvt_itof1(int t)
2561 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2562 gen_cvt_itof(t);
2563 #else
2564 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2565 (VT_LLONG | VT_UNSIGNED)) {
/* pick the helper matching the destination float type */
2567 if (t == VT_FLOAT)
2568 vpush_global_sym(&func_old_type, TOK___floatundisf);
2569 #if LDOUBLE_SIZE != 8
2570 else if (t == VT_LDOUBLE)
2571 vpush_global_sym(&func_old_type, TOK___floatundixf);
2572 #endif
2573 else
2574 vpush_global_sym(&func_old_type, TOK___floatundidf);
2575 vrott(2);
2576 gfunc_call(1);
/* materialize the call result: push a slot and mark it as living in
   the float return register for type 't' */
2577 vpushi(0);
2578 vtop->r = reg_fret(t);
2579 } else {
2580 gen_cvt_itof(t);
2582 #endif
2584 #endif
2586 /* generic ftoi for unsigned long long case */
/* Convert the float on top of the value stack to integer type 't'.
   Mirrors gen_cvt_itof1: arm64/riscv64 convert natively; elsewhere
   float -> unsigned long long goes through the __fixuns*di runtime
   helpers, with the 64-bit result returned in the IRET/LRET register
   pair. */
2587 static void gen_cvt_ftoi1(int t)
2589 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2590 gen_cvt_ftoi(t);
2591 #else
2592 int st;
2594 if (t == (VT_LLONG | VT_UNSIGNED)) {
2595 /* not handled natively */
/* select the helper from the SOURCE float type */
2596 st = vtop->type.t & VT_BTYPE;
2597 if (st == VT_FLOAT)
2598 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2599 #if LDOUBLE_SIZE != 8
2600 else if (st == VT_LDOUBLE)
2601 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2602 #endif
2603 else
2604 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2605 vrott(2);
2606 gfunc_call(1);
/* result occupies the integer return register pair (low/high word) */
2607 vpushi(0);
2608 vtop->r = REG_IRET;
2609 vtop->r2 = REG_LRET;
2610 } else {
2611 gen_cvt_ftoi(t);
2613 #endif
2616 /* force char or short cast */
/* Truncate the value on top of the value stack to char/short width as
   requested by type 't': unsigned targets are masked, signed targets
   are sign-extended with a shift-left / arithmetic-shift-right pair. */
2617 static void force_charshort_cast(int t)
2619 int bits, dbt;
2621 /* cannot cast static initializers */
2622 if (STATIC_DATA_WANTED)
2623 return;
2625 dbt = t & VT_BTYPE;
2626 /* XXX: add optimization if lvalue : just change type and offset */
2627 if (dbt == VT_BYTE)
2628 bits = 8;
2629 else
2630 bits = 16;
2631 if (t & VT_UNSIGNED) {
/* unsigned: mask down to the target width */
2632 vpushi((1 << bits) - 1);
2633 gen_op('&');
2634 } else {
/* signed: shift the target bits up to the top of the word (32 or 64
   bit, depending on the current value's type), then shift back with
   SAR so the sign bit is replicated */
2635 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2636 bits = 64 - bits;
2637 else
2638 bits = 32 - bits;
2639 vpushi(bits);
2640 gen_op(TOK_SHL);
2641 /* result must be signed or the SAR is converted to an SHL
2642 This was not the case when "t" was a signed short
2643 and the last value on the stack was an unsigned int */
2644 vtop->type.t &= ~VT_UNSIGNED;
2645 vpushi(bits);
2646 gen_op(TOK_SAR);
2650 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2651 static void gen_cast_s(int t)
2653 CType type;
2654 type.t = t;
2655 type.ref = NULL;
2656 gen_cast(&type);
/* Cast the value on top of the value stack ('vtop') to 'type'.
   Constant operands are converted at compile time; otherwise the
   appropriate conversion code is emitted.  Qualifiers and array-ness
   are stripped from the resulting value type at the end. */
2659 static void gen_cast(CType *type)
2661 int sbt, dbt, sf, df, c, p;
2663 /* special delayed cast for char/short */
2664 /* XXX: in some cases (multiple cascaded casts), it may still
2665 be incorrect */
2666 if (vtop->r & VT_MUSTCAST) {
2667 vtop->r &= ~VT_MUSTCAST;
2668 force_charshort_cast(vtop->type.t);
2671 /* bitfields first get cast to ints */
2672 if (vtop->type.t & VT_BITFIELD) {
2673 gv(RC_INT);
/* 'c' below means: value is a plain compile-time constant;
   'p' means: constant address of a symbol (VT_SYM) */
2676 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2677 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2679 if (sbt != dbt) {
2680 sf = is_float(sbt);
2681 df = is_float(dbt);
2682 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2683 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2684 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2685 c &= dbt != VT_LDOUBLE;
2686 #endif
2687 if (c) {
2688 /* constant case: we can do it now */
2689 /* XXX: in ISOC, cannot do it if error in convert */
2690 if (sbt == VT_FLOAT)
2691 vtop->c.ld = vtop->c.f;
2692 else if (sbt == VT_DOUBLE)
2693 vtop->c.ld = vtop->c.d;
2695 if (df) {
2696 if ((sbt & VT_BTYPE) == VT_LLONG) {
2697 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2698 vtop->c.ld = vtop->c.i;
2699 else
2700 vtop->c.ld = -(long double)-vtop->c.i;
2701 } else if(!sf) {
2702 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2703 vtop->c.ld = (uint32_t)vtop->c.i;
2704 else
2705 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2708 if (dbt == VT_FLOAT)
2709 vtop->c.f = (float)vtop->c.ld;
2710 else if (dbt == VT_DOUBLE)
2711 vtop->c.d = (double)vtop->c.ld;
2712 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2713 vtop->c.i = vtop->c.ld;
2714 } else if (sf && dbt == VT_BOOL) {
2715 vtop->c.i = (vtop->c.ld != 0);
2716 } else {
/* integer/pointer -> integer: normalize the source into c.i, then
   truncate/sign-extend to the destination width */
2717 if(sf)
2718 vtop->c.i = vtop->c.ld;
2719 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2721 else if (sbt & VT_UNSIGNED)
2722 vtop->c.i = (uint32_t)vtop->c.i;
2723 #if PTR_SIZE == 8
2724 else if (sbt == VT_PTR)
2726 #endif
2727 else if (sbt != VT_LLONG)
2728 vtop->c.i = ((uint32_t)vtop->c.i |
2729 -(vtop->c.i & 0x80000000));
2731 if (dbt == (VT_LLONG|VT_UNSIGNED))
2733 else if (dbt == VT_BOOL)
2734 vtop->c.i = (vtop->c.i != 0);
2735 #if PTR_SIZE == 8
2736 else if (dbt == VT_PTR)
2738 #endif
2739 else if (dbt != VT_LLONG) {
2740 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2741 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2742 0xffffffff);
2743 vtop->c.i &= m;
2744 if (!(dbt & VT_UNSIGNED))
2745 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
/* a symbol address is never NULL, so (sym != 0) folds to 1 */
2748 } else if (p && dbt == VT_BOOL) {
2749 vtop->r = VT_CONST;
2750 vtop->c.i = 1;
2751 } else {
2752 /* non constant case: generate code */
2753 if (sf && df) {
2754 /* convert from fp to fp */
2755 gen_cvt_ftof(dbt);
2756 } else if (df) {
2757 /* convert int to fp */
2758 gen_cvt_itof1(dbt);
2759 } else if (sf) {
2760 /* convert fp to int */
2761 if (dbt == VT_BOOL) {
2762 vpushi(0);
2763 gen_op(TOK_NE);
2764 } else {
2765 /* we handle char/short/etc... with generic code */
2766 if (dbt != (VT_INT | VT_UNSIGNED) &&
2767 dbt != (VT_LLONG | VT_UNSIGNED) &&
2768 dbt != VT_LLONG)
2769 dbt = VT_INT;
2770 gen_cvt_ftoi1(dbt);
2771 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2772 /* additional cast for char/short... */
2773 vtop->type.t = dbt;
2774 gen_cast(type);
2777 #if PTR_SIZE == 4
2778 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2779 if ((sbt & VT_BTYPE) != VT_LLONG) {
2780 /* scalar to long long */
2781 /* machine independent conversion */
2782 gv(RC_INT);
2783 /* generate high word */
2784 if (sbt == (VT_INT | VT_UNSIGNED)) {
2785 vpushi(0);
2786 gv(RC_INT);
2787 } else {
2788 if (sbt == VT_PTR) {
2789 /* cast from pointer to int before we apply
2790 shift operation, which pointers don't support*/
2791 gen_cast_s(VT_INT);
2793 gv_dup();
2794 vpushi(31);
2795 gen_op(TOK_SAR);
2797 /* patch second register */
2798 vtop[-1].r2 = vtop->r;
2799 vpop();
2801 #else
2802 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2803 (dbt & VT_BTYPE) == VT_PTR ||
2804 (dbt & VT_BTYPE) == VT_FUNC) {
2805 if ((sbt & VT_BTYPE) != VT_LLONG &&
2806 (sbt & VT_BTYPE) != VT_PTR &&
2807 (sbt & VT_BTYPE) != VT_FUNC) {
2808 /* need to convert from 32bit to 64bit */
2809 gv(RC_INT);
2810 if (sbt != (VT_INT | VT_UNSIGNED)) {
2811 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2812 gen_cvt_sxtw();
2813 #elif defined(TCC_TARGET_X86_64)
2814 int r = gv(RC_INT);
2815 /* x86_64 specific: movslq */
2816 o(0x6348);
2817 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2818 #else
2819 #error
2820 #endif
2823 #endif
2824 } else if (dbt == VT_BOOL) {
2825 /* scalar to bool */
2826 vpushi(0);
2827 gen_op(TOK_NE);
2828 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2829 (dbt & VT_BTYPE) == VT_SHORT) {
2830 if (sbt == VT_PTR) {
2831 vtop->type.t = VT_INT;
2832 tcc_warning("nonportable conversion from pointer to char/short");
2834 force_charshort_cast(dbt);
2835 } else if ((dbt & VT_BTYPE) == VT_INT) {
2836 /* scalar to int */
2837 if ((sbt & VT_BTYPE) == VT_LLONG) {
2838 #if PTR_SIZE == 4
2839 /* from long long: just take low order word */
2840 lexpand();
2841 vpop();
2842 #else
2843 /* XXX some architectures (e.g. risc-v) would like it
2844 better for this merely being a 32-to-64 sign or zero-
2845 extension. */
2846 vpushi(0xffffffff);
2847 vtop->type.t |= VT_UNSIGNED;
2848 gen_op('&');
2849 #endif
2851 /* if lvalue and single word type, nothing to do because
2852 the lvalue already contains the real type size (see
2853 VT_LVAL_xxx constants) */
2856 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2857 /* if we are casting between pointer types,
2858 we must update the VT_LVAL_xxx size */
2859 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2860 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2862 vtop->type = *type;
2863 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2866 /* return type size as known at compile time. Put alignment at 'a' */
2867 ST_FUNC int type_size(CType *type, int *a)
2869 Sym *s;
2870 int bt;
2872 bt = type->t & VT_BTYPE;
2873 if (bt == VT_STRUCT) {
2874 /* struct/union */
2875 s = type->ref;
2876 *a = s->r;
2877 return s->c;
2878 } else if (bt == VT_PTR) {
2879 if (type->t & VT_ARRAY) {
2880 int ts;
2882 s = type->ref;
2883 ts = type_size(&s->type, a);
2885 if (ts < 0 && s->c < 0)
2886 ts = -ts;
2888 return ts * s->c;
2889 } else {
2890 *a = PTR_SIZE;
2891 return PTR_SIZE;
2893 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2894 return -1; /* incomplete enum */
2895 } else if (bt == VT_LDOUBLE) {
2896 *a = LDOUBLE_ALIGN;
2897 return LDOUBLE_SIZE;
2898 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2899 #ifdef TCC_TARGET_I386
2900 #ifdef TCC_TARGET_PE
2901 *a = 8;
2902 #else
2903 *a = 4;
2904 #endif
2905 #elif defined(TCC_TARGET_ARM)
2906 #ifdef TCC_ARM_EABI
2907 *a = 8;
2908 #else
2909 *a = 4;
2910 #endif
2911 #else
2912 *a = 8;
2913 #endif
2914 return 8;
2915 } else if (bt == VT_INT || bt == VT_FLOAT) {
2916 *a = 4;
2917 return 4;
2918 } else if (bt == VT_SHORT) {
2919 *a = 2;
2920 return 2;
2921 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2922 *a = 8;
2923 return 16;
2924 } else {
2925 /* char, void, function, _Bool */
2926 *a = 1;
2927 return 1;
2931 /* push type size as known at runtime time on top of value stack. Put
2932 alignment at 'a' */
2933 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2935 if (type->t & VT_VLA) {
2936 type_size(&type->ref->type, a);
2937 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2938 } else {
2939 vpushi(type_size(type, a));
2943 /* return the pointed type of t */
2944 static inline CType *pointed_type(CType *type)
2946 return &type->ref->type;
2949 /* modify type so that its it is a pointer to type. */
2950 ST_FUNC void mk_pointer(CType *type)
2952 Sym *s;
2953 s = sym_push(SYM_FIELD, type, 0, -1);
2954 type->t = VT_PTR | (type->t & VT_STORAGE);
2955 type->ref = s;
2958 /* compare function types. OLD functions match any new functions */
2959 static int is_compatible_func(CType *type1, CType *type2)
2961 Sym *s1, *s2;
2963 s1 = type1->ref;
2964 s2 = type2->ref;
2965 if (s1->f.func_call != s2->f.func_call)
2966 return 0;
2967 if (s1->f.func_type != s2->f.func_type
2968 && s1->f.func_type != FUNC_OLD
2969 && s2->f.func_type != FUNC_OLD)
2970 return 0;
2971 /* we should check the function return type for FUNC_OLD too
2972 but that causes problems with the internally used support
2973 functions such as TOK_memmove */
2974 if (s1->f.func_type == FUNC_OLD && !s1->next)
2975 return 1;
2976 if (s2->f.func_type == FUNC_OLD && !s2->next)
2977 return 1;
2978 for (;;) {
2979 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2980 return 0;
2981 s1 = s1->next;
2982 s2 = s2->next;
2983 if (!s1)
2984 return !s2;
2985 if (!s2)
2986 return 0;
2990 /* return true if type1 and type2 are the same. If unqualified is
2991 true, qualifiers on the types are ignored.
2993 static int compare_types(CType *type1, CType *type2, int unqualified)
2995 int bt1, t1, t2;
2997 t1 = type1->t & VT_TYPE;
2998 t2 = type2->t & VT_TYPE;
2999 if (unqualified) {
3000 /* strip qualifiers before comparing */
3001 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3002 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3005 /* Default Vs explicit signedness only matters for char */
3006 if ((t1 & VT_BTYPE) != VT_BYTE) {
3007 t1 &= ~VT_DEFSIGN;
3008 t2 &= ~VT_DEFSIGN;
3010 /* XXX: bitfields ? */
3011 if (t1 != t2)
3012 return 0;
3014 if ((t1 & VT_ARRAY)
3015 && !(type1->ref->c < 0
3016 || type2->ref->c < 0
3017 || type1->ref->c == type2->ref->c))
3018 return 0;
3020 /* test more complicated cases */
3021 bt1 = t1 & VT_BTYPE;
3022 if (bt1 == VT_PTR) {
3023 type1 = pointed_type(type1);
3024 type2 = pointed_type(type2);
3025 return is_compatible_types(type1, type2);
3026 } else if (bt1 == VT_STRUCT) {
3027 return (type1->ref == type2->ref);
3028 } else if (bt1 == VT_FUNC) {
3029 return is_compatible_func(type1, type2);
3030 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3031 return type1->ref == type2->ref;
3032 } else {
3033 return 1;
3037 /* return true if type1 and type2 are exactly the same (including
3038 qualifiers).
3040 static int is_compatible_types(CType *type1, CType *type2)
3042 return compare_types(type1,type2,0);
3045 /* return true if type1 and type2 are the same (ignoring qualifiers).
3047 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3049 return compare_types(type1,type2,1);
3052 /* print a type. If 'varstr' is not NULL, then the variable is also
3053 printed in the type */
3054 /* XXX: union */
3055 /* XXX: add array and function pointers */
3056 static void type_to_str(char *buf, int buf_size,
3057 CType *type, const char *varstr)
3059 int bt, v, t;
3060 Sym *s, *sa;
3061 char buf1[256];
3062 const char *tstr;
3064 t = type->t;
3065 bt = t & VT_BTYPE;
3066 buf[0] = '\0';
3068 if (t & VT_EXTERN)
3069 pstrcat(buf, buf_size, "extern ");
3070 if (t & VT_STATIC)
3071 pstrcat(buf, buf_size, "static ");
3072 if (t & VT_TYPEDEF)
3073 pstrcat(buf, buf_size, "typedef ");
3074 if (t & VT_INLINE)
3075 pstrcat(buf, buf_size, "inline ");
3076 if (t & VT_VOLATILE)
3077 pstrcat(buf, buf_size, "volatile ");
3078 if (t & VT_CONSTANT)
3079 pstrcat(buf, buf_size, "const ");
3081 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3082 || ((t & VT_UNSIGNED)
3083 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3084 && !IS_ENUM(t)
3086 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3088 buf_size -= strlen(buf);
3089 buf += strlen(buf);
3091 switch(bt) {
3092 case VT_VOID:
3093 tstr = "void";
3094 goto add_tstr;
3095 case VT_BOOL:
3096 tstr = "_Bool";
3097 goto add_tstr;
3098 case VT_BYTE:
3099 tstr = "char";
3100 goto add_tstr;
3101 case VT_SHORT:
3102 tstr = "short";
3103 goto add_tstr;
3104 case VT_INT:
3105 tstr = "int";
3106 goto maybe_long;
3107 case VT_LLONG:
3108 tstr = "long long";
3109 maybe_long:
3110 if (t & VT_LONG)
3111 tstr = "long";
3112 if (!IS_ENUM(t))
3113 goto add_tstr;
3114 tstr = "enum ";
3115 goto tstruct;
3116 case VT_FLOAT:
3117 tstr = "float";
3118 goto add_tstr;
3119 case VT_DOUBLE:
3120 tstr = "double";
3121 goto add_tstr;
3122 case VT_LDOUBLE:
3123 tstr = "long double";
3124 add_tstr:
3125 pstrcat(buf, buf_size, tstr);
3126 break;
3127 case VT_STRUCT:
3128 tstr = "struct ";
3129 if (IS_UNION(t))
3130 tstr = "union ";
3131 tstruct:
3132 pstrcat(buf, buf_size, tstr);
3133 v = type->ref->v & ~SYM_STRUCT;
3134 if (v >= SYM_FIRST_ANOM)
3135 pstrcat(buf, buf_size, "<anonymous>");
3136 else
3137 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3138 break;
3139 case VT_FUNC:
3140 s = type->ref;
3141 buf1[0]=0;
3142 if (varstr && '*' == *varstr) {
3143 pstrcat(buf1, sizeof(buf1), "(");
3144 pstrcat(buf1, sizeof(buf1), varstr);
3145 pstrcat(buf1, sizeof(buf1), ")");
3147 pstrcat(buf1, buf_size, "(");
3148 sa = s->next;
3149 while (sa != NULL) {
3150 char buf2[256];
3151 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3152 pstrcat(buf1, sizeof(buf1), buf2);
3153 sa = sa->next;
3154 if (sa)
3155 pstrcat(buf1, sizeof(buf1), ", ");
3157 if (s->f.func_type == FUNC_ELLIPSIS)
3158 pstrcat(buf1, sizeof(buf1), ", ...");
3159 pstrcat(buf1, sizeof(buf1), ")");
3160 type_to_str(buf, buf_size, &s->type, buf1);
3161 goto no_var;
3162 case VT_PTR:
3163 s = type->ref;
3164 if (t & VT_ARRAY) {
3165 if (varstr && '*' == *varstr)
3166 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3167 else
3168 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3169 type_to_str(buf, buf_size, &s->type, buf1);
3170 goto no_var;
3172 pstrcpy(buf1, sizeof(buf1), "*");
3173 if (t & VT_CONSTANT)
3174 pstrcat(buf1, buf_size, "const ");
3175 if (t & VT_VOLATILE)
3176 pstrcat(buf1, buf_size, "volatile ");
3177 if (varstr)
3178 pstrcat(buf1, sizeof(buf1), varstr);
3179 type_to_str(buf, buf_size, &s->type, buf1);
3180 goto no_var;
3182 if (varstr) {
3183 pstrcat(buf, buf_size, " ");
3184 pstrcat(buf, buf_size, varstr);
3186 no_var: ;
3189 /* verify type compatibility to store vtop in 'dt' type, and generate
3190 casts if needed. */
/* Emits warnings for questionable-but-accepted conversions (pointer
   from integer, incompatible pointer, dropped qualifiers) and a hard
   error for impossible ones, then performs the actual cast. */
3191 static void gen_assign_cast(CType *dt)
3193 CType *st, *type1, *type2;
3194 char buf1[256], buf2[256];
3195 int dbt, sbt, qualwarn, lvl;
3197 st = &vtop->type; /* source type */
3198 dbt = dt->t & VT_BTYPE;
3199 sbt = st->t & VT_BTYPE;
3200 if (sbt == VT_VOID || dbt == VT_VOID) {
3201 if (sbt == VT_VOID && dbt == VT_VOID)
3202 ; /* It is Ok if both are void */
3203 else
3204 tcc_error("cannot cast from/to void");
3206 if (dt->t & VT_CONSTANT)
3207 tcc_warning("assignment of read-only location");
3208 switch(dbt) {
3209 case VT_PTR:
3210 /* special cases for pointers */
3211 /* '0' can also be a pointer */
3212 if (is_null_pointer(vtop))
3213 break;
3214 /* accept implicit pointer to integer cast with warning */
3215 if (is_integer_btype(sbt)) {
3216 tcc_warning("assignment makes pointer from integer without a cast");
3217 break;
3219 type1 = pointed_type(dt);
3220 if (sbt == VT_PTR)
3221 type2 = pointed_type(st);
3222 else if (sbt == VT_FUNC)
3223 type2 = st; /* a function is implicitly a function pointer */
3224 else
3225 goto error;
3226 if (is_compatible_types(type1, type2))
3227 break;
/* descend through matching pointer levels, remembering whether any
   level would drop const/volatile from the source */
3228 for (qualwarn = lvl = 0;; ++lvl) {
3229 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3230 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3231 qualwarn = 1;
3232 dbt = type1->t & (VT_BTYPE|VT_LONG);
3233 sbt = type2->t & (VT_BTYPE|VT_LONG);
3234 if (dbt != VT_PTR || sbt != VT_PTR)
3235 break;
3236 type1 = pointed_type(type1);
3237 type2 = pointed_type(type2);
3239 if (!is_compatible_unqualified_types(type1, type2)) {
3240 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3241 /* void * can match anything */
3242 } else if (dbt == sbt
3243 && is_integer_btype(sbt & VT_BTYPE)
3244 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3245 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3246 /* Like GCC don't warn by default for merely changes
3247 in pointer target signedness. Do warn for different
3248 base types, though, in particular for unsigned enums
3249 and signed int targets. */
3250 } else {
3251 tcc_warning("assignment from incompatible pointer type");
3252 break;
3255 if (qualwarn)
3256 tcc_warning("assignment discards qualifiers from pointer target type");
3257 break;
3258 case VT_BYTE:
3259 case VT_SHORT:
3260 case VT_INT:
3261 case VT_LLONG:
3262 if (sbt == VT_PTR || sbt == VT_FUNC) {
3263 tcc_warning("assignment makes integer from pointer without a cast");
3264 } else if (sbt == VT_STRUCT) {
3265 goto case_VT_STRUCT;
3267 /* XXX: more tests */
3268 break;
3269 case VT_STRUCT:
3270 case_VT_STRUCT:
3271 if (!is_compatible_unqualified_types(dt, st)) {
3272 error:
3273 type_to_str(buf1, sizeof(buf1), st, NULL);
3274 type_to_str(buf2, sizeof(buf2), dt, NULL);
3275 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3277 break;
3279 gen_cast(dt);
3282 /* store vtop in lvalue pushed on stack */
/* Handles: delayed char/short narrowing, struct assignment via
   memmove, bitfield insertion, bound-checked stores, and two-word
   (long long / qword) stores.  On exit the stored value remains on
   the stack as the expression result. */
3283 ST_FUNC void vstore(void)
3285 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3287 ft = vtop[-1].type.t;
3288 sbt = vtop->type.t & VT_BTYPE;
3289 dbt = ft & VT_BTYPE;
3290 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3291 (sbt == VT_INT && dbt == VT_SHORT))
3292 && !(vtop->type.t & VT_BITFIELD)) {
3293 /* optimize char/short casts */
3294 delayed_cast = VT_MUSTCAST;
3295 vtop->type.t = ft & VT_TYPE;
3296 /* XXX: factorize */
3297 if (ft & VT_CONSTANT)
3298 tcc_warning("assignment of read-only location");
3299 } else {
3300 delayed_cast = 0;
3301 if (!(ft & VT_BITFIELD))
3302 gen_assign_cast(&vtop[-1].type);
3305 if (sbt == VT_STRUCT) {
3306 /* if structure, only generate pointer */
3307 /* structure assignment : generate memcpy */
3308 /* XXX: optimize if small size */
3309 size = type_size(&vtop->type, &align);
3311 /* destination */
3312 vswap();
3313 vtop->type.t = VT_PTR;
3314 gaddrof();
3316 /* address of memcpy() */
3317 #ifdef TCC_ARM_EABI
3318 if(!(align & 7))
3319 vpush_global_sym(&func_old_type, TOK_memcpy8);
3320 else if(!(align & 3))
3321 vpush_global_sym(&func_old_type, TOK_memcpy4);
3322 else
3323 #endif
3324 /* Use memmove, rather than memcpy, as dest and src may be same: */
3325 vpush_global_sym(&func_old_type, TOK_memmove);
3327 vswap();
3328 /* source */
3329 vpushv(vtop - 2);
3330 vtop->type.t = VT_PTR;
3331 gaddrof();
3332 /* type size */
3333 vpushi(size);
3334 gfunc_call(3);
3336 /* leave source on stack */
3337 } else if (ft & VT_BITFIELD) {
3338 /* bitfield store handling */
3340 /* save lvalue as expression result (example: s.b = s.a = n;) */
3341 vdup(), vtop[-1] = vtop[-2];
3343 bit_pos = BIT_POS(ft);
3344 bit_size = BIT_SIZE(ft);
3345 /* remove bit field info to avoid loops */
3346 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3348 if ((ft & VT_BTYPE) == VT_BOOL) {
3349 gen_cast(&vtop[-1].type);
3350 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3353 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3354 if (r == VT_STRUCT) {
/* bitfield straddles an access unit: use the packed slow path */
3355 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3356 store_packed_bf(bit_pos, bit_size);
3357 } else {
3358 unsigned long long mask = (1ULL << bit_size) - 1;
3359 if ((ft & VT_BTYPE) != VT_BOOL) {
3360 /* mask source */
3361 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3362 vpushll(mask);
3363 else
3364 vpushi((unsigned)mask);
3365 gen_op('&');
3367 /* shift source */
3368 vpushi(bit_pos);
3369 gen_op(TOK_SHL);
3370 vswap();
3371 /* duplicate destination */
3372 vdup();
3373 vrott(3);
3374 /* load destination, mask and or with source */
3375 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3376 vpushll(~(mask << bit_pos));
3377 else
3378 vpushi(~((unsigned)mask << bit_pos));
3379 gen_op('&');
3380 gen_op('|');
3381 /* store result */
3382 vstore();
3383 /* ... and discard */
3384 vpop();
3386 } else if (dbt == VT_VOID) {
3387 --vtop;
3388 } else {
3389 #ifdef CONFIG_TCC_BCHECK
3390 /* bound check case */
3391 if (vtop[-1].r & VT_MUSTBOUND) {
3392 vswap();
3393 gbound();
3394 vswap();
3396 #endif
3397 rc = RC_INT;
3398 if (is_float(ft)) {
3399 rc = RC_FLOAT;
3400 #ifdef TCC_TARGET_X86_64
3401 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3402 rc = RC_ST0;
3403 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3404 rc = RC_FRET;
3406 #endif
3408 r = gv(rc); /* generate value */
3409 /* if lvalue was saved on stack, must read it */
3410 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3411 SValue sv;
3412 t = get_reg(RC_INT);
3413 #if PTR_SIZE == 8
3414 sv.type.t = VT_PTR;
3415 #else
3416 sv.type.t = VT_INT;
3417 #endif
3418 sv.r = VT_LOCAL | VT_LVAL;
3419 sv.c.i = vtop[-1].c.i;
3420 load(t, &sv);
3421 vtop[-1].r = t | VT_LVAL;
3423 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3424 #if PTR_SIZE == 8
3425 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3426 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3427 #else
3428 if ((ft & VT_BTYPE) == VT_LLONG) {
3429 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3430 #endif
3431 vtop[-1].type.t = load_type;
3432 store(r, vtop - 1);
3433 vswap();
3434 /* convert to int to increment easily */
3435 vtop->type.t = addr_type;
3436 gaddrof();
3437 vpushi(load_size);
3438 gen_op('+');
3439 vtop->r |= VT_LVAL;
3440 vswap();
3441 vtop[-1].type.t = load_type;
3442 /* XXX: it works because r2 is spilled last ! */
3443 store(vtop->r2, vtop - 1);
3444 } else {
3445 store(r, vtop - 1);
3448 vswap();
3449 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3450 vtop->r |= delayed_cast;
3454 /* post defines POST/PRE add. c is the token ++ or -- */
3455 ST_FUNC void inc(int post, int c)
3457 test_lvalue();
3458 vdup(); /* save lvalue */
3459 if (post) {
3460 gv_dup(); /* duplicate value */
3461 vrotb(3);
3462 vrotb(3);
3464 /* add constant */
3465 vpushi(c - TOK_MID);
3466 gen_op('+');
3467 vstore(); /* store value */
3468 if (post)
3469 vpop(); /* if post op, return saved value */
3472 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3474 /* read the string */
3475 if (tok != TOK_STR)
3476 expect(msg);
3477 cstr_new(astr);
3478 while (tok == TOK_STR) {
3479 /* XXX: add \0 handling too ? */
3480 cstr_cat(astr, tokc.str.data, -1);
3481 next();
3483 cstr_ccat(astr, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1 (i.e. the
   1-based index of the highest set bit).  If I is 0 returns 0. */
static int exact_log2p1(int i)
{
    int pos;

    if (i == 0)
        return 0;
    if (i < 0)
        return 1; /* matches the original comparison chain for
                     out-of-contract negative inputs */
    for (pos = 0; i != 0; i >>= 1)
        ++pos;
    return pos;
}
3504 /* Parse __attribute__((...)) GNUC extension. */
/* Fills 'ad' with the recognized attributes; unknown attributes are
   warned about (if enabled) and skipped together with their optional
   parenthesized arguments.  Handles consecutive __attribute__ blocks
   via the 'redo' loop. */
3505 static void parse_attribute(AttributeDef *ad)
3507 int t, n;
3508 CString astr;
3510 redo:
3511 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3512 return;
3513 next();
3514 skip('(');
3515 skip('(');
3516 while (tok != ')') {
3517 if (tok < TOK_IDENT)
3518 expect("attribute name");
3519 t = tok;
3520 next();
3521 switch(t) {
3522 case TOK_CLEANUP1:
3523 case TOK_CLEANUP2:
3525 Sym *s;
3527 skip('(');
3528 s = sym_find(tok);
3529 if (!s) {
3530 tcc_warning("implicit declaration of function '%s'",
3531 get_tok_str(tok, &tokc));
3532 s = external_global_sym(tok, &func_old_type);
3534 ad->cleanup_func = s;
3535 next();
3536 skip(')');
3537 break;
3539 case TOK_SECTION1:
3540 case TOK_SECTION2:
3541 skip('(');
3542 parse_mult_str(&astr, "section name");
3543 ad->section = find_section(tcc_state, (char *)astr.data);
3544 skip(')');
3545 cstr_free(&astr);
3546 break;
3547 case TOK_ALIAS1:
3548 case TOK_ALIAS2:
3549 skip('(');
3550 parse_mult_str(&astr, "alias(\"target\")");
3551 ad->alias_target = /* save string as token, for later */
3552 tok_alloc((char*)astr.data, astr.size-1)->tok;
3553 skip(')');
3554 cstr_free(&astr);
3555 break;
3556 case TOK_VISIBILITY1:
3557 case TOK_VISIBILITY2:
3558 skip('(');
3559 parse_mult_str(&astr,
3560 "visibility(\"default|hidden|internal|protected\")");
3561 if (!strcmp (astr.data, "default"))
3562 ad->a.visibility = STV_DEFAULT;
3563 else if (!strcmp (astr.data, "hidden"))
3564 ad->a.visibility = STV_HIDDEN;
3565 else if (!strcmp (astr.data, "internal"))
3566 ad->a.visibility = STV_INTERNAL;
3567 else if (!strcmp (astr.data, "protected"))
3568 ad->a.visibility = STV_PROTECTED;
3569 else
3570 expect("visibility(\"default|hidden|internal|protected\")");
3571 skip(')');
3572 cstr_free(&astr);
3573 break;
3574 case TOK_ALIGNED1:
3575 case TOK_ALIGNED2:
3576 if (tok == '(') {
3577 next();
3578 n = expr_const();
3579 if (n <= 0 || (n & (n - 1)) != 0)
3580 tcc_error("alignment must be a positive power of two");
3581 skip(')');
3582 } else {
/* bare __attribute__((aligned)) means maximum useful alignment */
3583 n = MAX_ALIGN;
3585 ad->a.aligned = exact_log2p1(n);
3586 if (n != 1 << (ad->a.aligned - 1))
3587 tcc_error("alignment of %d is larger than implemented", n);
3588 break;
3589 case TOK_PACKED1:
3590 case TOK_PACKED2:
3591 ad->a.packed = 1;
3592 break;
3593 case TOK_WEAK1:
3594 case TOK_WEAK2:
3595 ad->a.weak = 1;
3596 break;
3597 case TOK_UNUSED1:
3598 case TOK_UNUSED2:
3599 /* currently, no need to handle it because tcc does not
3600 track unused objects */
3601 break;
3602 case TOK_NORETURN1:
3603 case TOK_NORETURN2:
3604 ad->f.func_noreturn = 1;
3605 break;
3606 case TOK_CDECL1:
3607 case TOK_CDECL2:
3608 case TOK_CDECL3:
3609 ad->f.func_call = FUNC_CDECL;
3610 break;
3611 case TOK_STDCALL1:
3612 case TOK_STDCALL2:
3613 case TOK_STDCALL3:
3614 ad->f.func_call = FUNC_STDCALL;
3615 break;
3616 #ifdef TCC_TARGET_I386
3617 case TOK_REGPARM1:
3618 case TOK_REGPARM2:
3619 skip('(');
3620 n = expr_const();
/* regparm is clamped to the 0..3 range supported by the i386 ABI */
3621 if (n > 3)
3622 n = 3;
3623 else if (n < 0)
3624 n = 0;
3625 if (n > 0)
3626 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3627 skip(')');
3628 break;
3629 case TOK_FASTCALL1:
3630 case TOK_FASTCALL2:
3631 case TOK_FASTCALL3:
3632 ad->f.func_call = FUNC_FASTCALLW;
3633 break;
3634 #endif
3635 case TOK_MODE:
3636 skip('(');
3637 switch(tok) {
3638 case TOK_MODE_DI:
3639 ad->attr_mode = VT_LLONG + 1;
3640 break;
3641 case TOK_MODE_QI:
3642 ad->attr_mode = VT_BYTE + 1;
3643 break;
3644 case TOK_MODE_HI:
3645 ad->attr_mode = VT_SHORT + 1;
3646 break;
3647 case TOK_MODE_SI:
3648 case TOK_MODE_word:
3649 ad->attr_mode = VT_INT + 1;
3650 break;
3651 default:
3652 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3653 break;
3655 next();
3656 skip(')');
3657 break;
3658 case TOK_DLLEXPORT:
3659 ad->a.dllexport = 1;
3660 break;
3661 case TOK_NODECORATE:
3662 ad->a.nodecorate = 1;
3663 break;
3664 case TOK_DLLIMPORT:
3665 ad->a.dllimport = 1;
3666 break;
3667 default:
3668 if (tcc_state->warn_unsupported)
3669 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3670 /* skip parameters */
3671 if (tok == '(') {
3672 int parenthesis = 0;
3673 do {
3674 if (tok == '(')
3675 parenthesis++;
3676 else if (tok == ')')
3677 parenthesis--;
3678 next();
3679 } while (parenthesis && tok != -1);
3681 break;
3683 if (tok != ',')
3684 break;
3685 next();
3687 skip(')');
3688 skip(')');
3689 goto redo;
3692 static Sym * find_field (CType *type, int v, int *cumofs)
3694 Sym *s = type->ref;
3695 v |= SYM_FIELD;
3696 while ((s = s->next) != NULL) {
3697 if ((s->v & SYM_FIELD) &&
3698 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3699 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3700 Sym *ret = find_field (&s->type, v, cumofs);
3701 if (ret) {
3702 *cumofs += s->c;
3703 return ret;
3706 if (s->v == v)
3707 break;
3709 return s;
/* Compute the memory layout of a struct/union: assign each field its byte
   offset (f->c) and, for bitfields, its bit position, then record the
   overall alignment in type->ref->r and the size in type->ref->c.
   Handles both the PCC/GCC and the MS bitfield models (pcc flag below),
   #pragma pack, and the packed/aligned attributes.  A second pass then
   re-types bitfields so they can be loaded/stored through an ordinary
   integer type, falling back to byte-wise access (auxtype == VT_STRUCT)
   when no suitable integer access window exists. */
3712 static void struct_layout(CType *type, AttributeDef *ad)
3714 int size, align, maxalign, offset, c, bit_pos, bit_size;
3715 int packed, a, bt, prevbt, prev_bit_size;
3716 int pcc = !tcc_state->ms_bitfields;
3717 int pragma_pack = *tcc_state->pack_stack_ptr;
3718 Sym *f;
3720 maxalign = 1;
3721 offset = 0;
3722 c = 0;
3723 bit_pos = 0;
3724 prevbt = VT_STRUCT; /* make it never match */
3725 prev_bit_size = 0;
3727 //#define BF_DEBUG
/* pass 1: walk the member list, computing per-field offset/bit position
   and tracking the maximum alignment seen */
3729 for (f = type->ref->next; f; f = f->next) {
3730 if (f->type.t & VT_BITFIELD)
3731 bit_size = BIT_SIZE(f->type.t);
3732 else
3733 bit_size = -1;
3734 size = type_size(&f->type, &align);
3735 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3736 packed = 0;
3738 if (pcc && bit_size == 0) {
3739 /* in pcc mode, packing does not affect zero-width bitfields */
3741 } else {
3742 /* in pcc mode, attribute packed overrides if set. */
3743 if (pcc && (f->a.packed || ad->a.packed))
3744 align = packed = 1;
3746 /* pragma pack overrides align if lesser and packs bitfields always */
3747 if (pragma_pack) {
3748 packed = 1;
3749 if (pragma_pack < align)
3750 align = pragma_pack;
3751 /* in pcc mode pragma pack also overrides individual align */
3752 if (pcc && pragma_pack < a)
3753 a = 0;
3756 /* some individual align was specified */
3757 if (a)
3758 align = a;
/* unions: every member starts at offset 0; size is the max member size */
3760 if (type->ref->type.t == VT_UNION) {
3761 if (pcc && bit_size >= 0)
3762 size = (bit_size + 7) >> 3;
3763 offset = 0;
3764 if (size > c)
3765 c = size;
3767 } else if (bit_size < 0) {
/* ordinary (non-bitfield) struct member: align and append */
3768 if (pcc)
3769 c += (bit_pos + 7) >> 3;
3770 c = (c + align - 1) & -align;
3771 offset = c;
3772 if (size > 0)
3773 c += size;
3774 bit_pos = 0;
3775 prevbt = VT_STRUCT;
3776 prev_bit_size = 0;
3778 } else {
3779 /* A bit-field. Layout is more complicated. There are two
3780 options: PCC (GCC) compatible and MS compatible */
3781 if (pcc) {
3782 /* In PCC layout a bit-field is placed adjacent to the
3783 preceding bit-fields, except if:
3784 - it has zero-width
3785 - an individual alignment was given
3786 - it would overflow its base type container and
3787 there is no packing */
3788 if (bit_size == 0) {
3789 new_field:
3790 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3791 bit_pos = 0;
3792 } else if (f->a.aligned) {
3793 goto new_field;
3794 } else if (!packed) {
3795 int a8 = align * 8;
3796 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3797 if (ofs > size / align)
3798 goto new_field;
3801 /* in pcc mode, long long bitfields have type int if they fit */
3802 if (size == 8 && bit_size <= 32)
3803 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3805 while (bit_pos >= align * 8)
3806 c += align, bit_pos -= align * 8;
3807 offset = c;
3809 /* In PCC layout named bit-fields influence the alignment
3810 of the containing struct using the base types alignment,
3811 except for packed fields (which here have correct align). */
3812 if (f->v & SYM_FIRST_ANOM
3813 // && bit_size // ??? gcc on ARM/rpi does that
3815 align = 1;
3817 } else {
3818 bt = f->type.t & VT_BTYPE;
3819 if ((bit_pos + bit_size > size * 8)
3820 || (bit_size > 0) == (bt != prevbt)
3822 c = (c + align - 1) & -align;
3823 offset = c;
3824 bit_pos = 0;
3825 /* In MS bitfield mode a bit-field run always uses
3826 at least as many bits as the underlying type.
3827 To start a new run it's also required that this
3828 or the last bit-field had non-zero width. */
3829 if (bit_size || prev_bit_size)
3830 c += size;
3832 /* In MS layout the records alignment is normally
3833 influenced by the field, except for a zero-width
3834 field at the start of a run (but by further zero-width
3835 fields it is again). */
3836 if (bit_size == 0 && prevbt != bt)
3837 align = 1;
3838 prevbt = bt;
3839 prev_bit_size = bit_size;
/* store the field's bit position into its type word */
3842 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3843 | (bit_pos << VT_STRUCT_SHIFT);
3844 bit_pos += bit_size;
3846 if (align > maxalign)
3847 maxalign = align;
3849 #ifdef BF_DEBUG
3850 printf("set field %s offset %-2d size %-2d align %-2d",
3851 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3852 if (f->type.t & VT_BITFIELD) {
3853 printf(" pos %-2d bits %-2d",
3854 BIT_POS(f->type.t),
3855 BIT_SIZE(f->type.t)
3858 printf("\n");
3859 #endif
3861 f->c = offset;
3862 f->r = 0;
3865 if (pcc)
3866 c += (bit_pos + 7) >> 3;
3868 /* store size and alignment */
3869 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3870 if (a < maxalign)
3871 a = maxalign;
3872 type->ref->r = a;
3873 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3874 /* can happen if individual align for some member was given. In
3875 this case MSVC ignores maxalign when aligning the size */
3876 a = pragma_pack;
3877 if (a < bt)
3878 a = bt;
3880 c = (c + a - 1) & -a;
3881 type->ref->c = c;
3883 #ifdef BF_DEBUG
3884 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3885 #endif
3887 /* check whether we can access bitfields by their type */
3888 for (f = type->ref->next; f; f = f->next) {
3889 int s, px, cx, c0;
3890 CType t;
3892 if (0 == (f->type.t & VT_BITFIELD))
3893 continue;
3894 f->type.ref = f;
3895 f->auxtype = -1;
3896 bit_size = BIT_SIZE(f->type.t);
3897 if (bit_size == 0)
3898 continue;
3899 bit_pos = BIT_POS(f->type.t);
3900 size = type_size(&f->type, &align);
/* field already accessible through its own type: nothing to fix */
3901 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3902 continue;
3904 /* try to access the field using a different type */
3905 c0 = -1, s = align = 1;
/* iterate: grow the candidate integer access type until the
   (offset, bit position) pair stabilizes */
3906 for (;;) {
3907 px = f->c * 8 + bit_pos;
3908 cx = (px >> 3) & -align;
3909 px = px - (cx << 3);
3910 if (c0 == cx)
3911 break;
3912 s = (px + bit_size + 7) >> 3;
3913 if (s > 4) {
3914 t.t = VT_LLONG;
3915 } else if (s > 2) {
3916 t.t = VT_INT;
3917 } else if (s > 1) {
3918 t.t = VT_SHORT;
3919 } else {
3920 t.t = VT_BYTE;
3922 s = type_size(&t, &align);
3923 c0 = cx;
3926 if (px + bit_size <= s * 8 && cx + s <= c) {
3927 /* update offset and bit position */
3928 f->c = cx;
3929 bit_pos = px;
3930 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3931 | (bit_pos << VT_STRUCT_SHIFT);
3932 if (s != size)
3933 f->auxtype = t.t;
3934 #ifdef BF_DEBUG
3935 printf("FIX field %s offset %-2d size %-2d align %-2d "
3936 "pos %-2d bits %-2d\n",
3937 get_tok_str(f->v & ~SYM_FIELD, NULL),
3938 cx, s, align, px, bit_size);
3939 #endif
3940 } else {
3941 /* fall back to load/store single-byte wise */
3942 f->auxtype = VT_STRUCT;
3943 #ifdef BF_DEBUG
3944 printf("FIX field %s : load byte-wise\n",
3945 get_tok_str(f->v & ~SYM_FIELD, NULL));
3946 #endif
3951 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses either a reference to a previously declared tag, a forward
   declaration, or a full definition with member/enumerator list, and
   stores the resulting type in 'type'.  For a definition, field layout
   is delegated to struct_layout(). */
3952 static void struct_decl(CType *type, int u)
3954 int v, c, size, align, flexible;
3955 int bit_size, bsize, bt;
3956 Sym *s, *ss, **ps;
3957 AttributeDef ad, ad1;
3958 CType type1, btype;
3960 memset(&ad, 0, sizeof ad);
3961 next();
3962 parse_attribute(&ad);
3963 if (tok != '{') {
3964 v = tok;
3965 next();
3966 /* struct already defined ? return it */
3967 if (v < TOK_IDENT)
3968 expect("struct/union/enum name");
3969 s = struct_find(v);
3970 if (s && (s->sym_scope == local_scope || tok != '{')) {
3971 if (u == s->type.t)
3972 goto do_decl;
3973 if (u == VT_ENUM && IS_ENUM(s->type.t))
3974 goto do_decl;
3975 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3977 } else {
/* anonymous tag: synthesize a name */
3978 v = anon_sym++;
3980 /* Record the original enum/struct/union token. */
3981 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3982 type1.ref = NULL;
3983 /* we put an undefined size for struct/union */
3984 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3985 s->r = 0; /* default alignment is zero as gcc */
3986 do_decl:
3987 type->t = s->type.t;
3988 type->ref = s;
3990 if (tok == '{') {
3991 next();
3992 if (s->c != -1)
3993 tcc_error("struct/union/enum already defined");
3994 s->c = -2;
3995 /* cannot be empty */
3996 /* non empty enums are not allowed */
3997 ps = &s->next;
/* enumeration: parse enumerators, tracking min (nl) and max (pl)
   values so the enum's integral type can be chosen afterwards */
3998 if (u == VT_ENUM) {
3999 long long ll = 0, pl = 0, nl = 0;
4000 CType t;
4001 t.ref = s;
4002 /* enum symbols have static storage */
4003 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4004 for(;;) {
4005 v = tok;
4006 if (v < TOK_UIDENT)
4007 expect("identifier");
4008 ss = sym_find(v);
4009 if (ss && !local_stack)
4010 tcc_error("redefinition of enumerator '%s'",
4011 get_tok_str(v, NULL));
4012 next();
4013 if (tok == '=') {
4014 next();
4015 ll = expr_const64();
4017 ss = sym_push(v, &t, VT_CONST, 0);
4018 ss->enum_val = ll;
4019 *ps = ss, ps = &ss->next;
4020 if (ll < nl)
4021 nl = ll;
4022 if (ll > pl)
4023 pl = ll;
4024 if (tok != ',')
4025 break;
4026 next();
4027 ll++;
4028 /* NOTE: we accept a trailing comma */
4029 if (tok == '}')
4030 break;
4032 skip('}');
4033 /* set integral type of the enum */
4034 t.t = VT_INT;
4035 if (nl >= 0) {
4036 if (pl != (unsigned)pl)
4037 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4038 t.t |= VT_UNSIGNED;
4039 } else if (pl != (int)pl || nl != (int)nl)
4040 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4041 s->type.t = type->t = t.t | VT_ENUM;
4042 s->c = 0;
4043 /* set type for enum members */
4044 for (ss = s->next; ss; ss = ss->next) {
4045 ll = ss->enum_val;
4046 if (ll == (int)ll) /* default is int if it fits */
4047 continue;
4048 if (t.t & VT_UNSIGNED) {
4049 ss->type.t |= VT_UNSIGNED;
4050 if (ll == (unsigned)ll)
4051 continue;
4053 ss->type.t = (ss->type.t & ~VT_BTYPE)
4054 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4056 } else {
/* struct/union: parse the member declarations; 'c' records whether a
   real member has been seen (for flexible-array-member checking) */
4057 c = 0;
4058 flexible = 0;
4059 while (tok != '}') {
4060 if (!parse_btype(&btype, &ad1)) {
4061 skip(';');
4062 continue;
4064 while (1) {
4065 if (flexible)
4066 tcc_error("flexible array member '%s' not at the end of struct",
4067 get_tok_str(v, NULL));
4068 bit_size = -1;
4069 v = 0;
4070 type1 = btype;
4071 if (tok != ':') {
4072 if (tok != ';')
4073 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4074 if (v == 0) {
4075 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4076 expect("identifier");
4077 else {
4078 int v = btype.ref->v;
4079 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4080 if (tcc_state->ms_extensions == 0)
4081 expect("identifier");
4085 if (type_size(&type1, &align) < 0) {
4086 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4087 flexible = 1;
4088 else
4089 tcc_error("field '%s' has incomplete type",
4090 get_tok_str(v, NULL));
4092 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4093 (type1.t & VT_BTYPE) == VT_VOID ||
4094 (type1.t & VT_STORAGE))
4095 tcc_error("invalid type for '%s'",
4096 get_tok_str(v, NULL));
4098 if (tok == ':') {
4099 next();
4100 bit_size = expr_const();
4101 /* XXX: handle v = 0 case for messages */
4102 if (bit_size < 0)
4103 tcc_error("negative width in bit-field '%s'",
4104 get_tok_str(v, NULL));
4105 if (v && bit_size == 0)
4106 tcc_error("zero width for bit-field '%s'",
4107 get_tok_str(v, NULL));
4108 parse_attribute(&ad1);
4110 size = type_size(&type1, &align);
4111 if (bit_size >= 0) {
4112 bt = type1.t & VT_BTYPE;
4113 if (bt != VT_INT &&
4114 bt != VT_BYTE &&
4115 bt != VT_SHORT &&
4116 bt != VT_BOOL &&
4117 bt != VT_LLONG)
4118 tcc_error("bitfields must have scalar type");
4119 bsize = size * 8;
4120 if (bit_size > bsize) {
4121 tcc_error("width of '%s' exceeds its type",
4122 get_tok_str(v, NULL));
4123 } else if (bit_size == bsize
4124 && !ad.a.packed && !ad1.a.packed) {
4125 /* no need for bit fields */
4127 } else if (bit_size == 64) {
4128 tcc_error("field width 64 not implemented");
4129 } else {
4130 type1.t = (type1.t & ~VT_STRUCT_MASK)
4131 | VT_BITFIELD
4132 | (bit_size << (VT_STRUCT_SHIFT + 6));
4135 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4136 /* Remember we've seen a real field to check
4137 for placement of flexible array member. */
4138 c = 1;
4140 /* If member is a struct or bit-field, enforce
4141 placing into the struct (as anonymous). */
4142 if (v == 0 &&
4143 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4144 bit_size >= 0)) {
4145 v = anon_sym++;
4147 if (v) {
4148 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4149 ss->a = ad1.a;
4150 *ps = ss;
4151 ps = &ss->next;
4153 if (tok == ';' || tok == TOK_EOF)
4154 break;
4155 skip(',');
4157 skip(';');
4159 skip('}');
4160 parse_attribute(&ad);
4161 struct_layout(type, &ad);
/* Fold the symbol-level and function-level attributes stored on
   symbol 's' into the attribute definition 'ad'. */
4166 static void sym_to_attr(AttributeDef *ad, Sym *s)
4168 merge_symattr(&ad->a, &s->a);
4169 merge_funcattr(&ad->f, &s->f);
4172 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4173 are added to the element type, copied because it could be a typedef. */
4174 static void parse_btype_qualify(CType *type, int qualifiers)
4176 while (type->t & VT_ARRAY) {
4177 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4178 type = &type->ref->type;
4180 type->t |= qualifiers;
4183 /* return 0 if no type declaration. otherwise, return the basic type
4184 and skip it.
/* Accumulates type specifiers ('int', 'unsigned', 'long', ...),
   qualifiers, storage classes, attributes and typedef names into the
   type word 't'; 'bt' / 'st' track the base and size specifiers so that
   invalid combinations ("too many basic types") can be diagnosed. */
4186 static int parse_btype(CType *type, AttributeDef *ad)
4188 int t, u, bt, st, type_found, typespec_found, g, n;
4189 Sym *s;
4190 CType type1;
4192 memset(ad, 0, sizeof(AttributeDef));
4193 type_found = 0;
4194 typespec_found = 0;
4195 t = VT_INT;
4196 bt = st = -1;
4197 type->ref = NULL;
4199 while(1) {
4200 switch(tok) {
4201 case TOK_EXTENSION:
4202 /* currently, we really ignore extension */
4203 next();
4204 continue;
4206 /* basic types */
4207 case TOK_CHAR:
4208 u = VT_BYTE;
4209 basic_type:
4210 next();
4211 basic_type1:
4212 if (u == VT_SHORT || u == VT_LONG) {
4213 if (st != -1 || (bt != -1 && bt != VT_INT))
4214 tmbt: tcc_error("too many basic types");
4215 st = u;
4216 } else {
4217 if (bt != -1 || (st != -1 && u != VT_INT))
4218 goto tmbt;
4219 bt = u;
4221 if (u != VT_INT)
4222 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4223 typespec_found = 1;
4224 break;
4225 case TOK_VOID:
4226 u = VT_VOID;
4227 goto basic_type;
4228 case TOK_SHORT:
4229 u = VT_SHORT;
4230 goto basic_type;
4231 case TOK_INT:
4232 u = VT_INT;
4233 goto basic_type;
4234 case TOK_ALIGNAS:
4235 { int n;
4236 AttributeDef ad1;
4237 next();
4238 skip('(');
4239 memset(&ad1, 0, sizeof(AttributeDef));
4240 if (parse_btype(&type1, &ad1)) {
4241 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4242 if (ad1.a.aligned)
4243 n = 1 << (ad1.a.aligned - 1);
4244 else
4245 type_size(&type1, &n);
4246 } else {
4247 n = expr_const();
4248 if (n <= 0 || (n & (n - 1)) != 0)
4249 tcc_error("alignment must be a positive power of two");
4251 skip(')');
4252 ad->a.aligned = exact_log2p1(n);
4254 continue;
4255 case TOK_LONG:
4256 if ((t & VT_BTYPE) == VT_DOUBLE) {
4257 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4258 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4259 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4260 } else {
4261 u = VT_LONG;
4262 goto basic_type;
4264 next();
4265 break;
4266 #ifdef TCC_TARGET_ARM64
4267 case TOK_UINT128:
4268 /* GCC's __uint128_t appears in some Linux header files. Make it a
4269 synonym for long double to get the size and alignment right. */
4270 u = VT_LDOUBLE;
4271 goto basic_type;
4272 #endif
4273 case TOK_BOOL:
4274 u = VT_BOOL;
4275 goto basic_type;
4276 case TOK_FLOAT:
4277 u = VT_FLOAT;
4278 goto basic_type;
4279 case TOK_DOUBLE:
4280 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4281 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4282 } else {
4283 u = VT_DOUBLE;
4284 goto basic_type;
4286 next();
4287 break;
4288 case TOK_ENUM:
4289 struct_decl(&type1, VT_ENUM);
4290 basic_type2:
4291 u = type1.t;
4292 type->ref = type1.ref;
4293 goto basic_type1;
4294 case TOK_STRUCT:
4295 struct_decl(&type1, VT_STRUCT);
4296 goto basic_type2;
4297 case TOK_UNION:
4298 struct_decl(&type1, VT_UNION);
4299 goto basic_type2;
4301 /* type modifiers */
4302 case TOK_CONST1:
4303 case TOK_CONST2:
4304 case TOK_CONST3:
4305 type->t = t;
4306 parse_btype_qualify(type, VT_CONSTANT);
4307 t = type->t;
4308 next();
4309 break;
4310 case TOK_VOLATILE1:
4311 case TOK_VOLATILE2:
4312 case TOK_VOLATILE3:
4313 type->t = t;
4314 parse_btype_qualify(type, VT_VOLATILE);
4315 t = type->t;
4316 next();
4317 break;
4318 case TOK_SIGNED1:
4319 case TOK_SIGNED2:
4320 case TOK_SIGNED3:
4321 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4322 tcc_error("signed and unsigned modifier")
4323 t |= VT_DEFSIGN;
4324 next();
4325 typespec_found = 1;
4326 break;
4327 case TOK_REGISTER:
4328 case TOK_AUTO:
4329 case TOK_RESTRICT1:
4330 case TOK_RESTRICT2:
4331 case TOK_RESTRICT3:
4332 next();
4333 break;
4334 case TOK_UNSIGNED:
4335 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4336 tcc_error("signed and unsigned modifier");
4337 t |= VT_DEFSIGN | VT_UNSIGNED;
4338 next();
4339 typespec_found = 1;
4340 break;
4342 /* storage */
4343 case TOK_EXTERN:
4344 g = VT_EXTERN;
4345 goto storage;
4346 case TOK_STATIC:
4347 g = VT_STATIC;
4348 goto storage;
4349 case TOK_TYPEDEF:
4350 g = VT_TYPEDEF;
4351 goto storage;
4352 storage:
4353 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4354 tcc_error("multiple storage classes");
4355 t |= g;
4356 next();
4357 break;
4358 case TOK_INLINE1:
4359 case TOK_INLINE2:
4360 case TOK_INLINE3:
4361 t |= VT_INLINE;
4362 next();
4363 break;
4364 case TOK_NORETURN3:
4365 /* currently, no need to handle it because tcc does not
4366 track unused objects */
4367 next();
4368 break;
4369 /* GNUC attribute */
4370 case TOK_ATTRIBUTE1:
4371 case TOK_ATTRIBUTE2:
4372 parse_attribute(ad);
4373 if (ad->attr_mode) {
4374 u = ad->attr_mode -1;
4375 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4377 continue;
4378 /* GNUC typeof */
4379 case TOK_TYPEOF1:
4380 case TOK_TYPEOF2:
4381 case TOK_TYPEOF3:
4382 next();
4383 parse_expr_type(&type1);
4384 /* remove all storage modifiers except typedef */
4385 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4386 if (type1.ref)
4387 sym_to_attr(ad, type1.ref);
4388 goto basic_type2;
/* anything else: either a typedef name or the end of the specifiers */
4389 default:
4390 if (typespec_found)
4391 goto the_end;
4392 s = sym_find(tok);
4393 if (!s || !(s->type.t & VT_TYPEDEF))
4394 goto the_end;
4396 n = tok, next();
4397 if (tok == ':' && !in_generic) {
4398 /* ignore if it's a label */
4399 unget_tok(n);
4400 goto the_end;
4403 t &= ~(VT_BTYPE|VT_LONG);
4404 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4405 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4406 type->ref = s->type.ref;
4407 if (t)
4408 parse_btype_qualify(type, t);
4409 t = type->t;
4410 /* get attributes from typedef */
4411 sym_to_attr(ad, s);
4412 typespec_found = 1;
4413 st = bt = -2;
4414 break;
4416 type_found = 1;
4418 the_end:
4419 if (tcc_state->char_is_unsigned) {
4420 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4421 t |= VT_UNSIGNED;
4423 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4424 bt = t & (VT_BTYPE|VT_LONG);
4425 if (bt == VT_LONG)
4426 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4427 #ifdef TCC_TARGET_PE
4428 if (bt == VT_LDOUBLE)
4429 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4430 #endif
4431 type->t = t;
4432 return type_found;
4435 /* convert a function parameter type (array to pointer and function to
4436 function pointer) */
4437 static inline void convert_parameter_type(CType *pt)
4439 /* remove const and volatile qualifiers (XXX: const could be used
4440 to indicate a const function parameter */
4441 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4442 /* array must be transformed to pointer according to ANSI C */
4443 pt->t &= ~VT_ARRAY;
4444 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4445 mk_pointer(pt);
/* Parse the '(' and the (possibly concatenated) string literal of an
   asm construct into 'astr'; the caller consumes the closing ')'. */
4449 ST_FUNC void parse_asm_str(CString *astr)
4451 skip('(');
4452 parse_mult_str(astr, "string constant");
4455 /* Parse an asm label and return the token */
4456 static int asm_label_instr(void)
4458 int v;
4459 CString astr;
4461 next();
4462 parse_asm_str(&astr);
4463 skip(')');
4464 #ifdef ASM_DEBUG
4465 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4466 #endif
4467 v = tok_alloc(astr.data, astr.size - 1)->tok;
4468 cstr_free(&astr);
4469 return v;
/* Parse what follows a declarator: a function parameter list or array
   dimensions, completing 'type' in place.  Returns 1 when handled;
   returns 0 when the '(' actually begins a nested declarator that the
   caller (type_decl) must parse instead — only possible depending on
   'td'. */
4472 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4474 int n, l, t1, arg_size, align, unused_align;
4475 Sym **plast, *s, *first;
4476 AttributeDef ad1;
4477 CType pt;
4479 if (tok == '(') {
4480 /* function type, or recursive declarator (return if so) */
4481 next();
4482 if (td && !(td & TYPE_ABSTRACT))
4483 return 0;
4484 if (tok == ')')
4485 l = 0;
4486 else if (parse_btype(&pt, &ad1))
4487 l = FUNC_NEW;
4488 else if (td) {
4489 merge_attr (ad, &ad1);
4490 return 0;
4491 } else
4492 l = FUNC_OLD;
4493 first = NULL;
4494 plast = &first;
4495 arg_size = 0;
/* collect the parameter symbols into the list headed by 'first' */
4496 if (l) {
4497 for(;;) {
4498 /* read param name and compute offset */
4499 if (l != FUNC_OLD) {
4500 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4501 break;
4502 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4503 if ((pt.t & VT_BTYPE) == VT_VOID)
4504 tcc_error("parameter declared as void");
4505 } else {
4506 n = tok;
4507 if (n < TOK_UIDENT)
4508 expect("identifier");
4509 pt.t = VT_VOID; /* invalid type */
4510 pt.ref = NULL;
4511 next();
4513 convert_parameter_type(&pt);
4514 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4515 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4516 *plast = s;
4517 plast = &s->next;
4518 if (tok == ')')
4519 break;
4520 skip(',');
4521 if (l == FUNC_NEW && tok == TOK_DOTS) {
4522 l = FUNC_ELLIPSIS;
4523 next();
4524 break;
4526 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4527 tcc_error("invalid type");
4529 } else
4530 /* if no parameters, then old type prototype */
4531 l = FUNC_OLD;
4532 skip(')');
4533 /* NOTE: const is ignored in returned type as it has a special
4534 meaning in gcc / C++ */
4535 type->t &= ~VT_CONSTANT;
4536 /* some ancient pre-K&R C allows a function to return an array
4537 and the array brackets to be put after the arguments, such
4538 that "int c()[]" means something like "int[] c()" */
4539 if (tok == '[') {
4540 next();
4541 skip(']'); /* only handle simple "[]" */
4542 mk_pointer(type);
4544 /* we push a anonymous symbol which will contain the function prototype */
4545 ad->f.func_args = arg_size;
4546 ad->f.func_type = l;
4547 s = sym_push(SYM_FIELD, type, 0, 0);
4548 s->a = ad->a;
4549 s->f = ad->f;
4550 s->next = first;
4551 type->t = VT_FUNC;
4552 type->ref = s;
4553 } else if (tok == '[') {
4554 int saved_nocode_wanted = nocode_wanted;
4555 /* array definition */
4556 next();
4557 while (1) {
4558 /* XXX The optional type-quals and static should only be accepted
4559 in parameter decls. The '*' as well, and then even only
4560 in prototypes (not function defs). */
4561 switch (tok) {
4562 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4563 case TOK_CONST1:
4564 case TOK_VOLATILE1:
4565 case TOK_STATIC:
4566 case '*':
4567 next();
4568 continue;
4569 default:
4570 break;
4572 break;
/* n: constant dimension, or -1 if absent; t1 gets VT_VLA for
   runtime-sized dimensions */
4574 n = -1;
4575 t1 = 0;
4576 if (tok != ']') {
4577 if (!local_stack || (storage & VT_STATIC))
4578 vpushi(expr_const());
4579 else {
4580 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4581 length must always be evaluated, even under nocode_wanted,
4582 so that its size slot is initialized (e.g. under sizeof
4583 or typeof). */
4584 nocode_wanted = 0;
4585 gexpr();
4587 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4588 n = vtop->c.i;
4589 if (n < 0)
4590 tcc_error("invalid array size");
4591 } else {
4592 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4593 tcc_error("size of variable length array should be an integer");
4594 n = 0;
4595 t1 = VT_VLA;
4598 skip(']');
4599 /* parse next post type */
4600 post_type(type, ad, storage, 0);
4602 if ((type->t & VT_BTYPE) == VT_FUNC)
4603 tcc_error("declaration of an array of functions");
4604 if ((type->t & VT_BTYPE) == VT_VOID
4605 || type_size(type, &unused_align) < 0)
4606 tcc_error("declaration of an array of incomplete type elements");
4608 t1 |= type->t & VT_VLA;
4610 if (t1 & VT_VLA) {
/* reserve a stack slot for the runtime size and store the computed
   element-count * element-size product into it */
4611 if (n < 0)
4612 tcc_error("need explicit inner array size in VLAs");
4613 loc -= type_size(&int_type, &align);
4614 loc &= -align;
4615 n = loc;
4617 vla_runtime_type_size(type, &align);
4618 gen_op('*');
4619 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4620 vswap();
4621 vstore();
4623 if (n != -1)
4624 vpop();
4625 nocode_wanted = saved_nocode_wanted;
4627 /* we push an anonymous symbol which will contain the array
4628 element type */
4629 s = sym_push(SYM_FIELD, type, 0, n);
4630 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4631 type->ref = s;
4633 return 1;
4636 /* Parse a type declarator (except basic type), and return the type
4637 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4638 expected. 'type' should contain the basic type. 'ad' is the
4639 attribute definition of the basic type. It can be modified by
4640 type_decl(). If this (possibly abstract) declarator is a pointer chain
4641 it returns the innermost pointed to type (equals *type, but is a different
4642 pointer), otherwise returns type itself, that's used for recursive calls. */
4643 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4645 CType *post, *ret;
4646 int qualifiers, storage;
4648 /* recursive type, remove storage bits first, apply them later again */
4649 storage = type->t & VT_STORAGE;
4650 type->t &= ~VT_STORAGE;
4651 post = ret = type;
/* consume the leading '*' pointer derivations with their qualifiers */
4653 while (tok == '*') {
4654 qualifiers = 0;
4655 redo:
4656 next();
4657 switch(tok) {
4658 case TOK_CONST1:
4659 case TOK_CONST2:
4660 case TOK_CONST3:
4661 qualifiers |= VT_CONSTANT;
4662 goto redo;
4663 case TOK_VOLATILE1:
4664 case TOK_VOLATILE2:
4665 case TOK_VOLATILE3:
4666 qualifiers |= VT_VOLATILE;
4667 goto redo;
4668 case TOK_RESTRICT1:
4669 case TOK_RESTRICT2:
4670 case TOK_RESTRICT3:
4671 goto redo;
4672 /* XXX: clarify attribute handling */
4673 case TOK_ATTRIBUTE1:
4674 case TOK_ATTRIBUTE2:
4675 parse_attribute(ad);
4676 break;
/* one '*' fully consumed: derive the pointer type, keeping the
   collected qualifiers on it */
4678 mk_pointer(type);
4679 type->t |= qualifiers;
4680 if (ret == type)
4681 /* innermost pointed to type is the one for the first derivation */
4682 ret = pointed_type(type);
4685 if (tok == '(') {
4686 /* This is possibly a parameter type list for abstract declarators
4687 ('int ()'), use post_type for testing this. */
4688 if (!post_type(type, ad, 0, td)) {
4689 /* It's not, so it's a nested declarator, and the post operations
4690 apply to the innermost pointed to type (if any). */
4691 /* XXX: this is not correct to modify 'ad' at this point, but
4692 the syntax is not clear */
4693 parse_attribute(ad);
4694 post = type_decl(type, ad, v, td);
4695 skip(')');
4696 } else
4697 goto abstract;
4698 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4699 /* type identifier */
4700 *v = tok;
4701 next();
4702 } else {
4703 abstract:
4704 if (!(td & TYPE_ABSTRACT))
4705 expect("identifier");
4706 *v = 0;
4708 post_type(post, ad, storage, 0);
4709 parse_attribute(ad);
4710 type->t |= storage;
4711 return ret;
4714 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4715 ST_FUNC int lvalue_type(int t)
4717 int bt, r;
4718 r = VT_LVAL;
4719 bt = t & VT_BTYPE;
4720 if (bt == VT_BYTE || bt == VT_BOOL)
4721 r |= VT_LVAL_BYTE;
4722 else if (bt == VT_SHORT)
4723 r |= VT_LVAL_SHORT;
4724 else
4725 return r;
4726 if (t & VT_UNSIGNED)
4727 r |= VT_LVAL_UNSIGNED;
4728 return r;
4731 /* indirection with full error checking and bound check */
4732 ST_FUNC void indir(void)
4734 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4735 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4736 return;
4737 expect("pointer");
4739 if (vtop->r & VT_LVAL)
4740 gv(RC_INT);
4741 vtop->type = *pointed_type(&vtop->type);
4742 /* Arrays and functions are never lvalues */
4743 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4744 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4745 vtop->r |= lvalue_type(vtop->type.t);
4746 /* if bound checking, the referenced pointer must be checked */
4747 #ifdef CONFIG_TCC_BCHECK
4748 if (tcc_state->do_bounds_check)
4749 vtop->r |= VT_MUSTBOUND;
4750 #endif
4754 /* pass a parameter to a function and do type checking and casting */
4755 static void gfunc_param_typed(Sym *func, Sym *arg)
4757 int func_type;
4758 CType type;
4760 func_type = func->f.func_type;
4761 if (func_type == FUNC_OLD ||
4762 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4763 /* default casting : only need to convert float to double */
4764 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4765 gen_cast_s(VT_DOUBLE);
4766 } else if (vtop->type.t & VT_BITFIELD) {
4767 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4768 type.ref = vtop->type.ref;
4769 gen_cast(&type);
4771 } else if (arg == NULL) {
4772 tcc_error("too many arguments to function");
4773 } else {
4774 type = arg->type;
4775 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4776 gen_assign_cast(&type);
4780 /* parse an expression and return its type without any side effect. */
4781 static void expr_type(CType *type, void (*expr_fn)(void))
4783 nocode_wanted++;
4784 expr_fn();
4785 *type = vtop->type;
4786 vpop();
4787 nocode_wanted--;
4790 /* parse an expression of the form '(type)' or '(expr)' and return its
4791 type */
4792 static void parse_expr_type(CType *type)
4794 int n;
4795 AttributeDef ad;
4797 skip('(');
4798 if (parse_btype(type, &ad)) {
4799 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4800 } else {
4801 expr_type(type, gexpr);
4803 skip(')');
4806 static void parse_type(CType *type)
4808 AttributeDef ad;
4809 int n;
4811 if (!parse_btype(type, &ad)) {
4812 expect("type");
4814 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4817 static void parse_builtin_params(int nc, const char *args)
4819 char c, sep = '(';
4820 CType t;
4821 if (nc)
4822 nocode_wanted++;
4823 next();
4824 while ((c = *args++)) {
4825 skip(sep);
4826 sep = ',';
4827 switch (c) {
4828 case 'e': expr_eq(); continue;
4829 case 't': parse_type(&t); vpush(&t); continue;
4830 default: tcc_error("internal error"); break;
4833 skip(')');
4834 if (nc)
4835 nocode_wanted--;
/* Parse and generate code for a unary expression: constants, string
   literals, casts, compound literals, statement expressions, sizeof /
   alignof, the __builtin_* family, pre/post increment, field access,
   array indexing and function calls.  The result is left in vtop. */
4838 ST_FUNC void unary(void)
4840 int n, t, align, size, r, sizeof_caller;
4841 CType type;
4842 Sym *s;
4843 AttributeDef ad;
4845 sizeof_caller = in_sizeof;
4846 in_sizeof = 0;
4847 type.ref = NULL;
4848 /* XXX: GCC 2.95.3 does not generate a table although it should be
4849 better here */
4850 tok_next:
4851 switch(tok) {
4852 case TOK_EXTENSION:
4853 next();
4854 goto tok_next;
4855 case TOK_LCHAR:
4856 #ifdef TCC_TARGET_PE
4857 t = VT_SHORT|VT_UNSIGNED;
4858 goto push_tokc;
4859 #endif
4860 case TOK_CINT:
4861 case TOK_CCHAR:
4862 t = VT_INT;
4863 push_tokc:
4864 type.t = t;
4865 vsetc(&type, VT_CONST, &tokc);
4866 next();
4867 break;
4868 case TOK_CUINT:
4869 t = VT_INT | VT_UNSIGNED;
4870 goto push_tokc;
4871 case TOK_CLLONG:
4872 t = VT_LLONG;
4873 goto push_tokc;
4874 case TOK_CULLONG:
4875 t = VT_LLONG | VT_UNSIGNED;
4876 goto push_tokc;
4877 case TOK_CFLOAT:
4878 t = VT_FLOAT;
4879 goto push_tokc;
4880 case TOK_CDOUBLE:
4881 t = VT_DOUBLE;
4882 goto push_tokc;
4883 case TOK_CLDOUBLE:
4884 t = VT_LDOUBLE;
4885 goto push_tokc;
4886 case TOK_CLONG:
4887 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4888 goto push_tokc;
4889 case TOK_CULONG:
4890 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4891 goto push_tokc;
4892 case TOK___FUNCTION__:
4893 if (!gnu_ext)
4894 goto tok_identifier;
4895 /* fall thru */
4896 case TOK___FUNC__:
4898 void *ptr;
4899 int len;
4900 /* special function name identifier */
4901 len = strlen(funcname) + 1;
4902 /* generate char[len] type */
4903 type.t = VT_BYTE;
4904 mk_pointer(&type);
4905 type.t |= VT_ARRAY;
4906 type.ref->c = len;
4907 vpush_ref(&type, data_section, data_section->data_offset, len);
4908 if (!NODATA_WANTED) {
4909 ptr = section_ptr_add(data_section, len);
4910 memcpy(ptr, funcname, len);
4912 next();
4914 break;
4915 case TOK_LSTR:
4916 #ifdef TCC_TARGET_PE
4917 t = VT_SHORT | VT_UNSIGNED;
4918 #else
4919 t = VT_INT;
4920 #endif
4921 goto str_init;
4922 case TOK_STR:
4923 /* string parsing */
4924 t = VT_BYTE;
4925 if (tcc_state->char_is_unsigned)
4926 t = VT_BYTE | VT_UNSIGNED;
4927 str_init:
4928 if (tcc_state->warn_write_strings)
4929 t |= VT_CONSTANT;
4930 type.t = t;
4931 mk_pointer(&type);
4932 type.t |= VT_ARRAY;
4933 memset(&ad, 0, sizeof(AttributeDef));
4934 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4935 break;
/* '(' can start a cast, an ISO C99 compound literal, a GNU statement
   expression or a plain parenthesized expression */
4936 case '(':
4937 next();
4938 /* cast ? */
4939 if (parse_btype(&type, &ad)) {
4940 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4941 skip(')');
4942 /* check ISOC99 compound literal */
4943 if (tok == '{') {
4944 /* data is allocated locally by default */
4945 if (global_expr)
4946 r = VT_CONST;
4947 else
4948 r = VT_LOCAL;
4949 /* all except arrays are lvalues */
4950 if (!(type.t & VT_ARRAY))
4951 r |= lvalue_type(type.t);
4952 memset(&ad, 0, sizeof(AttributeDef));
4953 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4954 } else {
4955 if (sizeof_caller) {
4956 vpush(&type);
4957 return;
4959 unary();
4960 gen_cast(&type);
4962 } else if (tok == '{') {
4963 int saved_nocode_wanted = nocode_wanted;
4964 if (const_wanted)
4965 tcc_error("expected constant");
4966 /* save all registers */
4967 save_regs(0);
4968 /* statement expression : we do not accept break/continue
4969 inside as GCC does. We do retain the nocode_wanted state,
4970 as statement expressions can't ever be entered from the
4971 outside, so any reactivation of code emission (from labels
4972 or loop heads) can be disabled again after the end of it. */
4973 block(1);
4974 nocode_wanted = saved_nocode_wanted;
4975 skip(')');
4976 } else {
4977 gexpr();
4978 skip(')');
4980 break;
4981 case '*':
4982 next();
4983 unary();
4984 indir();
4985 break;
4986 case '&':
4987 next();
4988 unary();
4989 /* functions names must be treated as function pointers,
4990 except for unary '&' and sizeof. Since we consider that
4991 functions are not lvalues, we only have to handle it
4992 there and in function calls. */
4993 /* arrays can also be used although they are not lvalues */
4994 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4995 !(vtop->type.t & VT_ARRAY))
4996 test_lvalue();
4997 mk_pointer(&vtop->type);
4998 gaddrof();
4999 break;
5000 case '!':
5001 next();
5002 unary();
5003 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5004 gen_cast_s(VT_BOOL);
5005 vtop->c.i = !vtop->c.i;
5006 } else if (vtop->r == VT_CMP) {
/* invert the pending comparison instead of emitting code */
5007 vtop->cmp_op ^= 1;
5008 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5009 } else {
5010 vpushi(0);
5011 gen_op(TOK_EQ);
5013 break;
5014 case '~':
5015 next();
5016 unary();
5017 vpushi(-1);
5018 gen_op('^');
5019 break;
5020 case '+':
5021 next();
5022 unary();
5023 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5024 tcc_error("pointer not accepted for unary plus");
5025 /* In order to force cast, we add zero, except for floating point
5026 where we really need an noop (otherwise -0.0 will be transformed
5027 into +0.0). */
5028 if (!is_float(vtop->type.t)) {
5029 vpushi(0);
5030 gen_op('+');
5032 break;
5033 case TOK_SIZEOF:
5034 case TOK_ALIGNOF1:
5035 case TOK_ALIGNOF2:
5036 case TOK_ALIGNOF3:
5037 t = tok;
5038 next();
5039 in_sizeof++;
5040 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5041 s = NULL;
5042 if (vtop[1].r & VT_SYM)
5043 s = vtop[1].sym; /* hack: accessing previous vtop */
5044 size = type_size(&type, &align);
5045 if (s && s->a.aligned)
5046 align = 1 << (s->a.aligned - 1);
5047 if (t == TOK_SIZEOF) {
5048 if (!(type.t & VT_VLA)) {
5049 if (size < 0)
5050 tcc_error("sizeof applied to an incomplete type");
5051 vpushs(size);
5052 } else {
5053 vla_runtime_type_size(&type, &align);
5055 } else {
5056 vpushs(align);
5058 vtop->type.t |= VT_UNSIGNED;
5059 break;
5061 case TOK_builtin_expect:
5062 /* __builtin_expect is a no-op for now */
5063 parse_builtin_params(0, "ee");
5064 vpop();
5065 break;
5066 case TOK_builtin_types_compatible_p:
5067 parse_builtin_params(0, "tt");
5068 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5069 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5070 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5071 vtop -= 2;
5072 vpushi(n);
5073 break;
5074 case TOK_builtin_choose_expr:
5076 int64_t c;
5077 next();
5078 skip('(');
5079 c = expr_const64();
5080 skip(',');
5081 if (!c) {
5082 nocode_wanted++;
5084 expr_eq();
5085 if (!c) {
5086 vpop();
5087 nocode_wanted--;
5089 skip(',');
5090 if (c) {
5091 nocode_wanted++;
5093 expr_eq();
5094 if (c) {
5095 vpop();
5096 nocode_wanted--;
5098 skip(')');
5100 break;
5101 case TOK_builtin_constant_p:
5102 parse_builtin_params(1, "e");
5103 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5104 vtop--;
5105 vpushi(n);
5106 break;
5107 case TOK_builtin_frame_address:
5108 case TOK_builtin_return_address:
5110 int tok1 = tok;
5111 int level;
5112 next();
5113 skip('(');
5114 if (tok != TOK_CINT) {
5115 tcc_error("%s only takes positive integers",
5116 tok1 == TOK_builtin_return_address ?
5117 "__builtin_return_address" :
5118 "__builtin_frame_address");
5120 level = (uint32_t)tokc.i;
5121 next();
5122 skip(')');
5123 type.t = VT_VOID;
5124 mk_pointer(&type);
5125 vset(&type, VT_LOCAL, 0); /* local frame */
5126 while (level--) {
5127 mk_pointer(&vtop->type);
5128 indir(); /* -> parent frame */
5130 if (tok1 == TOK_builtin_return_address) {
5131 // assume return address is just above frame pointer on stack
5132 vpushi(PTR_SIZE);
5133 gen_op('+');
5134 mk_pointer(&vtop->type);
5135 indir();
5138 break;
5139 #ifdef TCC_TARGET_X86_64
5140 #ifdef TCC_TARGET_PE
5141 case TOK_builtin_va_start:
5142 parse_builtin_params(0, "ee");
5143 r = vtop->r & VT_VALMASK;
5144 if (r == VT_LLOCAL)
5145 r = VT_LOCAL;
5146 if (r != VT_LOCAL)
5147 tcc_error("__builtin_va_start expects a local variable");
5148 vtop->r = r;
5149 vtop->type = char_pointer_type;
5150 vtop->c.i += 8;
5151 vstore();
5152 break;
5153 #else
5154 case TOK_builtin_va_arg_types:
5155 parse_builtin_params(0, "t");
5156 vpushi(classify_x86_64_va_arg(&vtop->type));
5157 vswap();
5158 vpop();
5159 break;
5160 #endif
5161 #endif
5163 #ifdef TCC_TARGET_ARM64
5164 case TOK___va_start: {
5165 parse_builtin_params(0, "ee");
5166 //xx check types
5167 gen_va_start();
5168 vpushi(0);
5169 vtop->type.t = VT_VOID;
5170 break;
5172 case TOK___va_arg: {
5173 parse_builtin_params(0, "et");
5174 type = vtop->type;
5175 vpop();
5176 //xx check types
5177 gen_va_arg(&type);
5178 vtop->type = type;
5179 break;
5181 case TOK___arm64_clear_cache: {
5182 parse_builtin_params(0, "ee");
5183 gen_clear_cache();
5184 vpushi(0);
5185 vtop->type.t = VT_VOID;
5186 break;
5188 #endif
5189 /* pre operations */
5190 case TOK_INC:
5191 case TOK_DEC:
5192 t = tok;
5193 next();
5194 unary();
5195 inc(0, t);
5196 break;
5197 case '-':
5198 next();
5199 unary();
5200 t = vtop->type.t & VT_BTYPE;
5201 if (is_float(t)) {
5202 /* In IEEE negate(x) isn't subtract(0,x), but rather
5203 subtract(-0, x). */
5204 vpush(&vtop->type);
5205 if (t == VT_FLOAT)
5206 vtop->c.f = -1.0 * 0.0;
5207 else if (t == VT_DOUBLE)
5208 vtop->c.d = -1.0 * 0.0;
5209 else
5210 vtop->c.ld = -1.0 * 0.0;
5211 } else
5212 vpushi(0);
5213 vswap();
5214 gen_op('-');
5215 break;
5216 case TOK_LAND:
5217 if (!gnu_ext)
5218 goto tok_identifier;
5219 next();
5220 /* allow to take the address of a label */
5221 if (tok < TOK_UIDENT)
5222 expect("label identifier");
5223 s = label_find(tok);
5224 if (!s) {
5225 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5226 } else {
5227 if (s->r == LABEL_DECLARED)
5228 s->r = LABEL_FORWARD;
5230 if (!s->type.t) {
5231 s->type.t = VT_VOID;
5232 mk_pointer(&s->type);
5233 s->type.t |= VT_STATIC;
5235 vpushsym(&s->type, s);
5236 next();
5237 break;
5239 case TOK_GENERIC:
5241 CType controlling_type;
5242 int has_default = 0;
5243 int has_match = 0;
5244 int learn = 0;
5245 TokenString *str = NULL;
5246 int saved_const_wanted = const_wanted;
5248 next();
5249 skip('(');
5250 const_wanted = 0;
5251 expr_type(&controlling_type, expr_eq);
5252 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5253 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5254 mk_pointer(&controlling_type);
5255 const_wanted = saved_const_wanted;
5256 for (;;) {
5257 learn = 0;
5258 skip(',');
5259 if (tok == TOK_DEFAULT) {
5260 if (has_default)
5261 tcc_error("too many 'default'");
5262 has_default = 1;
5263 if (!has_match)
5264 learn = 1;
5265 next();
5266 } else {
5267 AttributeDef ad_tmp;
5268 int itmp;
5269 CType cur_type;
5271 in_generic++;
5272 parse_btype(&cur_type, &ad_tmp);
5273 in_generic--;
5275 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5276 if (compare_types(&controlling_type, &cur_type, 0)) {
5277 if (has_match) {
5278 tcc_error("type match twice");
5280 has_match = 1;
5281 learn = 1;
5284 skip(':');
5285 if (learn) {
5286 if (str)
5287 tok_str_free(str);
5288 skip_or_save_block(&str);
5289 } else {
5290 skip_or_save_block(NULL);
5292 if (tok == ')')
5293 break;
5295 if (!str) {
5296 char buf[60];
5297 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5298 tcc_error("type '%s' does not match any association", buf);
5300 begin_macro(str, 1);
5301 next();
5302 expr_eq();
5303 if (tok != TOK_EOF)
5304 expect(",");
5305 end_macro();
5306 next();
5307 break;
5309 // special qnan , snan and infinity values
5310 case TOK___NAN__:
5311 n = 0x7fc00000;
5312 special_math_val:
5313 vpushi(n);
5314 vtop->type.t = VT_FLOAT;
5315 next();
5316 break;
5317 case TOK___SNAN__:
5318 n = 0x7f800001;
5319 goto special_math_val;
5320 case TOK___INF__:
5321 n = 0x7f800000;
5322 goto special_math_val;
5324 default:
5325 tok_identifier:
5326 t = tok;
5327 next();
5328 if (t < TOK_UIDENT)
5329 expect("identifier");
5330 s = sym_find(t);
5331 if (!s || IS_ASM_SYM(s)) {
5332 const char *name = get_tok_str(t, NULL);
5333 if (tok != '(')
5334 tcc_error("'%s' undeclared", name);
5335 /* for simple function calls, we tolerate undeclared
5336 external reference to int() function */
5337 if (tcc_state->warn_implicit_function_declaration
5338 #ifdef TCC_TARGET_PE
5339 /* people must be warned about using undeclared WINAPI functions
5340 (which usually start with uppercase letter) */
5341 || (name[0] >= 'A' && name[0] <= 'Z')
5342 #endif
5344 tcc_warning("implicit declaration of function '%s'", name);
5345 s = external_global_sym(t, &func_old_type);
5348 r = s->r;
5349 /* A symbol that has a register is a local register variable,
5350 which starts out as VT_LOCAL value. */
5351 if ((r & VT_VALMASK) < VT_CONST)
5352 r = (r & ~VT_VALMASK) | VT_LOCAL;
5354 vset(&s->type, r, s->c);
5355 /* Point to s as backpointer (even without r&VT_SYM).
5356 Will be used by at least the x86 inline asm parser for
5357 regvars. */
5358 vtop->sym = s;
5360 if (r & VT_SYM) {
5361 vtop->c.i = 0;
5362 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5363 vtop->c.i = s->enum_val;
5365 break;
5368 /* post operations */
5369 while (1) {
5370 if (tok == TOK_INC || tok == TOK_DEC) {
5371 inc(1, tok);
5372 next();
5373 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5374 int qualifiers, cumofs = 0;
5375 /* field */
5376 if (tok == TOK_ARROW)
5377 indir();
5378 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5379 test_lvalue();
5380 gaddrof();
5381 /* expect pointer on structure */
5382 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5383 expect("struct or union");
5384 if (tok == TOK_CDOUBLE)
5385 expect("field name");
5386 next();
5387 if (tok == TOK_CINT || tok == TOK_CUINT)
5388 expect("field name");
5389 s = find_field(&vtop->type, tok, &cumofs);
5390 if (!s)
5391 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5392 /* add field offset to pointer */
5393 vtop->type = char_pointer_type; /* change type to 'char *' */
5394 vpushi(cumofs + s->c);
5395 gen_op('+');
5396 /* change type to field type, and set to lvalue */
5397 vtop->type = s->type;
5398 vtop->type.t |= qualifiers;
5399 /* an array is never an lvalue */
5400 if (!(vtop->type.t & VT_ARRAY)) {
5401 vtop->r |= lvalue_type(vtop->type.t);
5402 #ifdef CONFIG_TCC_BCHECK
5403 /* if bound checking, the referenced pointer must be checked */
5404 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5405 vtop->r |= VT_MUSTBOUND;
5406 #endif
5408 next();
5409 } else if (tok == '[') {
5410 next();
5411 gexpr();
5412 gen_op('+');
5413 indir();
5414 skip(']');
5415 } else if (tok == '(') {
5416 SValue ret;
5417 Sym *sa;
5418 int nb_args, ret_nregs, ret_align, regsize, variadic;
5420 /* function call */
5421 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5422 /* pointer test (no array accepted) */
5423 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5424 vtop->type = *pointed_type(&vtop->type);
5425 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5426 goto error_func;
5427 } else {
5428 error_func:
5429 expect("function pointer");
5431 } else {
5432 vtop->r &= ~VT_LVAL; /* no lvalue */
5434 /* get return type */
5435 s = vtop->type.ref;
5436 next();
5437 sa = s->next; /* first parameter */
5438 nb_args = regsize = 0;
5439 ret.r2 = VT_CONST;
5440 /* compute first implicit argument if a structure is returned */
5441 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5442 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5443 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5444 &ret_align, &regsize);
5445 if (!ret_nregs) {
5446 /* get some space for the returned structure */
5447 size = type_size(&s->type, &align);
5448 #ifdef TCC_TARGET_ARM64
5449 /* On arm64, a small struct is return in registers.
5450 It is much easier to write it to memory if we know
5451 that we are allowed to write some extra bytes, so
5452 round the allocated space up to a power of 2: */
5453 if (size < 16)
5454 while (size & (size - 1))
5455 size = (size | (size - 1)) + 1;
5456 #endif
5457 loc = (loc - size) & -align;
5458 ret.type = s->type;
5459 ret.r = VT_LOCAL | VT_LVAL;
5460 /* pass it as 'int' to avoid structure arg passing
5461 problems */
5462 vseti(VT_LOCAL, loc);
5463 ret.c = vtop->c;
5464 nb_args++;
5466 } else {
5467 ret_nregs = 1;
5468 ret.type = s->type;
5471 if (ret_nregs) {
5472 /* return in register */
5473 if (is_float(ret.type.t)) {
5474 ret.r = reg_fret(ret.type.t);
5475 #ifdef TCC_TARGET_X86_64
5476 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5477 ret.r2 = REG_QRET;
5478 #endif
5479 } else {
5480 #ifndef TCC_TARGET_ARM64
5481 #ifndef TCC_TARGET_RISCV64
5482 #ifdef TCC_TARGET_X86_64
5483 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5484 #else
5485 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5486 #endif
5487 ret.r2 = REG_LRET;
5488 #endif
5489 #endif
5490 ret.r = REG_IRET;
5492 ret.c.i = 0;
5494 if (tok != ')') {
5495 for(;;) {
5496 expr_eq();
5497 gfunc_param_typed(s, sa);
5498 nb_args++;
5499 if (sa)
5500 sa = sa->next;
5501 if (tok == ')')
5502 break;
5503 skip(',');
5506 if (sa)
5507 tcc_error("too few arguments to function");
5508 skip(')');
5509 gfunc_call(nb_args);
5511 /* return value */
5512 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5513 vsetc(&ret.type, r, &ret.c);
5514 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5517 /* handle packed struct return */
5518 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5519 int addr, offset;
5521 size = type_size(&s->type, &align);
5522 /* We're writing whole regs often, make sure there's enough
5523 space. Assume register size is power of 2. */
5524 if (regsize > align)
5525 align = regsize;
5526 loc = (loc - size) & -align;
5527 addr = loc;
5528 offset = 0;
5529 for (;;) {
5530 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5531 vswap();
5532 vstore();
5533 vtop--;
5534 if (--ret_nregs == 0)
5535 break;
5536 offset += regsize;
5538 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5540 if (s->f.func_noreturn)
5541 CODE_OFF();
5542 } else {
5543 break;
5548 ST_FUNC void expr_prod(void)
5550 int t;
5552 unary();
5553 while (tok == '*' || tok == '/' || tok == '%') {
5554 t = tok;
5555 next();
5556 unary();
5557 gen_op(t);
5561 ST_FUNC void expr_sum(void)
5563 int t;
5565 expr_prod();
5566 while (tok == '+' || tok == '-') {
5567 t = tok;
5568 next();
5569 expr_prod();
5570 gen_op(t);
5574 static void expr_shift(void)
5576 int t;
5578 expr_sum();
5579 while (tok == TOK_SHL || tok == TOK_SAR) {
5580 t = tok;
5581 next();
5582 expr_sum();
5583 gen_op(t);
5587 static void expr_cmp(void)
5589 int t;
5591 expr_shift();
5592 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5593 tok == TOK_ULT || tok == TOK_UGE) {
5594 t = tok;
5595 next();
5596 expr_shift();
5597 gen_op(t);
5601 static void expr_cmpeq(void)
5603 int t;
5605 expr_cmp();
5606 while (tok == TOK_EQ || tok == TOK_NE) {
5607 t = tok;
5608 next();
5609 expr_cmp();
5610 gen_op(t);
5614 static void expr_and(void)
5616 expr_cmpeq();
5617 while (tok == '&') {
5618 next();
5619 expr_cmpeq();
5620 gen_op('&');
5624 static void expr_xor(void)
5626 expr_and();
5627 while (tok == '^') {
5628 next();
5629 expr_and();
5630 gen_op('^');
5634 static void expr_or(void)
5636 expr_xor();
5637 while (tok == '|') {
5638 next();
5639 expr_xor();
5640 gen_op('|');
5644 static int condition_3way(void);
/* Generate code for a chain of '&&' (i == 1) or '||' (i == 0)
   operators.  'e_fn' parses one operand, 'e_op' is the operator token.
   't' chains the pending short-circuit jumps; 'cc' tracks whether all
   operands so far were compile-time constants; 'f' is set once an
   operand statically decides the whole chain, after which the remaining
   operands are parsed with code generation suppressed. */
5646 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5648 int t = 0, cc = 1, f = 0, c;
5649 for(;;) {
5650 c = f ? i : condition_3way();
5651 if (c < 0) {
5652 save_regs(1), cc = 0;
5653 } else if (c != i) {
5654 nocode_wanted++, f = 1;
5656 if (tok != e_op) {
5657 if (cc || f) {
5658 vpop();
5659 vpushi(i ^ f);
5660 gsym(t);
5661 nocode_wanted -= f;
5662 } else {
5663 gvtst_set(i, t);
5665 break;
5667 if (c < 0)
5668 t = gvtst(i, t);
5669 else
5670 vpop();
5671 next();
5672 e_fn();
5676 static void expr_land(void)
5678 expr_or();
5679 if (tok == TOK_LAND)
5680 expr_landor(expr_or, TOK_LAND, 1);
5683 static void expr_lor(void)
5685 expr_land();
5686 if (tok == TOK_LOR)
5687 expr_landor(expr_land, TOK_LOR, 0);
5690 /* Assuming vtop is a value used in a conditional context
5691 (i.e. compared with zero) return 0 if it's false, 1 if
5692 true and -1 if it can't be statically determined. */
5693 static int condition_3way(void)
5695 int c = -1;
5696 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5697 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5698 vdup();
5699 gen_cast_s(VT_BOOL);
5700 c = vtop->c.i;
5701 vpop();
5703 return c;
5706 static int is_cond_bool(SValue *sv)
5708 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5709 && (sv->type.t & VT_BTYPE) == VT_INT)
5710 return (unsigned)sv->c.i < 2;
5711 if (sv->r == VT_CMP)
5712 return 1;
5713 return 0;
/* Parse and generate code for a conditional expression
   'a ? b : c', including the GNU two-operand form 'a ?: c'.
   Determines the common result type per the ISO C usual conversion
   rules, casts both branches to it, and merges the two value paths
   into a single register (keeping struct results addressable so that
   '(e ? a : b).member' remains valid). */
5716 static void expr_cond(void)
5718 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5719 SValue sv;
5720 CType type, type1, type2;
5721 int ncw_prev;
5723 expr_lor();
5724 if (tok == '?') {
5725 next();
5726 c = condition_3way();
5727 g = (tok == ':' && gnu_ext);
5728 tt = 0;
5729 if (!g) {
5730 if (c < 0) {
5731 save_regs(1);
5732 tt = gvtst(1, 0);
5733 } else {
5734 vpop();
5736 } else if (c < 0) {
5737 /* needed to avoid having different registers saved in
5738 each branch */
5739 rc = RC_INT;
5740 if (is_float(vtop->type.t)) {
5741 rc = RC_FLOAT;
5742 #ifdef TCC_TARGET_X86_64
5743 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5744 rc = RC_ST0;
5746 #endif
5748 gv(rc);
5749 save_regs(1);
5750 gv_dup();
5751 tt = gvtst(0, 0);
5754 ncw_prev = nocode_wanted;
5755 if (1) {
5756 if (c == 0)
5757 nocode_wanted++;
5758 if (!g)
5759 gexpr();
5761 if (c < 0 && vtop->r == VT_CMP) {
5762 t1 = gvtst(0, 0);
5763 vpushi(0);
5764 gvtst_set(0, t1);
5767 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5768 mk_pointer(&vtop->type);
5769 type1 = vtop->type;
5770 sv = *vtop; /* save value to handle it later */
5771 vtop--; /* no vpop so that FP stack is not flushed */
5773 if (g) {
5774 u = tt;
5775 } else if (c < 0) {
5776 u = gjmp(0);
5777 gsym(tt);
5778 } else
5779 u = 0;
5781 nocode_wanted = ncw_prev;
5782 if (c == 1)
5783 nocode_wanted++;
5784 skip(':');
5785 expr_cond();
5787 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5788 if (sv.r == VT_CMP) {
5789 t1 = sv.jtrue;
5790 t2 = u;
5791 } else {
5792 t1 = gvtst(0, 0);
5793 t2 = gjmp(0);
5794 gsym(u);
5795 vpushv(&sv);
5797 gvtst_set(0, t1);
5798 gvtst_set(1, t2);
5799 nocode_wanted = ncw_prev;
5800 // tcc_warning("two conditions expr_cond");
5801 return;
5804 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5805 mk_pointer(&vtop->type);
5806 type2=vtop->type;
5807 t1 = type1.t;
5808 bt1 = t1 & VT_BTYPE;
5809 t2 = type2.t;
5810 bt2 = t2 & VT_BTYPE;
5811 type.ref = NULL;
5813 /* cast operands to correct type according to ISOC rules */
5814 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5815 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5816 } else if (is_float(bt1) || is_float(bt2)) {
5817 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5818 type.t = VT_LDOUBLE;
5820 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5821 type.t = VT_DOUBLE;
5822 } else {
5823 type.t = VT_FLOAT;
5825 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5826 /* cast to biggest op */
5827 type.t = VT_LLONG | VT_LONG;
5828 if (bt1 == VT_LLONG)
5829 type.t &= t1;
5830 if (bt2 == VT_LLONG)
5831 type.t &= t2;
5832 /* convert to unsigned if it does not fit in a long long */
5833 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5834 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5835 type.t |= VT_UNSIGNED;
5836 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5837 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5838 /* If one is a null ptr constant the result type
5839 is the other. */
5840 if (is_null_pointer (vtop)) type = type1;
5841 else if (is_null_pointer (&sv)) type = type2;
5842 else if (bt1 != bt2)
5843 tcc_error("incompatible types in conditional expressions");
5844 else {
5845 CType *pt1 = pointed_type(&type1);
5846 CType *pt2 = pointed_type(&type2);
5847 int pbt1 = pt1->t & VT_BTYPE;
5848 int pbt2 = pt2->t & VT_BTYPE;
5849 int newquals, copied = 0;
5850 /* pointers to void get preferred, otherwise the
5851 pointed to types minus qualifs should be compatible */
5852 type = (pbt1 == VT_VOID) ? type1 : type2;
5853 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5854 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5855 tcc_warning("pointer type mismatch in conditional expression\n");
5857 /* combine qualifs */
5858 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5859 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5860 & newquals)
5862 /* copy the pointer target symbol */
5863 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5864 0, type.ref->c);
5865 copied = 1;
5866 pointed_type(&type)->t |= newquals;
5868 /* pointers to incomplete arrays get converted to
5869 pointers to completed ones if possible */
5870 if (pt1->t & VT_ARRAY
5871 && pt2->t & VT_ARRAY
5872 && pointed_type(&type)->ref->c < 0
5873 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5875 if (!copied)
5876 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5877 0, type.ref->c);
5878 pointed_type(&type)->ref =
5879 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5880 0, pointed_type(&type)->ref->c);
5881 pointed_type(&type)->ref->c =
5882 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5885 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5886 /* XXX: test structure compatibility */
5887 type = bt1 == VT_STRUCT ? type1 : type2;
5888 } else {
5889 /* integer operations */
5890 type.t = VT_INT | (VT_LONG & (t1 | t2));
5891 /* convert to unsigned if it does not fit in an integer */
5892 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5893 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5894 type.t |= VT_UNSIGNED;
5896 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5897 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5898 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5900 /* now we convert second operand */
5901 if (c != 1) {
5902 gen_cast(&type);
5903 if (islv) {
5904 mk_pointer(&vtop->type);
5905 gaddrof();
5906 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5907 gaddrof();
5910 rc = RC_INT;
5911 if (is_float(type.t)) {
5912 rc = RC_FLOAT;
5913 #ifdef TCC_TARGET_X86_64
5914 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5915 rc = RC_ST0;
5917 #endif
5918 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5919 /* for long longs, we use fixed registers to avoid having
5920 to handle a complicated move */
5921 rc = RC_IRET;
5924 tt = r2 = 0;
5925 if (c < 0) {
5926 r2 = gv(rc);
5927 tt = gjmp(0);
5929 gsym(u);
5930 nocode_wanted = ncw_prev;
5932 /* this is horrible, but we must also convert first
5933 operand */
5934 if (c != 0) {
5935 *vtop = sv;
5936 gen_cast(&type);
5937 if (islv) {
5938 mk_pointer(&vtop->type);
5939 gaddrof();
5940 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5941 gaddrof();
5944 if (c < 0) {
5945 r1 = gv(rc);
5946 move_reg(r2, r1, type.t);
5947 vtop->r = r2;
5948 gsym(tt);
5951 if (islv)
5952 indir();
5957 static void expr_eq(void)
5959 int t;
5961 expr_cond();
5962 if (tok == '=' ||
5963 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5964 tok == TOK_A_XOR || tok == TOK_A_OR ||
5965 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5966 test_lvalue();
5967 t = tok;
5968 next();
5969 if (t == '=') {
5970 expr_eq();
5971 } else {
5972 vdup();
5973 expr_eq();
5974 gen_op(t & 0x7f);
5976 vstore();
5980 ST_FUNC void gexpr(void)
5982 while (1) {
5983 expr_eq();
5984 if (tok != ',')
5985 break;
5986 vpop();
5987 next();
5991 /* parse a constant expression and return value in vtop. */
5992 static void expr_const1(void)
5994 const_wanted++;
5995 nocode_wanted++;
5996 expr_cond();
5997 nocode_wanted--;
5998 const_wanted--;
6001 /* parse an integer constant and return its value. */
6002 static inline int64_t expr_const64(void)
6004 int64_t c;
6005 expr_const1();
6006 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6007 expect("constant expression");
6008 c = vtop->c.i;
6009 vpop();
6010 return c;
6013 /* parse an integer constant and return its value.
6014 Complain if it doesn't fit 32bit (signed or unsigned). */
6015 ST_FUNC int expr_const(void)
6017 int c;
6018 int64_t wc = expr_const64();
6019 c = wc;
6020 if (c != wc && (unsigned)c != wc)
6021 tcc_error("constant exceeds 32 bit");
6022 return c;
6025 /* ------------------------------------------------------------------------- */
6026 /* return from function */
6028 #ifndef TCC_TARGET_ARM64
6029 #ifndef TCC_TARGET_RISCV64
/* Generate code to return the value on vtop from the current function.
   Structures either go through the caller-supplied result pointer
   (func_vc) or are packed into one or more registers as decided by
   gfunc_sret(); scalars are loaded into the ABI return register. */
6030 static void gfunc_return(CType *func_type)
6032 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6033 CType type, ret_type;
6034 int ret_align, ret_nregs, regsize;
6035 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6036 &ret_align, &regsize);
6037 if (0 == ret_nregs) {
6038 /* if returning structure, must copy it to implicit
6039 first pointer arg location */
6040 type = *func_type;
6041 mk_pointer(&type);
6042 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6043 indir();
6044 vswap();
6045 /* copy structure value to pointer */
6046 vstore();
6047 } else {
6048 /* returning structure packed into registers */
6049 int r, size, addr, align;
6050 size = type_size(func_type,&align);
6051 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6052 (vtop->c.i & (ret_align-1)))
6053 && (align & (ret_align-1))) {
/* value is not suitably aligned in memory: spill it to a
   properly aligned stack slot first */
6054 loc = (loc - size) & -ret_align;
6055 addr = loc;
6056 type = *func_type;
6057 vset(&type, VT_LOCAL | VT_LVAL, addr);
6058 vswap();
6059 vstore();
6060 vpop();
6061 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6063 vtop->type = ret_type;
6064 if (is_float(ret_type.t))
6065 r = rc_fret(ret_type.t);
6066 else
6067 r = RC_IRET;
6069 if (ret_nregs == 1)
6070 gv(r);
6071 else {
6072 for (;;) {
6073 vdup();
6074 gv(r);
6075 vpop();
6076 if (--ret_nregs == 0)
6077 break;
6078 /* We assume that when a structure is returned in multiple
6079 registers, their classes are consecutive values of the
6080 suite s(n) = 2^n */
6081 r <<= 1;
6082 vtop->c.i += regsize;
6086 } else if (is_float(func_type->t)) {
6087 gv(rc_fret(func_type->t));
6088 } else {
6089 gv(RC_IRET);
6091 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6094 #endif
6096 static void check_func_return(void)
6098 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6099 return;
6100 if (!strcmp (funcname, "main")
6101 && (func_vt.t & VT_BTYPE) == VT_INT) {
6102 /* main returns 0 by default */
6103 vpushi(0);
6104 gen_assign_cast(&func_vt);
6105 gfunc_return(&func_vt);
6106 } else {
6107 tcc_warning("function might return no value: '%s'", funcname);
6111 /* ------------------------------------------------------------------------- */
6112 /* switch/case */
6114 static int case_cmp(const void *pa, const void *pb)
6116 int64_t a = (*(struct case_t**) pa)->v1;
6117 int64_t b = (*(struct case_t**) pb)->v1;
6118 return a < b ? -1 : a > b;
/* Generate a conditional jump to the already-known address 'a',
   resolving the pending jump chain 't' as well. */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
/* Generate the dispatch code for the sorted case ranges base[0..len)
   of a switch: binary search while more than 8 entries remain, then a
   linear scan.  The switch value is on vtop; jumps that fall through to
   the default/exit label are chained into *bsym. */
6126 static void gcase(struct case_t **base, int len, int *bsym)
6128 struct case_t *p;
6129 int e;
6130 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6131 while (len > 8) {
6132 /* binary search */
6133 p = base[len/2];
6134 vdup();
6135 if (ll)
6136 vpushll(p->v2);
6137 else
6138 vpushi(p->v2);
6139 gen_op(TOK_LE);
6140 e = gvtst(1, 0);
6141 vdup();
6142 if (ll)
6143 vpushll(p->v1);
6144 else
6145 vpushi(p->v1);
6146 gen_op(TOK_GE);
6147 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6148 /* x < v1 */
6149 gcase(base, len/2, bsym);
6150 /* x > v2 */
6151 gsym(e);
6152 e = len/2 + 1;
6153 base += e; len -= e;
6155 /* linear scan */
6156 while (len--) {
6157 p = *base++;
6158 vdup();
6159 if (ll)
6160 vpushll(p->v2);
6161 else
6162 vpushi(p->v2);
6163 if (p->v1 == p->v2) {
6164 gen_op(TOK_EQ);
6165 gtst_addr(0, p->sym);
6166 } else {
/* range case 'v1 ... v2': two comparisons */
6167 gen_op(TOK_LE);
6168 e = gvtst(1, 0);
6169 vdup();
6170 if (ll)
6171 vpushll(p->v1);
6172 else
6173 vpushi(p->v1);
6174 gen_op(TOK_GE);
6175 gtst_addr(0, p->sym);
6176 gsym(e);
6179 *bsym = gjmp(*bsym);
6182 /* ------------------------------------------------------------------------- */
6183 /* __attribute__((cleanup(fn))) */
6185 static void try_call_scope_cleanup(Sym *stop)
6187 Sym *cls = cur_scope->cl.s;
6189 for (; cls != stop; cls = cls->ncl) {
6190 Sym *fs = cls->next;
6191 Sym *vs = cls->prev_tok;
6193 vpushsym(&fs->type, fs);
6194 vset(&vs->type, vs->r, vs->c);
6195 vtop->sym = vs;
6196 mk_pointer(&vtop->type);
6197 gaddrof();
6198 gfunc_call(1);
6202 static void try_call_cleanup_goto(Sym *cleanupstate)
6204 Sym *oc, *cc;
6205 int ocd, ccd;
6207 if (!cur_scope->cl.s)
6208 return;
6210 /* search NCA of both cleanup chains given parents and initial depth */
6211 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6212 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6214 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6216 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6219 try_call_scope_cleanup(cc);
6222 /* call 'func' for each __attribute__((cleanup(func))) */
/* At the end of a block: re-route any pending (forward) gotos that
   jump out of scope 'o' so that the cleanups of 'o' run first, then
   run 'o's own __attribute__((cleanup(func))) handlers for normal
   fall-through. */
6223 static void block_cleanup(struct scope *o)
6225 int jmp = 0;
6226 Sym *g, **pg;
6227 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6228 if (g->prev_tok->r & LABEL_FORWARD) {
6229 Sym *pcl = g->next;
6230 if (!jmp)
6231 jmp = gjmp(0);
6232 gsym(pcl->jnext);
6233 try_call_scope_cleanup(o->cl.s);
6234 pcl->jnext = gjmp(0);
6235 if (!o->cl.n)
6236 goto remove_pending;
6237 g->c = o->cl.n;
6238 pg = &g->prev;
6239 } else {
6240 remove_pending:
6241 *pg = g->prev;
6242 sym_free(g);
6245 gsym(jmp);
6246 try_call_scope_cleanup(o->cl.s);
6249 /* ------------------------------------------------------------------------- */
6250 /* VLA */
/* Restore the saved stack pointer for VLAs; LOC == 0 means there is
   no saved VLA state, so nothing to do. */
static void vla_restore(int loc)
{
    if (!loc)
        return;
    gen_vla_sp_restore(loc);
}
6258 static void vla_leave(struct scope *o)
6260 if (o->vla.num < cur_scope->vla.num)
6261 vla_restore(o->vla.loc);
6264 /* ------------------------------------------------------------------------- */
6265 /* local scopes */
6267 void new_scope(struct scope *o)
6269 /* copy and link previous scope */
6270 *o = *cur_scope;
6271 o->prev = cur_scope;
6272 cur_scope = o;
6274 /* record local declaration stack position */
6275 o->lstk = local_stack;
6276 o->llstk = local_label_stack;
6278 ++local_scope;
/* Leave scope O and return to its parent: restore VLA state, run any
   cleanups registered in O, then pop labels and symbols declared in it.
   The order of operations matters (cleanups may reference local syms). */
void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    sym_pop(&local_stack, o->lstk, is_expr);

    cur_scope = o->prev;
    --local_scope;
}
6306 /* leave a scope via break/continue(/goto) */
6307 void leave_scope(struct scope *o)
6309 if (!o)
6310 return;
6311 try_call_scope_cleanup(o->cl.s);
6312 vla_leave(o);
6315 /* ------------------------------------------------------------------------- */
/* call block from 'for do while' loops */
/* Parse the loop body with BSYM/CSYM installed as the current break and
   continue jump chains; CSYM == NULL (switch) leaves the continue target
   and loop_scope untouched.  Previous values are restored afterwards. */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* Parse one statement (or compound statement) and generate its code.
   With IS_EXPR this is the body of a GNU statement expression: the value
   of the last expression statement is kept on the value stack (a default
   (void) value is pushed first). */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok, next();

    if (t == TOK_IF) {
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);     /* jump over 'then' when condition is false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }

    } else if (t == TOK_WHILE) {
        d = gind();          /* loop entry = continue target */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);     /* exit jump when condition false */
        b = 0;
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);     /* continue -> re-test condition */
        gsym(a);             /* break / false condition lands here */

    } else if (t == '{') {
        struct scope o;
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                /* discard the previous statement-expression value before
                   the next statement produces a new one */
                if (is_expr)
                    vpop();
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);

        if (0 == local_scope && !nocode_wanted)
            check_func_return();
        next();

    } else if (t == TOK_RETURN) {
        a = tok != ';';                          /* has a value? */
        b = (func_vt.t & VT_BTYPE) != VT_VOID;   /* function returns one? */
        if (a)
            gexpr(), gen_assign_cast(&func_vt);
        leave_scope(root_scope);
        if (a && b)
            gfunc_return(&func_vt);
        else if (a)
            vtop--;
        else if (b)
            tcc_warning("'return' with no value.");
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
            leave_scope(loop_scope);
        else
            leave_scope(cur_switch->scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        struct scope o;
        new_scope(&o);   /* C99: init-decl variables live in their own scope */

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl0(VT_LOCAL, 1, NULL)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind();      /* c = condition test, d = continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted after the body, so jump
               over it now and redirect the continue target to it */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);             /* continue -> condition test */
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0);     /* jump back to body while condition true */
        gsym_addr(c, d);
        gsym(a);

    } else if (t == TOK_SWITCH) {
        struct switch_t *saved, sw;
        SValue switchval;

        sw.p = NULL;
        sw.n = 0;
        sw.def_sym = 0;
        sw.bsym = &a;
        sw.scope = cur_scope;

        saved = cur_switch;
        cur_switch = &sw;

        skip('(');
        gexpr();
        skip(')');
        switchval = *vtop--;

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);

        qsort(sw.p, sw.n, sizeof(void*), case_cmp);
        for (b = 1; b < sw.n; b++)
            if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
                tcc_error("duplicate case value");

        /* Our switch table sorting is signed, so the compared
           value needs to be as well when it's 64bit.  */
        if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
            switchval.type.t &= ~VT_UNSIGNED;
        vpushv(&switchval);
        gv(RC_INT);
        d = 0, gcase(sw.p, sw.n, &d);
        vpop();
        if (sw.def_sym)
            gsym_addr(d, sw.def_sym);
        else
            gsym(d);
        /* break label */
        gsym(a);

        dynarray_reset(&sw.p, &sw.n);
        cur_switch = saved;

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: 'case lo ... hi:' */
            next();
            cr->v2 = expr_const64();
            if (cr->v2 < cr->v1)
                tcc_warning("empty case range");
        }
        cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(root_scope->vla.loc);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: run cleanups for the scopes being left */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
            vla_restore(cur_scope->vla.loc);
            /* we accept this, but it is a mistake */
            if (tok == '}') {
                tcc_warning("deprecated use of label at end of compound statement");
            } else {
                goto again;
            }

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
                if (is_expr) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }
}
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        int t;
        if (tok == TOK_EOF) {
            /* EOF is only an error when saving tokens or inside brackets;
               otherwise stop silently */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        /* save the token before consuming it */
        if (str)
            tok_str_add_tok(*str);
        t = tok;
        next();
        if (t == '{' || t == '(') {
            level++;
        } else if (t == '}' || t == ')') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the saved stream (macro-stream end markers) */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
6697 #define EXPR_CONST 1
6698 #define EXPR_ANY 2
/* Parse one initializer element, leaving its value on the value stack.
   EXPR_CONST requires a load-time-constant expression (static/global
   init); EXPR_ANY accepts any assignment expression (automatic init). */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals).  */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            /* dllimported data has no load-time address */
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
6727 /* put zeros for variable based init */
6728 static void init_putz(Section *sec, unsigned long c, int size)
6730 if (sec) {
6731 /* nothing to do because globals are already set to zero */
6732 } else {
6733 vpush_global_sym(&func_old_type, TOK_memset);
6734 vseti(VT_LOCAL, c);
6735 #ifdef TCC_TARGET_ARM
6736 vpushs(size);
6737 vpushi(0);
6738 #else
6739 vpushi(0);
6740 vpushs(size);
6741 #endif
6742 gfunc_call(3);
6746 #define DIF_FIRST 1
6747 #define DIF_SIZE_ONLY 2
6748 #define DIF_HAVE_ELEM 4
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c).  This returns the new length of that.  */
static int decl_designator(CType *type, Section *sec, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    if (gnu_ext && tok >= TOK_UIDENT) {
        /* possibly the old GNU 'field:' designator syntax */
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                /* GNU array range designator: [lo ... hi] */
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
                index_last < index)
                tcc_error("invalid index");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (!f)
                expect("field");
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs + f->c;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* no designator: advance through the container in order */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            if (type->ref->c >= 0 && index >= type->ref->c)
                tcc_error("index too large");
            type = pointed_type(type);
            c += index * type_size(type, &align);
        } else {
            f = *cur_field;
            /* skip anonymous bitfield padding members */
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many field init");
            type = &f->type;
            c += f->c;
        }
    }
    /* must put zero in holes (note that doing it that way
       ensures that it even works with designators) */
    if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
        init_putz(sec, corig + al, c - corig - al);
    decl_initializer(type, sec, c, flags & ~DIF_FIRST);

    /* XXX: make it more general */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        /* replicate the single parsed value over the whole [lo...hi] range */
        unsigned long c_end;
        uint8_t *src, *dst;
        int i;

        if (!sec) {
            vset(type, VT_LOCAL|VT_LVAL, c);
            for (i = 1; i < nb_elems; i++) {
                vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
                vswap();
                vstore();
            }
            vpop();
        } else if (!NODATA_WANTED) {
            c_end = c + nb_elems * elem_size;
            if (c_end > sec->data_allocated)
                section_realloc(sec, c_end);
            src = sec->data + c;
            dst = src;
            for(i = 1; i < nb_elems; i++) {
                dst += elem_size;
                memcpy(dst, src, elem_size);
            }
        }
    }
    c += nb_elems * type_size(type, &align);
    if (c - corig > al)
        al = c - corig;
    return al;
}
/* store a value or an expression directly in global data or in local array */
/* SEC != NULL: write vtop's constant value (or relocation) at offset C
   in SEC.  SEC == NULL: emit a store of vtop into the local at offset C.
   vtop is popped in both cases. */
static void init_putv(CType *type, Section *sec, unsigned long c)
{
    int bt;
    void *ptr;
    CType dtype;

    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    if (sec) {
        int size, align;
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;

        /* a symbol-relative value only fits in a pointer-sized slot */
        if ((vtop->r & VT_SYM)
            && bt != VT_PTR
            && bt != VT_FUNC
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            )
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            vtop--;
            return;
        }

        size = type_size(type, &align);
        section_reserve(sec, c + size);
        ptr = sec->data + c;

        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'.  The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one.  That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'.  Ignore
               pointer typed entities here.  Hopefully no real code
               will every use compound literals with scalar type.  */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over.  */
            Section *ssec;
            ElfSym *esym;
            ElfW_Rel *rel;
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations.  Use the fact that relocs are
                   created it order, so look from the end of relocs
                   until we hit one before the copied region.  */
                int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
                rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
                while (num_relocs--) {
                    rel--;
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    /* Note: if the same fields are initialized multiple
                       times (possible with designators) then we possibly
                       add multiple relocations for the same offset here.
                       That would lead to wrong code, the last reloc needs
                       to win.  We clean this up later after the whole
                       initializer is parsed.  */
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
#if PTR_SIZE == 8
                                   rel->r_addend
#else
                                   0
#endif
                                  );
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                /* merge the value into the existing bytes, 8 bits at a time */
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    n = 8 - bit_pos;
                    if (n > bit_size)
                        n = bit_size;
                    v = vtop->c.i >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                }
            } else
            switch(bt) {
                /* XXX: when cross-compiling we assume that each type has the
                   same representation on host and target, which is likely to
                   be wrong in the case of long double */
            case VT_BOOL:
                vtop->c.i = vtop->c.i != 0;
                /* fallthrough: stored as a single byte like VT_BYTE */
            case VT_BYTE:
                *(char *)ptr |= vtop->c.i;
                break;
            case VT_SHORT:
                *(short *)ptr |= vtop->c.i;
                break;
            case VT_FLOAT:
                *(float*)ptr = vtop->c.f;
                break;
            case VT_DOUBLE:
                *(double *)ptr = vtop->c.d;
                break;
            case VT_LDOUBLE:
#if defined TCC_IS_NATIVE_387
                if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
                    memcpy(ptr, &vtop->c.ld, 10);
#ifdef __TINYC__
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
#endif
                else if (vtop->c.ld == 0.0)
                    ;
                else
#endif
                if (sizeof(long double) == LDOUBLE_SIZE)
                    *(long double*)ptr = vtop->c.ld;
                else if (sizeof(double) == LDOUBLE_SIZE)
                    *(double *)ptr = (double)vtop->c.ld;
                else
                    tcc_error("can't cross compile long double constants");
                break;
#if PTR_SIZE != 8
            case VT_LLONG:
                *(long long *)ptr |= vtop->c.i;
                break;
#else
            case VT_LLONG:
#endif
            case VT_PTR:
                {
                    addr_t val = vtop->c.i;
#if PTR_SIZE == 8
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    else
                        *(addr_t *)ptr |= val;
#else
                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    *(addr_t *)ptr |= val;
#endif
                    break;
                }
            default:
                {
                    int val = vtop->c.i;
#if PTR_SIZE == 8
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    else
                        *(int *)ptr |= val;
#else
                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    *(int *)ptr |= val;
#endif
                    break;
                }
            }
        }
        vtop--;
    } else {
        /* automatic storage: emit a regular assignment */
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        vswap();
        vstore();
        vpop();
    }
}
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
   dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
   size only evaluation is wanted (only for arrays). */
static void decl_initializer(CType *type, Section *sec, unsigned long c,
                             int flags)
{
    int len, n, no_oblock, nb, i;
    int size1, align1;
    Sym *s, *f;
    Sym indexsym;   /* fake 'field' carrying the running array index */
    CType *t1;

    if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol).  */
        tok != TOK_LSTR && tok != TOK_STR &&
        !(flags & DIF_SIZE_ONLY)) {
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        flags |= DIF_HAVE_ELEM;
    }

    if ((flags & DIF_HAVE_ELEM) &&
        !(type->t & VT_ARRAY) &&
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements.  */
        is_compatible_unqualified_types(type, &vtop->type)) {
        init_putv(type, sec, c);
    } else if (type->t & VT_ARRAY) {
        s = type->ref;
        n = s->c;               /* array length, -1 if not yet known */
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);

        no_oblock = 1;
        if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            if (tok != '{')
                tcc_error("character array initializer must be a literal,"
                    " optionally enclosed in braces");
            skip('{');
            no_oblock = 0;
        }

        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            len = 0;
            while (tok == TOK_STR || tok == TOK_LSTR) {
                int cstr_len, ch;

                /* compute maximum number of chars wanted */
                if (tok == TOK_STR)
                    cstr_len = tokc.str.size;
                else
                    cstr_len = tokc.str.size / sizeof(nwchar_t);
                cstr_len--;     /* don't count the implicit terminator yet */
                nb = cstr_len;
                if (n >= 0 && nb > (n - len))
                    nb = n - len;
                if (!(flags & DIF_SIZE_ONLY)) {
                    if (cstr_len > nb)
                        tcc_warning("initializer-string for array is too long");
                    /* in order to go faster for common case (char
                       string in global variable, we handle it
                       specifically */
                    if (sec && tok == TOK_STR && size1 == 1) {
                        if (!NODATA_WANTED)
                            memcpy(sec->data + c + len, tokc.str.data, nb);
                    } else {
                        for(i=0;i<nb;i++) {
                            if (tok == TOK_STR)
                                ch = ((unsigned char *)tokc.str.data)[i];
                            else
                                ch = ((nwchar_t *)tokc.str.data)[i];
                            vpushi(ch);
                            init_putv(t1, sec, c + (len + i) * size1);
                        }
                    }
                }
                len += nb;
                next();
            }
            /* only add trailing zero if enough storage (no
               warning in this case since it is standard) */
            if (n < 0 || len < n) {
                if (!(flags & DIF_SIZE_ONLY)) {
                    vpushi(0);
                    init_putv(t1, sec, c + (len * size1));
                }
                len++;
            }
            len *= size1;
        } else {
            indexsym.c = 0;
            f = &indexsym;

          do_init_list:
            /* shared element loop; the struct branch below jumps here
               with 's', 'f' and 'n' set up for struct members */
            len = 0;
            while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
                len = decl_designator(type, sec, c, &f, flags, len);
                flags &= ~DIF_HAVE_ELEM;
                if (type->t & VT_ARRAY) {
                    ++indexsym.c;
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL;
                    else
                        f = f->next;
                    if (no_oblock && f == NULL)
                        break;
                }

                if (tok == '}')
                    break;
                skip(',');
            }
        }
        /* put zeros at the end */
        if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
            init_putz(sec, c + len, n*size1 - len);
        if (!no_oblock)
            skip('}');
        /* patch type size if needed, which happens only for array types */
        if (n < 0)
            s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        size1 = 1;
        no_oblock = 1;
        if ((flags & DIF_FIRST) || tok == '{') {
            skip('{');
            no_oblock = 0;
        }
        s = type->ref;
        f = s->next;
        n = s->c;
        goto do_init_list;
    } else if (tok == '{') {
        if (flags & DIF_HAVE_ELEM)
            skip(';');
        next();
        decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
        skip('}');
    } else if ((flags & DIF_SIZE_ONLY)) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything than an array if DIF_SIZE_ONLY (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when DIF_SIZE_ONLY is set.  */
        /* just skip expression */
        skip_or_save_block(NULL);
    } else {
        if (!(flags & DIF_HAVE_ELEM)) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early.  */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        }
        init_putv(type, sec, c);
    }
}
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
{
    int size, align, addr;
    TokenString *init_str = NULL;

    Section *sec;
    Sym *flexible_array;
    Sym *sym = NULL;
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck;
#endif

    /* Always allocate static or global variables */
    if (v && (r & VT_VALMASK) == VT_CONST)
        nocode_wanted |= 0x80000000;

#ifdef CONFIG_TCC_BCHECK
    bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
#endif

    /* detect a trailing flexible array member (last field, size < 0) */
    flexible_array = NULL;
    if ((type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
        if (field) {
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
                flexible_array = field;
        }
    }

    size = type_size(type, &align);
    /* If unknown size, we must evaluate it before
       evaluating initializers because
       initializers can generate global data too
       (e.g. string pointers or ISOC99 compound
       literals). It also simplifies local
       initializers handling */
    if (size < 0 || (flexible_array && has_init)) {
        if (!has_init)
            tcc_error("unknown type size");
        /* get all init string */
        if (has_init == 2) {
            init_str = tok_str_alloc();
            /* only get strings */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                next();
            }
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else {
            skip_or_save_block(&init_str);
        }
        unget_tok(0);

        /* compute size (first pass over the saved token stream) */
        begin_macro(init_str, 1);
        next();
        decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;
        next();

        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");
    }
    /* If there's a flex member and it was used in the initializer
       adjust size.  */
    if (flexible_array &&
        flexible_array->type.ref->c > 0)
        size += flexible_array->type.ref->c
                * pointed_size(&flexible_array->type);
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        if (speca > align)
            align = speca;
    } else if (ad->a.packed) {
        align = 1;
    }

    if (!v && NODATA_WANTED)
        size = 0, align = 1;

    if ((r & VT_VALMASK) == VT_LOCAL) {
        sec = NULL;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && (type->t & VT_ARRAY)) {
            loc--;
        }
#endif
        loc = (loc - size) & -align;
        addr = loc;
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (bcheck && (type->t & VT_ARRAY)) {
            addr_t *bounds_ptr;
            /* add padding between regions */
            loc--;
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym = sym_push(v, type, r, addr);
            if (ad->cleanup_func) {
                /* register a __attribute__((cleanup)) entry; ->v encodes
                   the chain depth used by try_call_cleanup_goto() */
                Sym *cls = sym_push2(&all_cleanups,
                    SYM_FIELD | ++cur_scope->cl.n, 0, 0);
                cls->prev_tok = sym;
                cls->next = ad->cleanup_func;
                cls->ncl = cur_scope->cl.s;
                cur_scope->cl.s = cls;
            }

            sym->a = ad->a;
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                patch_storage(sym, ad, type);
                /* we accept several definitions of the same global variable. */
                if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
                    goto no_alloc;
            }
        }

        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            if (has_init)
                sec = data_section;
            else if (tcc_state->nocommon)
                sec = bss_section;
        }

        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (bcheck)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        }

        if (v) {
            if (!sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                patch_storage(sym, ad, NULL);
            }
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            vpush_ref(type, sec, addr, size);
            sym = vtop->sym;
            vtop->r |= r;
        }

#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (bcheck) {
            addr_t *bounds_ptr;

            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }

    if (type->t & VT_VLA) {
        int a;

        if (NODATA_WANTED)
            goto no_alloc;

        /* save current stack pointer */
        if (root_scope->vla.loc == 0) {
            struct scope *v = cur_scope;
            gen_vla_sp_save(loc -= PTR_SIZE);
            do v->vla.loc = loc; while ((v = v->prev));
        }

        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
#if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
        /* on _WIN64, because of the function args scratch area, the
           result of alloca differs from RSP and is returned in RAX.  */
        gen_vla_result(addr), addr = (loc -= PTR_SIZE);
#endif
        gen_vla_sp_save(addr);
        cur_scope->vla.loc = addr;
        cur_scope->vla.num++;
    } else if (has_init) {
        size_t oldreloc_offset = 0;
        if (sec && sec->reloc)
            oldreloc_offset = sec->reloc->data_offset;
        decl_initializer(type, sec, addr, DIF_FIRST);
        if (sec && sec->reloc)
            squeeze_multi_relocs(sec, oldreloc_offset);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        next();
    }

    nocode_wanted = saved_nocode_wanted;
}
7500 /* parse a function defined by symbol 'sym' and generate its code in
7501 'cur_text_section' */
/* NOTE(review): this listing is a gitweb blob extract; the leading 75xx
   numbers are viewer line numbers, and some original lines (braces, blanks)
   were dropped by the extraction. Code is left byte-identical. */
7502 static void gen_function(Sym *sym)
7504 /* Initialize VLA state */
7505 struct scope f = { 0 };
7506 cur_scope = root_scope = &f;
/* enable code generation for the function body */
7508 nocode_wanted = 0;
7509 ind = cur_text_section->data_offset;
/* honor the 'aligned' attribute: pad the text section with nops up to
   a 1 << (aligned - 1) boundary before the function entry */
7510 if (sym->a.aligned) {
7511 size_t newoff = section_add(cur_text_section, 0,
7512 1 << (sym->a.aligned - 1));
7513 gen_fill_nops(newoff - ind);
7515 /* NOTE: we patch the symbol size later */
7516 put_extern_sym(sym, cur_text_section, ind, 0);
7518 funcname = get_tok_str(sym->v, NULL);
7519 func_ind = ind;
7521 /* put debug symbol */
7522 tcc_debug_funcstart(tcc_state, sym);
7523 /* push a dummy symbol to enable local sym storage */
7524 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7525 local_scope = 1; /* for function parameters */
7526 gfunc_prolog(&sym->type);
7527 local_scope = 0;
7528 rsym = 0;
7529 clear_temp_local_var_list();
/* parse and generate the body; 'rsym' collects jumps to the common
   return point, resolved by gsym() before the epilog */
7530 block(0);
7531 gsym(rsym);
7532 nocode_wanted = 0;
7533 gfunc_epilog();
7534 cur_text_section->data_offset = ind;
7535 /* reset local stack */
7536 sym_pop(&local_stack, NULL, 0);
7537 local_scope = 0;
7538 label_pop(&global_label_stack, NULL, 0);
7539 sym_pop(&all_cleanups, NULL, 0);
7540 /* patch symbol size */
7541 elfsym(sym)->st_size = ind - func_ind;
7542 /* end of function */
7543 tcc_debug_funcend(tcc_state, ind - func_ind);
7544 /* It's better to crash than to generate wrong code */
7545 cur_text_section = NULL;
7546 funcname = ""; /* for safety */
7547 func_vt.t = VT_VOID; /* for safety */
7548 func_var = 0; /* for safety */
7549 ind = 0; /* for safety */
/* suppress code generation between functions (see the nocode_wanted
   flag bits near the top of the file) */
7550 nocode_wanted = 0x80000000;
7551 check_vstack();
/* Generate code for all referenced 'static inline' functions recorded in
   s->inline_fns. Loops until a full pass generates nothing new, because
   generating one inline function may reference another.
   NOTE(review): gitweb blob extract; leading 75xx numbers are viewer line
   numbers and some original lines were dropped. Code left byte-identical. */
7554 static void gen_inline_functions(TCCState *s)
7556 Sym *sym;
7557 int inline_generated, i;
7558 struct InlineFunc *fn;
7560 tcc_open_bf(s, ":inline:", 0);
7561 /* iterate while inline function are referenced */
7562 do {
7563 inline_generated = 0;
7564 for (i = 0; i < s->nb_inline_fns; ++i) {
7565 fn = s->inline_fns[i];
7566 sym = fn->sym;
7567 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7568 /* the function was used or forced (and then not internal):
7569 generate its code and convert it to a normal function */
7570 fn->sym = NULL;
/* restore the original file name so debug info points at the right file */
7571 if (file)
7572 pstrcpy(file->filename, sizeof file->filename, fn->filename);
/* replay the saved token string of the function body */
7573 begin_macro(fn->func_str, 1);
7574 next();
7575 cur_text_section = text_section;
7576 gen_function(sym);
7577 end_macro();
7579 inline_generated = 1;
7582 } while (inline_generated);
7583 tcc_close();
/* Release the saved token strings of inline functions that were never
   generated (fn->sym still set, i.e. never consumed by
   gen_inline_functions), then free the inline_fns array itself. */
7586 ST_FUNC void free_inline_functions(TCCState *s)
7588 int i;
7589 /* free tokens of unused inline functions */
7590 for (i = 0; i < s->nb_inline_fns; ++i) {
7591 struct InlineFunc *fn = s->inline_fns[i];
7592 if (fn->sym)
7593 tok_str_free(fn->func_str);
7595 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7598 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7599 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Top-level/block declaration parser: loops over declarations until EOF,
   a closing condition, or (when is_for_loop_init) a non-declaration.
   Returns 1 when a for-loop init declaration was consumed, else 0.
   NOTE(review): gitweb blob extract; the leading 76xx/77xx numbers are
   viewer line numbers and some original lines (braces, blanks) were
   dropped by the extraction. Code left byte-identical. */
7600 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7602 int v, has_init, r;
7603 CType type, btype;
7604 Sym *sym;
7605 AttributeDef ad, adbase;
7607 while (1) {
/* C11 _Static_assert(const-expr, message) */
7608 if (tok == TOK_STATIC_ASSERT) {
7609 int c;
7611 next();
7612 skip('(');
7613 c = expr_const();
7614 skip(',');
7615 if (c == 0)
7616 tcc_error("%s", get_tok_str(tok, &tokc));
7617 next();
7618 skip(')');
7619 skip(';');
7620 continue;
/* no base type could be parsed: handle ';', global asm, K&R int, EOF */
7622 if (!parse_btype(&btype, &adbase)) {
7623 if (is_for_loop_init)
7624 return 0;
7625 /* skip redundant ';' if not in old parameter decl scope */
7626 if (tok == ';' && l != VT_CMP) {
7627 next();
7628 continue;
7630 if (l != VT_CONST)
7631 break;
7632 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7633 /* global asm block */
7634 asm_global_instr();
7635 continue;
7637 if (tok >= TOK_UIDENT) {
7638 /* special test for old K&R protos without explicit int
7639 type. Only accepted when defining global data */
7640 btype.t = VT_INT;
7641 } else {
7642 if (tok != TOK_EOF)
7643 expect("declaration");
7644 break;
/* base type followed directly by ';': struct/union/enum declaration
   with no declarator */
7647 if (tok == ';') {
7648 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7649 int v = btype.ref->v;
7650 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7651 tcc_warning("unnamed struct/union that defines no instances");
7652 next();
7653 continue;
7655 if (IS_ENUM(btype.t)) {
7656 next();
7657 continue;
7660 while (1) { /* iterate thru each declaration */
7661 type = btype;
7662 /* If the base type itself was an array type of unspecified
7663 size (like in 'typedef int arr[]; arr x = {1};') then
7664 we will overwrite the unknown size by the real one for
7665 this decl. We need to unshare the ref symbol holding
7666 that size. */
7667 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7668 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7670 ad = adbase;
7671 type_decl(&type, &ad, &v, TYPE_DIRECT);
7672 #if 0
7674 char buf[500];
7675 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7676 printf("type = '%s'\n", buf);
7678 #endif
7679 if ((type.t & VT_BTYPE) == VT_FUNC) {
7680 /* if old style function prototype, we accept a
7681 declaration list */
7682 sym = type.ref;
7683 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7684 decl0(VT_CMP, 0, sym);
7685 /* always compile 'extern inline' */
7686 if (type.t & VT_EXTERN)
7687 type.t &= ~VT_INLINE;
/* GNU extension: __asm__("label") after the declarator */
7690 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7691 ad.asm_label = asm_label_instr();
7692 /* parse one last attribute list, after asm label */
7693 parse_attribute(&ad);
7694 #if 0
7695 /* gcc does not allow __asm__("label") with function definition,
7696 but why not ... */
7697 if (tok == '{')
7698 expect(";");
7699 #endif
7702 #ifdef TCC_TARGET_PE
7703 if (ad.a.dllimport || ad.a.dllexport) {
7704 if (type.t & VT_STATIC)
7705 tcc_error("cannot have dll linkage with static");
7706 if (type.t & VT_TYPEDEF) {
7707 tcc_warning("'%s' attribute ignored for typedef",
7708 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7709 (ad.a.dllexport = 0, "dllexport"));
7710 } else if (ad.a.dllimport) {
7711 if ((type.t & VT_BTYPE) == VT_FUNC)
7712 ad.a.dllimport = 0;
7713 else
7714 type.t |= VT_EXTERN;
7717 #endif
/* '{' after a declarator: a function definition */
7718 if (tok == '{') {
7719 if (l != VT_CONST)
7720 tcc_error("cannot use local functions");
7721 if ((type.t & VT_BTYPE) != VT_FUNC)
7722 expect("function definition");
7724 /* reject abstract declarators in function definition
7725 make old style params without decl have int type */
7726 sym = type.ref;
7727 while ((sym = sym->next) != NULL) {
7728 if (!(sym->v & ~SYM_FIELD))
7729 expect("identifier");
7730 if (sym->type.t == VT_VOID)
7731 sym->type = int_type;
7734 /* put function symbol */
7735 type.t &= ~VT_EXTERN;
7736 sym = external_sym(v, &type, 0, &ad);
7737 /* static inline functions are just recorded as a kind
7738 of macro. Their code will be emitted at the end of
7739 the compilation unit only if they are used */
7740 if (sym->type.t & VT_INLINE) {
7741 struct InlineFunc *fn;
7742 const char *filename;
7744 filename = file ? file->filename : "";
7745 fn = tcc_malloc(sizeof *fn + strlen(filename));
7746 strcpy(fn->filename, filename);
7747 fn->sym = sym;
/* save (or skip, when nocode) the body tokens for later replay */
7748 skip_or_save_block(&fn->func_str);
7749 dynarray_add(&tcc_state->inline_fns,
7750 &tcc_state->nb_inline_fns, fn);
7751 } else {
7752 /* compute text section */
7753 cur_text_section = ad.section;
7754 if (!cur_text_section)
7755 cur_text_section = text_section;
7756 gen_function(sym);
7758 break;
7759 } else {
7760 if (l == VT_CMP) {
7761 /* find parameter in function parameter list */
7762 for (sym = func_sym->next; sym; sym = sym->next)
7763 if ((sym->v & ~SYM_FIELD) == v)
7764 goto found;
7765 tcc_error("declaration for parameter '%s' but no such parameter",
7766 get_tok_str(v, NULL));
7767 found:
7768 if (type.t & VT_STORAGE) /* 'register' is okay */
7769 tcc_error("storage class specified for '%s'",
7770 get_tok_str(v, NULL));
7771 if (sym->type.t != VT_VOID)
7772 tcc_error("redefinition of parameter '%s'",
7773 get_tok_str(v, NULL));
7774 convert_parameter_type(&type);
7775 sym->type = type;
7776 } else if (type.t & VT_TYPEDEF) {
7777 /* save typedefed type */
7778 /* XXX: test storage specifiers ? */
7779 sym = sym_find(v);
7780 if (sym && sym->sym_scope == local_scope) {
7781 if (!is_compatible_types(&sym->type, &type)
7782 || !(sym->type.t & VT_TYPEDEF))
7783 tcc_error("incompatible redefinition of '%s'",
7784 get_tok_str(v, NULL));
7785 sym->type = type;
7786 } else {
7787 sym = sym_push(v, &type, 0, 0);
7789 sym->a = ad.a;
7790 sym->f = ad.f;
7791 } else if ((type.t & VT_BTYPE) == VT_VOID
7792 && !(type.t & VT_EXTERN)) {
7793 tcc_error("declaration of void object");
7794 } else {
7795 r = 0;
7796 if ((type.t & VT_BTYPE) == VT_FUNC) {
7797 /* external function definition */
7798 /* specific case for func_call attribute */
7799 type.ref->f = ad.f;
7800 } else if (!(type.t & VT_ARRAY)) {
7801 /* not lvalue if array */
7802 r |= lvalue_type(type.t);
7804 has_init = (tok == '=');
7805 if (has_init && (type.t & VT_VLA))
7806 tcc_error("variable length array cannot be initialized");
7807 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7808 || (type.t & VT_BTYPE) == VT_FUNC
7809 /* as with GCC, uninitialized global arrays with no size
7810 are considered extern: */
7811 || ((type.t & VT_ARRAY) && !has_init
7812 && l == VT_CONST && type.ref->c < 0)
7814 /* external variable or function */
7815 type.t |= VT_EXTERN;
7816 sym = external_sym(v, &type, r, &ad);
7817 if (ad.alias_target) {
7818 ElfSym *esym;
7819 Sym *alias_target;
7820 alias_target = sym_find(ad.alias_target);
7821 esym = elfsym(alias_target);
7822 if (!esym)
7823 tcc_error("unsupported forward __alias__ attribute");
7824 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7826 } else {
7827 if (type.t & VT_STATIC)
7828 r |= VT_CONST;
7829 else
7830 r |= l;
7831 if (has_init)
7832 next();
7833 else if (l == VT_CONST)
7834 /* uninitialized global variables may be overridden */
7835 type.t |= VT_EXTERN;
7836 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
/* ',' continues the declarator list; otherwise require ';' */
7839 if (tok != ',') {
7840 if (is_for_loop_init)
7841 return 1;
7842 skip(';');
7843 break;
7845 next();
7849 return 0;
/* Convenience wrapper: parse a declaration list with default storage 'l'
   (VT_CONST for file scope, VT_LOCAL for block scope), outside any
   for-loop init and with no old-style parameter function symbol. */
7852 static void decl(int l)
7854 decl0(l, 0, NULL);
7857 /* ------------------------------------------------------------------------- */
7858 #undef gjmp_addr
7859 #undef gjmp
7860 /* ------------------------------------------------------------------------- */