Fix invalid size with GNU designated initializers
[tinycc.git] / tccgen.c
blob b7e0fcfc65797b0c97d21f4213995d5bfb9abc80
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind; }
63 /* Set 'nocode_wanted' after unconditional jumps */
64 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
65 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
69 #define gjmp gjmp_acs
70 /* <---- */
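/* A rough sketch of how the suppression flags above interact, as read from
   their definitions: 'nocode_wanted' == 0 means both code and static data
   are emitted.  CODE_OFF() sets bit 0x20000000 after an unconditional jump,
   so NODATA_WANTED ("> 0") holds too; gsym() at a used label and gind()
   clear that bit again.  tccgen_compile() starts with 0x80000000, i.e. at
   file scope the value is negative: code generation is suppressed while
   NODATA_WANTED stays false, so static data keeps being emitted
   (STATIC_DATA_WANTED).  In practice, statements after an unconditional
   'return' or 'goto' are still parsed but produce no code until the next
   reachable label. */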
72 ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing) */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA struct temp_local_variable {
95 int location; //offset on stack. Svalue.c.i
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
144 /* we use our own 'finite' function to avoid potential problems with
145 non-standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC int ieee_finite(double d)
149 int p[4];
150 memcpy(p, &d, sizeof(double));
151 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
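/* How the bit trick above works (assuming little-endian IEEE-754 doubles,
   hence the endianness note): p[1] is the high word of the double, holding
   the sign bit, the 11 exponent bits (30..20) and the top of the mantissa.
   OR-ing with 0x800fffff sets every bit except the exponent field, so
   (p[1] | 0x800fffff) + 1 wraps to 0 exactly when the exponent is all ones
   (Inf/NaN).  Bit 31 of the sum is therefore 1 for finite values and 0
   otherwise, which is what the final shift extracts. */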
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
158 #endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
175 #if 0
176 void pv (const char *lbl, int a, int b)
178 int i;
179 for (i = a; i < a + b; ++i) {
180 SValue *p = &vtop[-i];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
185 #endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
240 /* put function symbol */
241 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
243 char buf[512];
245 if (!s1->do_debug)
246 return;
248 /* stabs info */
249 /* XXX: we put here a dummy type */
250 snprintf(buf, sizeof(buf), "%s:%c1",
251 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
252 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
253 cur_text_section, sym->c);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE, 0, file->line_num, 0);
257 last_ind = 0;
258 last_line_num = 0;
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC int tccgen_compile(TCCState *s1)
272 cur_text_section = NULL;
273 funcname = "";
274 anon_sym = SYM_FIRST_ANOM;
275 section_sym = 0;
276 const_wanted = 0;
277 nocode_wanted = 0x80000000;
278 local_scope = 0;
280 /* define some often used types */
281 int_type.t = VT_INT;
282 char_pointer_type.t = VT_BYTE;
283 mk_pointer(&char_pointer_type);
284 #if PTR_SIZE == 4
285 size_type.t = VT_INT | VT_UNSIGNED;
286 ptrdiff_type.t = VT_INT;
287 #elif LONG_SIZE == 4
288 size_type.t = VT_LLONG | VT_UNSIGNED;
289 ptrdiff_type.t = VT_LLONG;
290 #else
291 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
292 ptrdiff_type.t = VT_LONG | VT_LLONG;
293 #endif
294 func_old_type.t = VT_FUNC;
295 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
296 func_old_type.ref->f.func_call = FUNC_CDECL;
297 func_old_type.ref->f.func_type = FUNC_OLD;
299 tcc_debug_start(s1);
301 #ifdef TCC_TARGET_ARM
302 arm_init(s1);
303 #endif
305 #ifdef INC_DEBUG
306 printf("%s: **** new file\n", file->filename);
307 #endif
309 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
310 next();
311 decl(VT_CONST);
312 gen_inline_functions(s1);
313 check_vstack();
314 /* end of translation unit info */
315 tcc_debug_end(s1);
316 return 0;
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC void update_storage(Sym *sym)
330 ElfSym *esym;
331 int sym_bind, old_sym_bind;
333 esym = elfsym(sym);
334 if (!esym)
335 return;
337 if (sym->a.visibility)
338 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
339 | sym->a.visibility;
341 if (sym->type.t & (VT_STATIC | VT_INLINE))
342 sym_bind = STB_LOCAL;
343 else if (sym->a.weak)
344 sym_bind = STB_WEAK;
345 else
346 sym_bind = STB_GLOBAL;
347 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
348 if (sym_bind != old_sym_bind) {
349 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
352 #ifdef TCC_TARGET_PE
353 if (sym->a.dllimport)
354 esym->st_other |= ST_PE_IMPORT;
355 if (sym->a.dllexport)
356 esym->st_other |= ST_PE_EXPORT;
357 #endif
359 #if 0
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym->v, NULL),
362 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
363 sym->a.visibility,
364 sym->a.dllexport,
365 sym->a.dllimport
367 #endif
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
375 addr_t value, unsigned long size,
376 int can_add_underscore)
378 int sym_type, sym_bind, info, other, t;
379 ElfSym *esym;
380 const char *name;
381 char buf1[256];
382 #ifdef CONFIG_TCC_BCHECK
383 char buf[32];
384 #endif
386 if (!sym->c) {
387 name = get_tok_str(sym->v, NULL);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state->do_bounds_check) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
393 switch(sym->v) {
394 #ifdef TCC_TARGET_PE
395 /* XXX: we rely only on malloc hooks */
396 case TOK_malloc:
397 case TOK_free:
398 case TOK_realloc:
399 case TOK_memalign:
400 case TOK_calloc:
401 #endif
402 case TOK_memcpy:
403 case TOK_memmove:
404 case TOK_memset:
405 case TOK_strlen:
406 case TOK_strcpy:
407 case TOK_alloca:
408 strcpy(buf, "__bound_");
409 strcat(buf, name);
410 name = buf;
411 break;
414 #endif
415 t = sym->type.t;
416 if ((t & VT_BTYPE) == VT_FUNC) {
417 sym_type = STT_FUNC;
418 } else if ((t & VT_BTYPE) == VT_VOID) {
419 sym_type = STT_NOTYPE;
420 } else {
421 sym_type = STT_OBJECT;
423 if (t & (VT_STATIC | VT_INLINE))
424 sym_bind = STB_LOCAL;
425 else
426 sym_bind = STB_GLOBAL;
427 other = 0;
428 #ifdef TCC_TARGET_PE
429 if (sym_type == STT_FUNC && sym->type.ref) {
430 Sym *ref = sym->type.ref;
431 if (ref->a.nodecorate) {
432 can_add_underscore = 0;
434 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
435 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
436 name = buf1;
437 other |= ST_PE_STDCALL;
438 can_add_underscore = 0;
441 #endif
442 if (tcc_state->leading_underscore && can_add_underscore) {
443 buf1[0] = '_';
444 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
445 name = buf1;
447 if (sym->asm_label)
448 name = get_tok_str(sym->asm_label, NULL);
449 info = ELFW(ST_INFO)(sym_bind, sym_type);
450 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
451 } else {
452 esym = elfsym(sym);
453 esym->st_value = value;
454 esym->st_size = size;
455 esym->st_shndx = sh_num;
457 update_storage(sym);
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
486 #if PTR_SIZE == 4
487 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
489 greloca(s, sym, offset, type, 0);
491 #endif
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym *__sym_malloc(void)
497 Sym *sym_pool, *sym, *last_sym;
498 int i;
500 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
501 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
503 last_sym = sym_free_first;
504 sym = sym_pool;
505 for(i = 0; i < SYM_POOL_NB; i++) {
506 sym->next = last_sym;
507 last_sym = sym;
508 sym++;
510 sym_free_first = last_sym;
511 return last_sym;
514 static inline Sym *sym_malloc(void)
516 Sym *sym;
517 #ifndef SYM_DEBUG
518 sym = sym_free_first;
519 if (!sym)
520 sym = __sym_malloc();
521 sym_free_first = sym->next;
522 return sym;
523 #else
524 sym = tcc_malloc(sizeof(Sym));
525 return sym;
526 #endif
529 ST_INLN void sym_free(Sym *sym)
531 #ifndef SYM_DEBUG
532 sym->next = sym_free_first;
533 sym_free_first = sym;
534 #else
535 tcc_free(sym);
536 #endif
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
598 Sym *s, **ps;
599 TokenSym *ts;
601 if (local_stack)
602 ps = &local_stack;
603 else
604 ps = &global_stack;
605 s = sym_push2(ps, v, type->t, c);
606 s->type.ref = type->ref;
607 s->r = r;
608 /* don't record fields or anonymous symbols */
609 /* XXX: simplify */
610 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
611 /* record symbol in token array */
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 s->prev_tok = *ps;
618 *ps = s;
619 s->sym_scope = local_scope;
620 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v & ~SYM_STRUCT, NULL));
624 return s;
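/* Roughly speaking, every Sym is linked on two chains: a scope stack
   (global_stack or local_stack, linked through 'prev' and unwound by
   sym_pop()), and, for named non-field symbols, a per-token chain through
   'prev_tok' hanging off the TokenSym, so that sym_find()/struct_find()
   always see the innermost declaration first.  Redeclaring a name within
   the same local_scope is rejected right here. */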
627 /* push a global identifier */
628 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
630 Sym *s, **ps;
631 s = sym_push2(&global_stack, v, t, c);
632 s->r = VT_CONST | VT_SYM;
633 /* don't record anonymous symbol */
634 if (v < SYM_FIRST_ANOM) {
635 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps != NULL && (*ps)->sym_scope)
639 ps = &(*ps)->prev_tok;
640 s->prev_tok = *ps;
641 *ps = s;
643 return s;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
650 Sym *s, *ss, **ps;
651 TokenSym *ts;
652 int v;
654 s = *ptop;
655 while(s != b) {
656 ss = s->prev;
657 v = s->v;
658 /* remove symbol in token array */
659 /* XXX: simplify */
660 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
661 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
662 if (v & SYM_STRUCT)
663 ps = &ts->sym_struct;
664 else
665 ps = &ts->sym_identifier;
666 *ps = s->prev_tok;
668 if (!keep)
669 sym_free(s);
670 s = ss;
672 if (!keep)
673 *ptop = b;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* We cannot leave CPU flags set if other instructions are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as its value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop->r == VT_CMP && !nocode_wanted)
692 gv(RC_INT);
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
718 /* pop stack value */
719 ST_FUNC void vpop(void)
721 int v;
722 v = vtop->r & VT_VALMASK;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
725 if (v == TREG_ST0) {
726 o(0xd8dd); /* fstp %st(0) */
727 } else
728 #endif
729 if (v == VT_CMP) {
730 /* need to put correct jump if && or || without test */
731 gsym(vtop->jtrue);
732 gsym(vtop->jfalse);
734 vtop--;
737 /* push constant of type "type" with useless value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC void vrotb(int n)
810 int i;
811 SValue tmp;
813 vcheck_cmp();
814 tmp = vtop[-n + 1];
815 for(i=-n+1;i!=0;i++)
816 vtop[i] = vtop[i+1];
817 vtop[0] = tmp;
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC void vrote(SValue *e, int n)
825 int i;
826 SValue tmp;
828 vcheck_cmp();
829 tmp = *e;
830 for(i = 0;i < n - 1; i++)
831 e[-i] = e[-i - 1];
832 e[-n + 1] = tmp;
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC void vrott(int n)
840 vrote(vtop, n);
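/* Example of the rotations above, writing the stack bottom..top left to
   right: with A B C on the stack (C on top), vrotb(3) yields B C A and
   vrott(3) yields C A B; vrote(e, n) is the general form, rotating the n
   entries ending at 'e' towards the top. */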
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC void vset_VT_CMP(int op)
849 vtop->r = VT_CMP;
850 vtop->cmp_op = op;
851 vtop->jfalse = 0;
852 vtop->jtrue = 0;
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op = vtop->cmp_op;
859 if (vtop->jtrue || vtop->jfalse) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv = op & (op < 2); /* small optimization */
862 vseti(VT_JMP+inv, gvtst(inv, 0));
863 } else {
864 /* otherwise convert flags (rsp. 0/1) to register */
865 vtop->c.i = op;
866 if (op < 2) /* doesn't seem to happen */
867 vtop->r = VT_CONST;
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv, int t)
874 int *p;
875 if (vtop->r != VT_CMP) {
876 vpushi(0);
877 gen_op(TOK_NE);
878 if (vtop->r != VT_CMP) /* must be VT_CONST then */
879 vset_VT_CMP(vtop->c.i != 0);
881 p = inv ? &vtop->jfalse : &vtop->jtrue;
882 *p = gjmp_append(*p, t);
885 /* Generate value test
887 * Generate a test for any value (jump, comparison and integers) */
888 static int gvtst(int inv, int t)
890 int op, u, x;
892 gvtst_set(inv, t);
894 t = vtop->jtrue, u = vtop->jfalse;
895 if (inv)
896 x = u, u = t, t = x;
897 op = vtop->cmp_op;
899 /* jump to the wanted target */
900 if (op > 1)
901 t = gjmp_cond(op ^ inv, t);
902 else if (op != inv)
903 t = gjmp(t);
904 /* resolve complementary jumps to here */
905 gsym(u);
907 vtop--;
908 return t;
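/* Rough summary of the comparison machinery: a relational operator leaves
   its result in the CPU flags and marks vtop as VT_CMP, with 'cmp_op'
   recording the condition and 'jtrue'/'jfalse' collecting pending forward
   jumps.  gvtst_set() makes sure vtop really is such a VT_CMP (comparing
   against 0 with TOK_NE if necessary) and appends 't' to the requested
   chain; gvtst() then emits the conditional jump for the condition itself,
   resolves the complementary chain to the fall-through point and pops the
   value.  vset_VT_JMP() is used instead when the 0/1 result must end up in
   a register. */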
911 /* ------------------------------------------------------------------------- */
912 /* push a symbol value of TYPE */
913 static inline void vpushsym(CType *type, Sym *sym)
915 CValue cval;
916 cval.i = 0;
917 vsetc(type, VT_CONST | VT_SYM, &cval);
918 vtop->sym = sym;
921 /* Return a static symbol pointing to a section */
922 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
924 int v;
925 Sym *sym;
927 v = anon_sym++;
928 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
929 sym->type.t |= VT_STATIC;
930 put_extern_sym(sym, sec, offset, size);
931 return sym;
934 /* push a reference to a section offset by adding a dummy symbol */
935 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
937 vpushsym(type, get_sym_ref(type, sec, offset, size));
940 /* define a new external reference to a symbol 'v' of type 'u' */
941 ST_FUNC Sym *external_global_sym(int v, CType *type)
943 Sym *s;
945 s = sym_find(v);
946 if (!s) {
947 /* push forward reference */
948 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
949 s->type.ref = type->ref;
950 } else if (IS_ASM_SYM(s)) {
951 s->type.t = type->t | (s->type.t & VT_EXTERN);
952 s->type.ref = type->ref;
953 update_storage(s);
955 return s;
958 /* Merge symbol attributes. */
959 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
961 if (sa1->aligned && !sa->aligned)
962 sa->aligned = sa1->aligned;
963 sa->packed |= sa1->packed;
964 sa->weak |= sa1->weak;
965 if (sa1->visibility != STV_DEFAULT) {
966 int vis = sa->visibility;
967 if (vis == STV_DEFAULT
968 || vis > sa1->visibility)
969 vis = sa1->visibility;
970 sa->visibility = vis;
972 sa->dllexport |= sa1->dllexport;
973 sa->nodecorate |= sa1->nodecorate;
974 sa->dllimport |= sa1->dllimport;
977 /* Merge function attributes. */
978 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
980 if (fa1->func_call && !fa->func_call)
981 fa->func_call = fa1->func_call;
982 if (fa1->func_type && !fa->func_type)
983 fa->func_type = fa1->func_type;
984 if (fa1->func_args && !fa->func_args)
985 fa->func_args = fa1->func_args;
988 /* Merge attributes. */
989 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
991 merge_symattr(&ad->a, &ad1->a);
992 merge_funcattr(&ad->f, &ad1->f);
994 if (ad1->section)
995 ad->section = ad1->section;
996 if (ad1->alias_target)
997 ad->alias_target = ad1->alias_target;
998 if (ad1->asm_label)
999 ad->asm_label = ad1->asm_label;
1000 if (ad1->attr_mode)
1001 ad->attr_mode = ad1->attr_mode;
1004 /* Merge some type attributes. */
1005 static void patch_type(Sym *sym, CType *type)
1007 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1008 if (!(sym->type.t & VT_EXTERN))
1009 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1010 sym->type.t &= ~VT_EXTERN;
1013 if (IS_ASM_SYM(sym)) {
1014 /* stay static if both are static */
1015 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1016 sym->type.ref = type->ref;
1019 if (!is_compatible_types(&sym->type, type)) {
1020 tcc_error("incompatible types for redefinition of '%s'",
1021 get_tok_str(sym->v, NULL));
1023 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1024 int static_proto = sym->type.t & VT_STATIC;
1025 /* warn if static follows non-static function declaration */
1026 if ((type->t & VT_STATIC) && !static_proto
1027 /* XXX this test for inline shouldn't be here. Until we
1028 implement gnu-inline mode again it silences a warning for
1029 mingw caused by our workarounds. */
1030 && !((type->t | sym->type.t) & VT_INLINE))
1031 tcc_warning("static storage ignored for redefinition of '%s'",
1032 get_tok_str(sym->v, NULL));
1034 /* set 'inline' if both agree or if one has static */
1035 if ((type->t | sym->type.t) & VT_INLINE) {
1036 if (!((type->t ^ sym->type.t) & VT_INLINE)
1037 || ((type->t | sym->type.t) & VT_STATIC))
1038 static_proto |= VT_INLINE;
1041 if (0 == (type->t & VT_EXTERN)) {
1042 /* put complete type, use static from prototype */
1043 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1044 sym->type.ref = type->ref;
1045 } else {
1046 sym->type.t &= ~VT_INLINE | static_proto;
1049 if (sym->type.ref->f.func_type == FUNC_OLD
1050 && type->ref->f.func_type != FUNC_OLD) {
1051 sym->type.ref = type->ref;
1054 } else {
1055 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1056 /* set array size if it was omitted in extern declaration */
1057 sym->type.ref->c = type->ref->c;
1059 if ((type->t ^ sym->type.t) & VT_STATIC)
1060 tcc_warning("storage mismatch for redefinition of '%s'",
1061 get_tok_str(sym->v, NULL));
1065 /* Merge some storage attributes. */
1066 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1068 if (type)
1069 patch_type(sym, type);
1071 #ifdef TCC_TARGET_PE
1072 if (sym->a.dllimport != ad->a.dllimport)
1073 tcc_error("incompatible dll linkage for redefinition of '%s'",
1074 get_tok_str(sym->v, NULL));
1075 #endif
1076 merge_symattr(&sym->a, &ad->a);
1077 if (ad->asm_label)
1078 sym->asm_label = ad->asm_label;
1079 update_storage(sym);
1082 /* copy sym to other stack */
1083 static Sym *sym_copy(Sym *s0, Sym **ps)
1085 Sym *s;
1086 s = sym_malloc(), *s = *s0;
1087 s->prev = *ps, *ps = s;
1088 if (s->v < SYM_FIRST_ANOM) {
1089 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1090 s->prev_tok = *ps, *ps = s;
1092 return s;
1095 /* copy a list of syms */
1096 static void sym_copy_ref(Sym *s0, Sym **ps)
1098 Sym *s, **sp = &s0->type.ref;
1099 for (s = *sp, *sp = NULL; s; s = s->next)
1100 sp = &(*sp = sym_copy(s, ps))->next;
1103 /* define a new external reference to a symbol 'v' */
1104 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1106 Sym *s; int bt;
1108 /* look for global symbol */
1109 s = sym_find(v);
1110 while (s && s->sym_scope)
1111 s = s->prev_tok;
1113 if (!s) {
1114 /* push forward reference */
1115 s = global_identifier_push(v, type->t, 0);
1116 s->r |= r;
1117 s->a = ad->a;
1118 s->asm_label = ad->asm_label;
1119 s->type.ref = type->ref;
1120 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1121 /* copy type to the global stack also */
1122 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1123 sym_copy_ref(s, &global_stack);
1124 } else {
1125 patch_storage(s, ad, type);
1126 bt = s->type.t & VT_BTYPE;
1128 /* push variables to local scope if any */
1129 if (local_stack && bt != VT_FUNC)
1130 s = sym_copy(s, &local_stack);
1131 return s;
1134 /* push a reference to global symbol v */
1135 ST_FUNC void vpush_global_sym(CType *type, int v)
1137 vpushsym(type, external_global_sym(v, type));
1140 /* save registers up to (vtop - n) stack entry */
1141 ST_FUNC void save_regs(int n)
1143 SValue *p, *p1;
1144 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1145 save_reg(p->r);
1148 /* save r to the memory stack, and mark it as being free */
1149 ST_FUNC void save_reg(int r)
1151 save_reg_upstack(r, 0);
1154 /* save r to the memory stack, and mark it as being free,
1155 if seen up to (vtop - n) stack entry */
1156 ST_FUNC void save_reg_upstack(int r, int n)
1158 int l, saved, size, align;
1159 SValue *p, *p1, sv;
1160 CType *type;
1162 if ((r &= VT_VALMASK) >= VT_CONST)
1163 return;
1164 if (nocode_wanted)
1165 return;
1167 /* modify all stack values */
1168 saved = 0;
1169 l = 0;
1170 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1171 if ((p->r & VT_VALMASK) == r ||
1172 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1173 /* must save value on stack if not already done */
1174 if (!saved) {
1175 /* NOTE: must reload 'r' because r might be equal to r2 */
1176 r = p->r & VT_VALMASK;
1177 /* store register in the stack */
1178 type = &p->type;
1179 if ((p->r & VT_LVAL) ||
1180 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1181 #if PTR_SIZE == 8
1182 type = &char_pointer_type;
1183 #else
1184 type = &int_type;
1185 #endif
1186 size = type_size(type, &align);
1187 l=get_temp_local_var(size,align);
1188 sv.type.t = type->t;
1189 sv.r = VT_LOCAL | VT_LVAL;
1190 sv.c.i = l;
1191 store(r, &sv);
1192 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1193 /* x86 specific: need to pop fp register ST0 if saved */
1194 if (r == TREG_ST0) {
1195 o(0xd8dd); /* fstp %st(0) */
1197 #endif
1198 #if PTR_SIZE == 4
1199 /* special long long case */
1200 if ((type->t & VT_BTYPE) == VT_LLONG) {
1201 sv.c.i += 4;
1202 store(p->r2, &sv);
1204 #endif
1205 saved = 1;
1207 /* mark that stack entry as being saved on the stack */
1208 if (p->r & VT_LVAL) {
1209 /* also clear the bounded flag because the
1210 relocation address of the function was stored in
1211 p->c.i */
1212 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1213 } else {
1214 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1216 p->r2 = VT_CONST;
1217 p->c.i = l;
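/* Sketch of the spill logic above: when register 'r' must be freed, the
   first vstack entry (up to vtop - n) still holding it is stored into a
   temporary stack slot obtained from get_temp_local_var(), and every entry
   that referenced r (or r2 for the second half of a long long on 32-bit
   targets) is rewritten as an lvalue at that slot: VT_LLOCAL if the entry
   was itself an lvalue, plain VT_LOCAL otherwise. */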
1222 #ifdef TCC_TARGET_ARM
1223 /* find a register of class 'rc2' with at most one reference on stack.
1224 * If none, call get_reg(rc) */
1225 ST_FUNC int get_reg_ex(int rc, int rc2)
1227 int r;
1228 SValue *p;
1230 for(r=0;r<NB_REGS;r++) {
1231 if (reg_classes[r] & rc2) {
1232 int n;
1233 n=0;
1234 for(p = vstack; p <= vtop; p++) {
1235 if ((p->r & VT_VALMASK) == r ||
1236 (p->r2 & VT_VALMASK) == r)
1237 n++;
1239 if (n <= 1)
1240 return r;
1243 return get_reg(rc);
1245 #endif
1247 /* find a free register of class 'rc'. If none, save one register */
1248 ST_FUNC int get_reg(int rc)
1250 int r;
1251 SValue *p;
1253 /* find a free register */
1254 for(r=0;r<NB_REGS;r++) {
1255 if (reg_classes[r] & rc) {
1256 if (nocode_wanted)
1257 return r;
1258 for(p=vstack;p<=vtop;p++) {
1259 if ((p->r & VT_VALMASK) == r ||
1260 (p->r2 & VT_VALMASK) == r)
1261 goto notfound;
1263 return r;
1265 notfound: ;
1268 /* no register left : free the first one on the stack (VERY
1269 IMPORTANT to start from the bottom to ensure that we don't
1270 spill registers used in gen_opi()) */
1271 for(p=vstack;p<=vtop;p++) {
1272 /* look at second register (if long long) */
1273 r = p->r2 & VT_VALMASK;
1274 if (r < VT_CONST && (reg_classes[r] & rc))
1275 goto save_found;
1276 r = p->r & VT_VALMASK;
1277 if (r < VT_CONST && (reg_classes[r] & rc)) {
1278 save_found:
1279 save_reg(r);
1280 return r;
1283 /* Should never come here */
1284 return -1;
1287 /* find a free temporary local variable (return the offset on the stack) matching the size and alignment. If none, add a new temporary stack variable */
1288 static int get_temp_local_var(int size,int align){
1289 int i;
1290 struct temp_local_variable *temp_var;
1291 int found_var;
1292 SValue *p;
1293 int r;
1294 char free;
1295 char found;
1296 found=0;
1297 for(i=0;i<nb_temp_local_vars;i++){
1298 temp_var=&arr_temp_local_vars[i];
1299 if(temp_var->size<size||align!=temp_var->align){
1300 continue;
1302 /*check if temp_var is free*/
1303 free=1;
1304 for(p=vstack;p<=vtop;p++) {
1305 r=p->r&VT_VALMASK;
1306 if(r==VT_LOCAL||r==VT_LLOCAL){
1307 if(p->c.i==temp_var->location){
1308 free=0;
1309 break;
1313 if(free){
1314 found_var=temp_var->location;
1315 found=1;
1316 break;
1319 if(!found){
1320 loc = (loc - size) & -align;
1321 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1322 temp_var=&arr_temp_local_vars[i];
1323 temp_var->location=loc;
1324 temp_var->size=size;
1325 temp_var->align=align;
1326 nb_temp_local_vars++;
1328 found_var=loc;
1330 return found_var;
1333 static void clear_temp_local_var_list(){
1334 nb_temp_local_vars=0;
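/* Note on the temporary-slot cache above: get_temp_local_var() first scans
   arr_temp_local_vars for a recorded slot that is large enough, has the same
   alignment and is not referenced by any live vstack entry (no VT_LOCAL or
   VT_LLOCAL value whose c.i matches the slot's offset).  Only then is a new
   slot carved out of 'loc', and it is remembered for reuse as long as fewer
   than MAX_TEMP_LOCAL_VARIABLE_NUMBER slots have been recorded.
   clear_temp_local_var_list() simply forgets all recorded slots. */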
1337 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1338 if needed */
1339 static void move_reg(int r, int s, int t)
1341 SValue sv;
1343 if (r != s) {
1344 save_reg(r);
1345 sv.type.t = t;
1346 sv.type.ref = NULL;
1347 sv.r = s;
1348 sv.c.i = 0;
1349 load(r, &sv);
1353 /* get address of vtop (vtop MUST BE an lvalue) */
1354 ST_FUNC void gaddrof(void)
1356 vtop->r &= ~VT_LVAL;
1357 /* tricky: if saved lvalue, then we can go back to lvalue */
1358 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1359 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1364 #ifdef CONFIG_TCC_BCHECK
1365 /* generate lvalue bound code */
1366 static void gbound(void)
1368 int lval_type;
1369 CType type1;
1371 vtop->r &= ~VT_MUSTBOUND;
1372 /* if lvalue, then use checking code before dereferencing */
1373 if (vtop->r & VT_LVAL) {
1374 /* if not VT_BOUNDED value, then make one */
1375 if (!(vtop->r & VT_BOUNDED)) {
1376 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1377 /* must save type because we must set it to int to get pointer */
1378 type1 = vtop->type;
1379 vtop->type.t = VT_PTR;
1380 gaddrof();
1381 vpushi(0);
1382 gen_bounded_ptr_add();
1383 vtop->r |= lval_type;
1384 vtop->type = type1;
1386 /* then check for dereferencing */
1387 gen_bounded_ptr_deref();
1390 #endif
1392 static void incr_bf_adr(int o)
1394 vtop->type = char_pointer_type;
1395 gaddrof();
1396 vpushi(o);
1397 gen_op('+');
1398 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1399 | (VT_BYTE|VT_UNSIGNED);
1400 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1401 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1404 /* single-byte load mode for packed or otherwise unaligned bitfields */
1405 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1407 int n, o, bits;
1408 save_reg_upstack(vtop->r, 1);
1409 vpush64(type->t & VT_BTYPE, 0); // B X
1410 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1411 do {
1412 vswap(); // X B
1413 incr_bf_adr(o);
1414 vdup(); // X B B
1415 n = 8 - bit_pos;
1416 if (n > bit_size)
1417 n = bit_size;
1418 if (bit_pos)
1419 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1420 if (n < 8)
1421 vpushi((1 << n) - 1), gen_op('&');
1422 gen_cast(type);
1423 if (bits)
1424 vpushi(bits), gen_op(TOK_SHL);
1425 vrotb(3); // B Y X
1426 gen_op('|'); // B X
1427 bits += n, bit_size -= n, o = 1;
1428 } while (bit_size);
1429 vswap(), vpop();
1430 if (!(type->t & VT_UNSIGNED)) {
1431 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1432 vpushi(n), gen_op(TOK_SHL);
1433 vpushi(n), gen_op(TOK_SAR);
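/* Reading of the loop above (the "// X B ..." comments show the vstack,
   top at the right; B is the byte address, X the value being assembled):
   the bitfield is fetched one byte at a time via incr_bf_adr(), each chunk
   is shifted down by the in-byte bit position, masked to the bits that
   belong to the field, shifted up by the number of bits already collected
   and OR-ed into the accumulator.  The final SHL/SAR pair sign-extends the
   result when the field's type is signed. */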
1437 /* single-byte store mode for packed or otherwise unaligned bitfields */
1438 static void store_packed_bf(int bit_pos, int bit_size)
1440 int bits, n, o, m, c;
1442 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1443 vswap(); // X B
1444 save_reg_upstack(vtop->r, 1);
1445 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1446 do {
1447 incr_bf_adr(o); // X B
1448 vswap(); //B X
1449 c ? vdup() : gv_dup(); // B V X
1450 vrott(3); // X B V
1451 if (bits)
1452 vpushi(bits), gen_op(TOK_SHR);
1453 if (bit_pos)
1454 vpushi(bit_pos), gen_op(TOK_SHL);
1455 n = 8 - bit_pos;
1456 if (n > bit_size)
1457 n = bit_size;
1458 if (n < 8) {
1459 m = ((1 << n) - 1) << bit_pos;
1460 vpushi(m), gen_op('&'); // X B V1
1461 vpushv(vtop-1); // X B V1 B
1462 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1463 gen_op('&'); // X B V1 B1
1464 gen_op('|'); // X B V2
1466 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1467 vstore(), vpop(); // X B
1468 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1469 } while (bit_size);
1470 vpop(), vpop();
1473 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1475 int t;
1476 if (0 == sv->type.ref)
1477 return 0;
1478 t = sv->type.ref->auxtype;
1479 if (t != -1 && t != VT_STRUCT) {
1480 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1481 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1483 return t;
1486 /* store vtop in a register belonging to class 'rc'. lvalues are
1487 converted to values. Cannot be used if the value cannot be
1488 converted to a register value (such as structures). */
1489 ST_FUNC int gv(int rc)
1491 int r, bit_pos, bit_size, size, align, rc2;
1493 /* NOTE: get_reg can modify vstack[] */
1494 if (vtop->type.t & VT_BITFIELD) {
1495 CType type;
1497 bit_pos = BIT_POS(vtop->type.t);
1498 bit_size = BIT_SIZE(vtop->type.t);
1499 /* remove bit field info to avoid loops */
1500 vtop->type.t &= ~VT_STRUCT_MASK;
1502 type.ref = NULL;
1503 type.t = vtop->type.t & VT_UNSIGNED;
1504 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1505 type.t |= VT_UNSIGNED;
1507 r = adjust_bf(vtop, bit_pos, bit_size);
1509 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1510 type.t |= VT_LLONG;
1511 else
1512 type.t |= VT_INT;
1514 if (r == VT_STRUCT) {
1515 load_packed_bf(&type, bit_pos, bit_size);
1516 } else {
1517 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1518 /* cast to int to propagate signedness in following ops */
1519 gen_cast(&type);
1520 /* generate shifts */
1521 vpushi(bits - (bit_pos + bit_size));
1522 gen_op(TOK_SHL);
1523 vpushi(bits - bit_size);
1524 /* NOTE: transformed to SHR if unsigned */
1525 gen_op(TOK_SAR);
1527 r = gv(rc);
1528 } else {
1529 if (is_float(vtop->type.t) &&
1530 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1531 unsigned long offset;
1532 /* CPUs usually cannot use float constants, so we store them
1533 generically in data segment */
1534 size = type_size(&vtop->type, &align);
1535 if (NODATA_WANTED)
1536 size = 0, align = 1;
1537 offset = section_add(data_section, size, align);
1538 vpush_ref(&vtop->type, data_section, offset, size);
1539 vswap();
1540 init_putv(&vtop->type, data_section, offset);
1541 vtop->r |= VT_LVAL;
1543 #ifdef CONFIG_TCC_BCHECK
1544 if (vtop->r & VT_MUSTBOUND)
1545 gbound();
1546 #endif
1548 r = vtop->r & VT_VALMASK;
1549 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1550 #ifndef TCC_TARGET_ARM64
1551 if (rc == RC_IRET)
1552 rc2 = RC_LRET;
1553 #ifdef TCC_TARGET_X86_64
1554 else if (rc == RC_FRET)
1555 rc2 = RC_QRET;
1556 #endif
1557 #endif
1558 /* need to reload if:
1559 - constant
1560 - lvalue (need to dereference pointer)
1561 - already a register, but not in the right class */
1562 if (r >= VT_CONST
1563 || (vtop->r & VT_LVAL)
1564 || !(reg_classes[r] & rc)
1565 #if PTR_SIZE == 8
1566 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1567 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1568 #else
1569 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1570 #endif
1573 r = get_reg(rc);
1574 #if PTR_SIZE == 8
1575 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1576 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1577 #else
1578 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1579 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1580 unsigned long long ll;
1581 #endif
1582 int r2, original_type;
1583 original_type = vtop->type.t;
1584 /* two register type load : expand to two words
1585 temporarily */
1586 #if PTR_SIZE == 4
1587 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1588 /* load constant */
1589 ll = vtop->c.i;
1590 vtop->c.i = ll; /* first word */
1591 load(r, vtop);
1592 vtop->r = r; /* save register value */
1593 vpushi(ll >> 32); /* second word */
1594 } else
1595 #endif
1596 if (vtop->r & VT_LVAL) {
1597 /* We do not want to modify the long long
1598 pointer here, so the safest (and least
1599 efficient) approach is to save all the other registers
1600 in the stack. XXX: totally inefficient. */
1601 #if 0
1602 save_regs(1);
1603 #else
1604 /* lvalue_save: save only if used further down the stack */
1605 save_reg_upstack(vtop->r, 1);
1606 #endif
1607 /* load from memory */
1608 vtop->type.t = load_type;
1609 load(r, vtop);
1610 vdup();
1611 vtop[-1].r = r; /* save register value */
1612 /* increment pointer to get second word */
1613 vtop->type.t = addr_type;
1614 gaddrof();
1615 vpushi(load_size);
1616 gen_op('+');
1617 vtop->r |= VT_LVAL;
1618 vtop->type.t = load_type;
1619 } else {
1620 /* move registers */
1621 load(r, vtop);
1622 vdup();
1623 vtop[-1].r = r; /* save register value */
1624 vtop->r = vtop[-1].r2;
1626 /* Allocate second register. Here we rely on the fact that
1627 get_reg() tries first to free r2 of an SValue. */
1628 r2 = get_reg(rc2);
1629 load(r2, vtop);
1630 vpop();
1631 /* write second register */
1632 vtop->r2 = r2;
1633 vtop->type.t = original_type;
1634 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1635 int t1, t;
1636 /* lvalue of scalar type : need to use lvalue type
1637 because of possible cast */
1638 t = vtop->type.t;
1639 t1 = t;
1640 /* compute memory access type */
1641 if (vtop->r & VT_LVAL_BYTE)
1642 t = VT_BYTE;
1643 else if (vtop->r & VT_LVAL_SHORT)
1644 t = VT_SHORT;
1645 if (vtop->r & VT_LVAL_UNSIGNED)
1646 t |= VT_UNSIGNED;
1647 vtop->type.t = t;
1648 load(r, vtop);
1649 /* restore wanted type */
1650 vtop->type.t = t1;
1651 } else {
1652 if (vtop->r == VT_CMP)
1653 vset_VT_JMP();
1654 /* one register type load */
1655 load(r, vtop);
1658 vtop->r = r;
1659 #ifdef TCC_TARGET_C67
1660 /* uses register pairs for doubles */
1661 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1662 vtop->r2 = r+1;
1663 #endif
1665 return r;
1668 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1669 ST_FUNC void gv2(int rc1, int rc2)
1671 /* generate more generic register first. But VT_JMP or VT_CMP
1672 values must be generated first in all cases to avoid possible
1673 reload errors */
1674 if (vtop->r != VT_CMP && rc1 <= rc2) {
1675 vswap();
1676 gv(rc1);
1677 vswap();
1678 gv(rc2);
1679 /* test if reload is needed for first register */
1680 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1681 vswap();
1682 gv(rc1);
1683 vswap();
1685 } else {
1686 gv(rc2);
1687 vswap();
1688 gv(rc1);
1689 vswap();
1690 /* test if reload is needed for first register */
1691 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1692 gv(rc2);
1697 #ifndef TCC_TARGET_ARM64
1698 /* wrapper around RC_FRET to return a register by type */
1699 static int rc_fret(int t)
1701 #ifdef TCC_TARGET_X86_64
1702 if (t == VT_LDOUBLE) {
1703 return RC_ST0;
1705 #endif
1706 return RC_FRET;
1708 #endif
1710 /* wrapper around REG_FRET to return a register by type */
1711 static int reg_fret(int t)
1713 #ifdef TCC_TARGET_X86_64
1714 if (t == VT_LDOUBLE) {
1715 return TREG_ST0;
1717 #endif
1718 return REG_FRET;
1721 #if PTR_SIZE == 4
1722 /* expand 64bit on stack in two ints */
1723 ST_FUNC void lexpand(void)
1725 int u, v;
1726 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1727 v = vtop->r & (VT_VALMASK | VT_LVAL);
1728 if (v == VT_CONST) {
1729 vdup();
1730 vtop[0].c.i >>= 32;
1731 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1732 vdup();
1733 vtop[0].c.i += 4;
1734 } else {
1735 gv(RC_INT);
1736 vdup();
1737 vtop[0].r = vtop[-1].r2;
1738 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1740 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1742 #endif
1744 #if PTR_SIZE == 4
1745 /* build a long long from two ints */
1746 static void lbuild(int t)
1748 gv2(RC_INT, RC_INT);
1749 vtop[-1].r2 = vtop[0].r;
1750 vtop[-1].type.t = t;
1751 vpop();
1753 #endif
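/* On 32-bit targets a long long occupies two registers: r holds the low
   word and r2 the high word.  lexpand() splits the top vstack entry into
   two VT_INT entries, low word below and high word on top (for constants
   by shifting, for lvalues by pointing at the other half at offset +4,
   otherwise by exposing r2); lbuild() is the inverse, loading both halves
   and folding the top register back into r2 of the entry below. */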
1755 /* convert stack entry to register and duplicate its value in another
1756 register */
1757 static void gv_dup(void)
1759 int rc, t, r, r1;
1760 SValue sv;
1762 t = vtop->type.t;
1763 #if PTR_SIZE == 4
1764 if ((t & VT_BTYPE) == VT_LLONG) {
1765 if (t & VT_BITFIELD) {
1766 gv(RC_INT);
1767 t = vtop->type.t;
1769 lexpand();
1770 gv_dup();
1771 vswap();
1772 vrotb(3);
1773 gv_dup();
1774 vrotb(4);
1775 /* stack: H L L1 H1 */
1776 lbuild(t);
1777 vrotb(3);
1778 vrotb(3);
1779 vswap();
1780 lbuild(t);
1781 vswap();
1782 } else
1783 #endif
1785 /* duplicate value */
1786 rc = RC_INT;
1787 sv.type.t = VT_INT;
1788 if (is_float(t)) {
1789 rc = RC_FLOAT;
1790 #ifdef TCC_TARGET_X86_64
1791 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1792 rc = RC_ST0;
1794 #endif
1795 sv.type.t = t;
1797 r = gv(rc);
1798 r1 = get_reg(rc);
1799 sv.r = r;
1800 sv.c.i = 0;
1801 load(r1, &sv); /* move r to r1 */
1802 vdup();
1803 /* duplicates value */
1804 if (r != r1)
1805 vtop->r = r1;
1809 #if PTR_SIZE == 4
1810 /* generate CPU independent (unsigned) long long operations */
1811 static void gen_opl(int op)
1813 int t, a, b, op1, c, i;
1814 int func;
1815 unsigned short reg_iret = REG_IRET;
1816 unsigned short reg_lret = REG_LRET;
1817 SValue tmp;
1819 switch(op) {
1820 case '/':
1821 case TOK_PDIV:
1822 func = TOK___divdi3;
1823 goto gen_func;
1824 case TOK_UDIV:
1825 func = TOK___udivdi3;
1826 goto gen_func;
1827 case '%':
1828 func = TOK___moddi3;
1829 goto gen_mod_func;
1830 case TOK_UMOD:
1831 func = TOK___umoddi3;
1832 gen_mod_func:
1833 #ifdef TCC_ARM_EABI
1834 reg_iret = TREG_R2;
1835 reg_lret = TREG_R3;
1836 #endif
1837 gen_func:
1838 /* call generic long long function */
1839 vpush_global_sym(&func_old_type, func);
1840 vrott(3);
1841 gfunc_call(2);
1842 vpushi(0);
1843 vtop->r = reg_iret;
1844 vtop->r2 = reg_lret;
1845 break;
1846 case '^':
1847 case '&':
1848 case '|':
1849 case '*':
1850 case '+':
1851 case '-':
1852 //pv("gen_opl A",0,2);
1853 t = vtop->type.t;
1854 vswap();
1855 lexpand();
1856 vrotb(3);
1857 lexpand();
1858 /* stack: L1 H1 L2 H2 */
1859 tmp = vtop[0];
1860 vtop[0] = vtop[-3];
1861 vtop[-3] = tmp;
1862 tmp = vtop[-2];
1863 vtop[-2] = vtop[-3];
1864 vtop[-3] = tmp;
1865 vswap();
1866 /* stack: H1 H2 L1 L2 */
1867 //pv("gen_opl B",0,4);
1868 if (op == '*') {
1869 vpushv(vtop - 1);
1870 vpushv(vtop - 1);
1871 gen_op(TOK_UMULL);
1872 lexpand();
1873 /* stack: H1 H2 L1 L2 ML MH */
1874 for(i=0;i<4;i++)
1875 vrotb(6);
1876 /* stack: ML MH H1 H2 L1 L2 */
1877 tmp = vtop[0];
1878 vtop[0] = vtop[-2];
1879 vtop[-2] = tmp;
1880 /* stack: ML MH H1 L2 H2 L1 */
1881 gen_op('*');
1882 vrotb(3);
1883 vrotb(3);
1884 gen_op('*');
1885 /* stack: ML MH M1 M2 */
1886 gen_op('+');
1887 gen_op('+');
1888 } else if (op == '+' || op == '-') {
1889 /* XXX: add non carry method too (for MIPS or alpha) */
1890 if (op == '+')
1891 op1 = TOK_ADDC1;
1892 else
1893 op1 = TOK_SUBC1;
1894 gen_op(op1);
1895 /* stack: H1 H2 (L1 op L2) */
1896 vrotb(3);
1897 vrotb(3);
1898 gen_op(op1 + 1); /* TOK_xxxC2 */
1899 } else {
1900 gen_op(op);
1901 /* stack: H1 H2 (L1 op L2) */
1902 vrotb(3);
1903 vrotb(3);
1904 /* stack: (L1 op L2) H1 H2 */
1905 gen_op(op);
1906 /* stack: (L1 op L2) (H1 op H2) */
1908 /* stack: L H */
1909 lbuild(t);
1910 break;
1911 case TOK_SAR:
1912 case TOK_SHR:
1913 case TOK_SHL:
1914 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1915 t = vtop[-1].type.t;
1916 vswap();
1917 lexpand();
1918 vrotb(3);
1919 /* stack: L H shift */
1920 c = (int)vtop->c.i;
1921 /* constant: simpler */
1922 /* NOTE: all comments are for SHL. the other cases are
1923 done by swapping words */
1924 vpop();
1925 if (op != TOK_SHL)
1926 vswap();
1927 if (c >= 32) {
1928 /* stack: L H */
1929 vpop();
1930 if (c > 32) {
1931 vpushi(c - 32);
1932 gen_op(op);
1934 if (op != TOK_SAR) {
1935 vpushi(0);
1936 } else {
1937 gv_dup();
1938 vpushi(31);
1939 gen_op(TOK_SAR);
1941 vswap();
1942 } else {
1943 vswap();
1944 gv_dup();
1945 /* stack: H L L */
1946 vpushi(c);
1947 gen_op(op);
1948 vswap();
1949 vpushi(32 - c);
1950 if (op == TOK_SHL)
1951 gen_op(TOK_SHR);
1952 else
1953 gen_op(TOK_SHL);
1954 vrotb(3);
1955 /* stack: L L H */
1956 vpushi(c);
1957 if (op == TOK_SHL)
1958 gen_op(TOK_SHL);
1959 else
1960 gen_op(TOK_SHR);
1961 gen_op('|');
1963 if (op != TOK_SHL)
1964 vswap();
1965 lbuild(t);
1966 } else {
1967 /* XXX: should provide a faster fallback on x86 ? */
1968 switch(op) {
1969 case TOK_SAR:
1970 func = TOK___ashrdi3;
1971 goto gen_func;
1972 case TOK_SHR:
1973 func = TOK___lshrdi3;
1974 goto gen_func;
1975 case TOK_SHL:
1976 func = TOK___ashldi3;
1977 goto gen_func;
1980 break;
1981 default:
1982 /* compare operations */
1983 t = vtop->type.t;
1984 vswap();
1985 lexpand();
1986 vrotb(3);
1987 lexpand();
1988 /* stack: L1 H1 L2 H2 */
1989 tmp = vtop[-1];
1990 vtop[-1] = vtop[-2];
1991 vtop[-2] = tmp;
1992 /* stack: L1 L2 H1 H2 */
1993 save_regs(4);
1994 /* compare high */
1995 op1 = op;
1996 /* when values are equal, we need to compare low words. since
1997 the jump is inverted, we invert the test too. */
1998 if (op1 == TOK_LT)
1999 op1 = TOK_LE;
2000 else if (op1 == TOK_GT)
2001 op1 = TOK_GE;
2002 else if (op1 == TOK_ULT)
2003 op1 = TOK_ULE;
2004 else if (op1 == TOK_UGT)
2005 op1 = TOK_UGE;
2006 a = 0;
2007 b = 0;
2008 gen_op(op1);
2009 if (op == TOK_NE) {
2010 b = gvtst(0, 0);
2011 } else {
2012 a = gvtst(1, 0);
2013 if (op != TOK_EQ) {
2014 /* generate non equal test */
2015 vpushi(0);
2016 vset_VT_CMP(TOK_NE);
2017 b = gvtst(0, 0);
2020 /* compare low. Always unsigned */
2021 op1 = op;
2022 if (op1 == TOK_LT)
2023 op1 = TOK_ULT;
2024 else if (op1 == TOK_LE)
2025 op1 = TOK_ULE;
2026 else if (op1 == TOK_GT)
2027 op1 = TOK_UGT;
2028 else if (op1 == TOK_GE)
2029 op1 = TOK_UGE;
2030 gen_op(op1);
2031 #if 0//def TCC_TARGET_I386
2032 if (op == TOK_NE) { gsym(b); break; }
2033 if (op == TOK_EQ) { gsym(a); break; }
2034 #endif
2035 gvtst_set(1, a);
2036 gvtst_set(0, b);
2037 break;
2040 #endif
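/* Summary of gen_opl() above (64-bit arithmetic on 32-bit targets):
   division and modulo call the helpers __divdi3/__udivdi3/__moddi3/
   __umoddi3; +, -, the bitwise ops and * are open-coded on the expanded
   low/high words (+/- via the carry pairs TOK_ADDC1/TOK_SUBC1 and their
   *C2 counterparts, * via TOK_UMULL plus two cross products); shifts by a
   constant are open-coded, variable shifts fall back to __ashldi3/
   __lshrdi3/__ashrdi3; comparisons test the high words first and the low
   words (always unsigned) only when the high words are equal. */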
2042 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2044 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2045 return (a ^ b) >> 63 ? -x : x;
2048 static int gen_opic_lt(uint64_t a, uint64_t b)
2050 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
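/* The two helpers above fold signed 64-bit operations using unsigned
   arithmetic: gen_opic_sdiv() divides the absolute values (a >> 63 tests
   the sign bit, -a negates) and restores the sign of the quotient from
   a ^ b, while gen_opic_lt() flips the sign bits so that the unsigned '<'
   orders the values as signed numbers.  E.g. for a = -1, b = 0:
   (a ^ 1<<63) = 0x7fffffffffffffff < 0x8000000000000000 = (b ^ 1<<63),
   giving 1. */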
2053 /* handle integer constant optimizations and various
2054 machine-independent optimizations */
2055 static void gen_opic(int op)
2057 SValue *v1 = vtop - 1;
2058 SValue *v2 = vtop;
2059 int t1 = v1->type.t & VT_BTYPE;
2060 int t2 = v2->type.t & VT_BTYPE;
2061 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2062 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2063 uint64_t l1 = c1 ? v1->c.i : 0;
2064 uint64_t l2 = c2 ? v2->c.i : 0;
2065 int shm = (t1 == VT_LLONG) ? 63 : 31;
2067 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2068 l1 = ((uint32_t)l1 |
2069 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2070 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2071 l2 = ((uint32_t)l2 |
2072 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2074 if (c1 && c2) {
2075 switch(op) {
2076 case '+': l1 += l2; break;
2077 case '-': l1 -= l2; break;
2078 case '&': l1 &= l2; break;
2079 case '^': l1 ^= l2; break;
2080 case '|': l1 |= l2; break;
2081 case '*': l1 *= l2; break;
2083 case TOK_PDIV:
2084 case '/':
2085 case '%':
2086 case TOK_UDIV:
2087 case TOK_UMOD:
2088 /* if division by zero, generate explicit division */
2089 if (l2 == 0) {
2090 if (const_wanted)
2091 tcc_error("division by zero in constant");
2092 goto general_case;
2094 switch(op) {
2095 default: l1 = gen_opic_sdiv(l1, l2); break;
2096 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2097 case TOK_UDIV: l1 = l1 / l2; break;
2098 case TOK_UMOD: l1 = l1 % l2; break;
2100 break;
2101 case TOK_SHL: l1 <<= (l2 & shm); break;
2102 case TOK_SHR: l1 >>= (l2 & shm); break;
2103 case TOK_SAR:
2104 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2105 break;
2106 /* tests */
2107 case TOK_ULT: l1 = l1 < l2; break;
2108 case TOK_UGE: l1 = l1 >= l2; break;
2109 case TOK_EQ: l1 = l1 == l2; break;
2110 case TOK_NE: l1 = l1 != l2; break;
2111 case TOK_ULE: l1 = l1 <= l2; break;
2112 case TOK_UGT: l1 = l1 > l2; break;
2113 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2114 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2115 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2116 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2117 /* logical */
2118 case TOK_LAND: l1 = l1 && l2; break;
2119 case TOK_LOR: l1 = l1 || l2; break;
2120 default:
2121 goto general_case;
2123 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2124 l1 = ((uint32_t)l1 |
2125 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2126 v1->c.i = l1;
2127 vtop--;
2128 } else {
2129 /* if commutative ops, put c2 as constant */
2130 if (c1 && (op == '+' || op == '&' || op == '^' ||
2131 op == '|' || op == '*')) {
2132 vswap();
2133 c2 = c1; //c = c1, c1 = c2, c2 = c;
2134 l2 = l1; //l = l1, l1 = l2, l2 = l;
2136 if (!const_wanted &&
2137 c1 && ((l1 == 0 &&
2138 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2139 (l1 == -1 && op == TOK_SAR))) {
2140 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2141 vtop--;
2142 } else if (!const_wanted &&
2143 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2144 (op == '|' &&
2145 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2146 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2147 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2148 if (l2 == 1)
2149 vtop->c.i = 0;
2150 vswap();
2151 vtop--;
2152 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2153 op == TOK_PDIV) &&
2154 l2 == 1) ||
2155 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2156 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2157 l2 == 0) ||
2158 (op == '&' &&
2159 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2160 /* filter out NOP operations like x*1, x-0, x&-1... */
2161 vtop--;
2162 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2163 /* try to use shifts instead of muls or divs */
2164 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2165 int n = -1;
2166 while (l2) {
2167 l2 >>= 1;
2168 n++;
2170 vtop->c.i = n;
2171 if (op == '*')
2172 op = TOK_SHL;
2173 else if (op == TOK_PDIV)
2174 op = TOK_SAR;
2175 else
2176 op = TOK_SHR;
2178 goto general_case;
2179 } else if (c2 && (op == '+' || op == '-') &&
2180 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2181 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2182 /* symbol + constant case */
2183 if (op == '-')
2184 l2 = -l2;
2185 l2 += vtop[-1].c.i;
2186 /* The backends can't always deal with addends to symbols
2187 larger than +-1<<31. Don't construct such. */
2188 if ((int)l2 != l2)
2189 goto general_case;
2190 vtop--;
2191 vtop->c.i = l2;
2192 } else {
2193 general_case:
2194 /* call low level op generator */
2195 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2196 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2197 gen_opl(op);
2198 else
2199 gen_opi(op);
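/* For example, with the folding above (illustrative):
     3 + 4        -> replaced by the constant 7 at compile time
     x * 8        -> rewritten as x << 3 (power-of-two strength reduction)
     x * 1, x - 0 -> the no-op is simply dropped
     x / 0        -> an explicit division is emitted, unless a constant
                     expression is required, in which case it is an error */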
2204 /* generate a floating point operation with constant propagation */
2205 static void gen_opif(int op)
2207 int c1, c2;
2208 SValue *v1, *v2;
2209 #if defined _MSC_VER && defined __x86_64__
2210 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2211 volatile
2212 #endif
2213 long double f1, f2;
2215 v1 = vtop - 1;
2216 v2 = vtop;
2217 /* currently, we cannot do computations with forward symbols */
2218 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2219 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2220 if (c1 && c2) {
2221 if (v1->type.t == VT_FLOAT) {
2222 f1 = v1->c.f;
2223 f2 = v2->c.f;
2224 } else if (v1->type.t == VT_DOUBLE) {
2225 f1 = v1->c.d;
2226 f2 = v2->c.d;
2227 } else {
2228 f1 = v1->c.ld;
2229 f2 = v2->c.ld;
2232 /* NOTE: we only do constant propagation for finite numbers (not
2233 NaN or infinity) (ANSI spec) */
2234 if (!ieee_finite(f1) || !ieee_finite(f2))
2235 goto general_case;
2237 switch(op) {
2238 case '+': f1 += f2; break;
2239 case '-': f1 -= f2; break;
2240 case '*': f1 *= f2; break;
2241 case '/':
2242 if (f2 == 0.0) {
2243 /* If not in initializer we need to potentially generate
2244 FP exceptions at runtime, otherwise we want to fold. */
2245 if (!const_wanted)
2246 goto general_case;
2248 f1 /= f2;
2249 break;
2250 /* XXX: also handle tests? */
2251 default:
2252 goto general_case;
2254 /* XXX: overflow test ? */
2255 if (v1->type.t == VT_FLOAT) {
2256 v1->c.f = f1;
2257 } else if (v1->type.t == VT_DOUBLE) {
2258 v1->c.d = f1;
2259 } else {
2260 v1->c.ld = f1;
2262 vtop--;
2263 } else {
2264 general_case:
2265 gen_opf(op);
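/* For example, 1.5 + 2.25 is folded to 3.75 at compile time, while
   1.0 / 0.0 outside of a constant expression falls through to
   general_case so that any FP exception happens at runtime. */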
2269 static int pointed_size(CType *type)
2271 int align;
2272 return type_size(pointed_type(type), &align);
2275 static void vla_runtime_pointed_size(CType *type)
2277 int align;
2278 vla_runtime_type_size(pointed_type(type), &align);
2281 static inline int is_null_pointer(SValue *p)
2283 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2284 return 0;
2285 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2286 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2287 ((p->type.t & VT_BTYPE) == VT_PTR &&
2288 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2289 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2290 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
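/* Accepted null pointer constants are thus a plain integer 0 (int or
   long long) and an unqualified (void *)0. */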
2293 static inline int is_integer_btype(int bt)
2295 return (bt == VT_BYTE || bt == VT_SHORT ||
2296 bt == VT_INT || bt == VT_LLONG);
2299 /* check types for comparison or subtraction of pointers */
2300 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2302 CType *type1, *type2, tmp_type1, tmp_type2;
2303 int bt1, bt2;
2305 /* null pointers are accepted for all comparisons, as in gcc */
2306 if (is_null_pointer(p1) || is_null_pointer(p2))
2307 return;
2308 type1 = &p1->type;
2309 type2 = &p2->type;
2310 bt1 = type1->t & VT_BTYPE;
2311 bt2 = type2->t & VT_BTYPE;
2312 /* accept comparison between pointer and integer with a warning */
2313 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2314 if (op != TOK_LOR && op != TOK_LAND )
2315 tcc_warning("comparison between pointer and integer");
2316 return;
2319 /* both must be pointers or implicit function pointers */
2320 if (bt1 == VT_PTR) {
2321 type1 = pointed_type(type1);
2322 } else if (bt1 != VT_FUNC)
2323 goto invalid_operands;
2325 if (bt2 == VT_PTR) {
2326 type2 = pointed_type(type2);
2327 } else if (bt2 != VT_FUNC) {
2328 invalid_operands:
2329 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2331 if ((type1->t & VT_BTYPE) == VT_VOID ||
2332 (type2->t & VT_BTYPE) == VT_VOID)
2333 return;
2334 tmp_type1 = *type1;
2335 tmp_type2 = *type2;
2336 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2337 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2338 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2339 /* gcc-like error if '-' is used */
2340 if (op == '-')
2341 goto invalid_operands;
2342 else
2343 tcc_warning("comparison of distinct pointer types lacks a cast");
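/* For example, comparing an 'int *' with a 'char *' triggers
   "comparison of distinct pointer types lacks a cast", subtracting
   them is an error, and comparing a pointer with an integer gives
   "comparison between pointer and integer". */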
2347 /* generic gen_op: handles types problems */
2348 ST_FUNC void gen_op(int op)
2350 int u, t1, t2, bt1, bt2, t;
2351 CType type1;
2353 redo:
2354 t1 = vtop[-1].type.t;
2355 t2 = vtop[0].type.t;
2356 bt1 = t1 & VT_BTYPE;
2357 bt2 = t2 & VT_BTYPE;
2359 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2360 tcc_error("operation on a struct");
2361 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2362 if (bt2 == VT_FUNC) {
2363 mk_pointer(&vtop->type);
2364 gaddrof();
2366 if (bt1 == VT_FUNC) {
2367 vswap();
2368 mk_pointer(&vtop->type);
2369 gaddrof();
2370 vswap();
2372 goto redo;
2373 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2374 /* at least one operand is a pointer */
2375 /* relational op: must be both pointers */
2376 if (op >= TOK_ULT && op <= TOK_LOR) {
2377 check_comparison_pointer_types(vtop - 1, vtop, op);
2378 /* pointers are handled as unsigned */
2379 #if PTR_SIZE == 8
2380 t = VT_LLONG | VT_UNSIGNED;
2381 #else
2382 t = VT_INT | VT_UNSIGNED;
2383 #endif
2384 goto std_op;
2386 /* if both pointers, then it must be the '-' op */
2387 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2388 if (op != '-')
2389 tcc_error("cannot use pointers here");
2390 check_comparison_pointer_types(vtop - 1, vtop, op);
2391 /* XXX: check that types are compatible */
2392 if (vtop[-1].type.t & VT_VLA) {
2393 vla_runtime_pointed_size(&vtop[-1].type);
2394 } else {
2395 vpushi(pointed_size(&vtop[-1].type));
2397 vrott(3);
2398 gen_opic(op);
2399 vtop->type.t = ptrdiff_type.t;
2400 vswap();
2401 gen_op(TOK_PDIV);
2402 } else {
2403 /* exactly one pointer : must be '+' or '-'. */
2404 if (op != '-' && op != '+')
2405 tcc_error("cannot use pointers here");
2406 /* Put pointer as first operand */
2407 if (bt2 == VT_PTR) {
2408 vswap();
2409 t = t1, t1 = t2, t2 = t;
2411 #if PTR_SIZE == 4
2412 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2413 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2414 gen_cast_s(VT_INT);
2415 #endif
2416 type1 = vtop[-1].type;
2417 type1.t &= ~VT_ARRAY;
2418 if (vtop[-1].type.t & VT_VLA)
2419 vla_runtime_pointed_size(&vtop[-1].type);
2420 else {
2421 u = pointed_size(&vtop[-1].type);
2422 if (u < 0)
2423 tcc_error("unknown array element size");
2424 #if PTR_SIZE == 8
2425 vpushll(u);
2426 #else
2427 /* XXX: cast to int ? (long long case) */
2428 vpushi(u);
2429 #endif
2431 gen_op('*');
2432 #if 0
2433 /* #ifdef CONFIG_TCC_BCHECK
2434 The main reason for removing this code:
2435 #include <stdio.h>
2436 int main ()
2438 int v[10];
2439 int i = 10;
2440 int j = 9;
2441 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2442 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2444 When this code is on, the output looks like
2445 v+i-j = 0xfffffffe
2446 v+(i-j) = 0xbff84000
2448 /* if evaluating constant expression, no code should be
2449 generated, so no bound check */
2450 if (tcc_state->do_bounds_check && !const_wanted) {
2451 /* if bounded pointers, we generate a special code to
2452 test bounds */
2453 if (op == '-') {
2454 vpushi(0);
2455 vswap();
2456 gen_op('-');
2458 gen_bounded_ptr_add();
2459 } else
2460 #endif
2462 gen_opic(op);
2464 /* restore the type in case gen_opic() swapped operands */
2465 vtop->type = type1;
2467 } else if (is_float(bt1) || is_float(bt2)) {
2468 /* compute bigger type and do implicit casts */
2469 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2470 t = VT_LDOUBLE;
2471 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2472 t = VT_DOUBLE;
2473 } else {
2474 t = VT_FLOAT;
2476 /* floats can only be used for a few operations */
2477 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2478 (op < TOK_ULT || op > TOK_GT))
2479 tcc_error("invalid operands for binary operation");
2480 goto std_op;
2481 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2482 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2483 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2484 t |= VT_UNSIGNED;
2485 t |= (VT_LONG & t1);
2486 goto std_op;
2487 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2488 /* cast to biggest op */
2489 t = VT_LLONG | VT_LONG;
2490 if (bt1 == VT_LLONG)
2491 t &= t1;
2492 if (bt2 == VT_LLONG)
2493 t &= t2;
2494 /* convert to unsigned if it does not fit in a long long */
2495 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2496 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2497 t |= VT_UNSIGNED;
2498 goto std_op;
2499 } else {
2500 /* integer operations */
2501 t = VT_INT | (VT_LONG & (t1 | t2));
2502 /* convert to unsigned if it does not fit in an integer */
2503 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2504 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2505 t |= VT_UNSIGNED;
2506 std_op:
2507 /* XXX: currently, some unsigned operations are explicit, so
2508 we modify them here */
2509 if (t & VT_UNSIGNED) {
2510 if (op == TOK_SAR)
2511 op = TOK_SHR;
2512 else if (op == '/')
2513 op = TOK_UDIV;
2514 else if (op == '%')
2515 op = TOK_UMOD;
2516 else if (op == TOK_LT)
2517 op = TOK_ULT;
2518 else if (op == TOK_GT)
2519 op = TOK_UGT;
2520 else if (op == TOK_LE)
2521 op = TOK_ULE;
2522 else if (op == TOK_GE)
2523 op = TOK_UGE;
2525 vswap();
2526 type1.t = t;
2527 type1.ref = NULL;
2528 gen_cast(&type1);
2529 vswap();
2530 /* special case for shifts and long long: we keep the shift as
2531 an integer */
2532 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2533 type1.t = VT_INT;
2534 gen_cast(&type1);
2535 if (is_float(t))
2536 gen_opif(op);
2537 else
2538 gen_opic(op);
2539 if (op >= TOK_ULT && op <= TOK_GT) {
2540 /* relational op: the result is an int */
2541 vtop->type.t = VT_INT;
2542 } else {
2543 vtop->type.t = t;
2546 // Make sure that we have converted to an rvalue:
2547 if (vtop->r & VT_LVAL)
2548 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
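/* Pointer arithmetic example: for 'int *p', the expression p + 2 is
   lowered to p + 2 * sizeof(int), and p - q (both 'int *') becomes
   (p - q) / sizeof(int) via TOK_PDIV, yielding a ptrdiff_t result. */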
2551 #ifndef TCC_TARGET_ARM
2552 /* generic itof for unsigned long long case */
2553 static void gen_cvt_itof1(int t)
2555 #ifdef TCC_TARGET_ARM64
2556 gen_cvt_itof(t);
2557 #else
2558 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2559 (VT_LLONG | VT_UNSIGNED)) {
2561 if (t == VT_FLOAT)
2562 vpush_global_sym(&func_old_type, TOK___floatundisf);
2563 #if LDOUBLE_SIZE != 8
2564 else if (t == VT_LDOUBLE)
2565 vpush_global_sym(&func_old_type, TOK___floatundixf);
2566 #endif
2567 else
2568 vpush_global_sym(&func_old_type, TOK___floatundidf);
2569 vrott(2);
2570 gfunc_call(1);
2571 vpushi(0);
2572 vtop->r = reg_fret(t);
2573 } else {
2574 gen_cvt_itof(t);
2576 #endif
2578 #endif
2580 /* generic ftoi for unsigned long long case */
2581 static void gen_cvt_ftoi1(int t)
2583 #ifdef TCC_TARGET_ARM64
2584 gen_cvt_ftoi(t);
2585 #else
2586 int st;
2588 if (t == (VT_LLONG | VT_UNSIGNED)) {
2589 /* not handled natively */
2590 st = vtop->type.t & VT_BTYPE;
2591 if (st == VT_FLOAT)
2592 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2593 #if LDOUBLE_SIZE != 8
2594 else if (st == VT_LDOUBLE)
2595 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2596 #endif
2597 else
2598 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2599 vrott(2);
2600 gfunc_call(1);
2601 vpushi(0);
2602 vtop->r = REG_IRET;
2603 vtop->r2 = REG_LRET;
2604 } else {
2605 gen_cvt_ftoi(t);
2607 #endif
2610 /* force char or short cast */
2611 static void force_charshort_cast(int t)
2613 int bits, dbt;
2615 /* cannot cast static initializers */
2616 if (STATIC_DATA_WANTED)
2617 return;
2619 dbt = t & VT_BTYPE;
2620 /* XXX: add optimization if lvalue : just change type and offset */
2621 if (dbt == VT_BYTE)
2622 bits = 8;
2623 else
2624 bits = 16;
2625 if (t & VT_UNSIGNED) {
2626 vpushi((1 << bits) - 1);
2627 gen_op('&');
2628 } else {
2629 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2630 bits = 64 - bits;
2631 else
2632 bits = 32 - bits;
2633 vpushi(bits);
2634 gen_op(TOK_SHL);
2635 /* result must be signed or the SAR is converted to an SHL.
2636 This was not the case when "t" was a signed short
2637 and the last value on the stack was an unsigned int */
2638 vtop->type.t &= ~VT_UNSIGNED;
2639 vpushi(bits);
2640 gen_op(TOK_SAR);
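/* For example, casting an int to signed char shifts the value left by
   24 and arithmetic-shifts it right by 24 to sign-extend, while a cast
   to unsigned char simply masks with 0xff. */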
2644 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2645 static void gen_cast_s(int t)
2647 CType type;
2648 type.t = t;
2649 type.ref = NULL;
2650 gen_cast(&type);
2653 static void gen_cast(CType *type)
2655 int sbt, dbt, sf, df, c, p;
2657 /* special delayed cast for char/short */
2658 /* XXX: in some cases (multiple cascaded casts), it may still
2659 be incorrect */
2660 if (vtop->r & VT_MUSTCAST) {
2661 vtop->r &= ~VT_MUSTCAST;
2662 force_charshort_cast(vtop->type.t);
2665 /* bitfields first get cast to ints */
2666 if (vtop->type.t & VT_BITFIELD) {
2667 gv(RC_INT);
2670 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2671 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2673 if (sbt != dbt) {
2674 sf = is_float(sbt);
2675 df = is_float(dbt);
2676 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2677 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2678 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2679 c &= dbt != VT_LDOUBLE;
2680 #endif
2681 if (c) {
2682 /* constant case: we can do it now */
2683 /* XXX: in ISOC, cannot do it if error in convert */
2684 if (sbt == VT_FLOAT)
2685 vtop->c.ld = vtop->c.f;
2686 else if (sbt == VT_DOUBLE)
2687 vtop->c.ld = vtop->c.d;
2689 if (df) {
2690 if ((sbt & VT_BTYPE) == VT_LLONG) {
2691 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2692 vtop->c.ld = vtop->c.i;
2693 else
2694 vtop->c.ld = -(long double)-vtop->c.i;
2695 } else if(!sf) {
2696 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2697 vtop->c.ld = (uint32_t)vtop->c.i;
2698 else
2699 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2702 if (dbt == VT_FLOAT)
2703 vtop->c.f = (float)vtop->c.ld;
2704 else if (dbt == VT_DOUBLE)
2705 vtop->c.d = (double)vtop->c.ld;
2706 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2707 vtop->c.i = vtop->c.ld;
2708 } else if (sf && dbt == VT_BOOL) {
2709 vtop->c.i = (vtop->c.ld != 0);
2710 } else {
2711 if(sf)
2712 vtop->c.i = vtop->c.ld;
2713 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2715 else if (sbt & VT_UNSIGNED)
2716 vtop->c.i = (uint32_t)vtop->c.i;
2717 #if PTR_SIZE == 8
2718 else if (sbt == VT_PTR)
2720 #endif
2721 else if (sbt != VT_LLONG)
2722 vtop->c.i = ((uint32_t)vtop->c.i |
2723 -(vtop->c.i & 0x80000000));
2725 if (dbt == (VT_LLONG|VT_UNSIGNED))
2727 else if (dbt == VT_BOOL)
2728 vtop->c.i = (vtop->c.i != 0);
2729 #if PTR_SIZE == 8
2730 else if (dbt == VT_PTR)
2732 #endif
2733 else if (dbt != VT_LLONG) {
2734 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2735 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2736 0xffffffff);
2737 vtop->c.i &= m;
2738 if (!(dbt & VT_UNSIGNED))
2739 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2742 } else if (p && dbt == VT_BOOL) {
2743 vtop->r = VT_CONST;
2744 vtop->c.i = 1;
2745 } else {
2746 /* non constant case: generate code */
2747 if (sf && df) {
2748 /* convert from fp to fp */
2749 gen_cvt_ftof(dbt);
2750 } else if (df) {
2751 /* convert int to fp */
2752 gen_cvt_itof1(dbt);
2753 } else if (sf) {
2754 /* convert fp to int */
2755 if (dbt == VT_BOOL) {
2756 vpushi(0);
2757 gen_op(TOK_NE);
2758 } else {
2759 /* we handle char/short/etc... with generic code */
2760 if (dbt != (VT_INT | VT_UNSIGNED) &&
2761 dbt != (VT_LLONG | VT_UNSIGNED) &&
2762 dbt != VT_LLONG)
2763 dbt = VT_INT;
2764 gen_cvt_ftoi1(dbt);
2765 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2766 /* additional cast for char/short... */
2767 vtop->type.t = dbt;
2768 gen_cast(type);
2771 #if PTR_SIZE == 4
2772 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2773 if ((sbt & VT_BTYPE) != VT_LLONG) {
2774 /* scalar to long long */
2775 /* machine independent conversion */
2776 gv(RC_INT);
2777 /* generate high word */
2778 if (sbt == (VT_INT | VT_UNSIGNED)) {
2779 vpushi(0);
2780 gv(RC_INT);
2781 } else {
2782 if (sbt == VT_PTR) {
2783 /* cast from pointer to int before we apply
2784 shift operation, which pointers don't support */
2785 gen_cast_s(VT_INT);
2787 gv_dup();
2788 vpushi(31);
2789 gen_op(TOK_SAR);
2791 /* patch second register */
2792 vtop[-1].r2 = vtop->r;
2793 vpop();
2795 #else
2796 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2797 (dbt & VT_BTYPE) == VT_PTR ||
2798 (dbt & VT_BTYPE) == VT_FUNC) {
2799 if ((sbt & VT_BTYPE) != VT_LLONG &&
2800 (sbt & VT_BTYPE) != VT_PTR &&
2801 (sbt & VT_BTYPE) != VT_FUNC) {
2802 /* need to convert from 32bit to 64bit */
2803 gv(RC_INT);
2804 if (sbt != (VT_INT | VT_UNSIGNED)) {
2805 #if defined(TCC_TARGET_ARM64)
2806 gen_cvt_sxtw();
2807 #elif defined(TCC_TARGET_X86_64)
2808 int r = gv(RC_INT);
2809 /* x86_64 specific: movslq */
2810 o(0x6348);
2811 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2812 #else
2813 #error
2814 #endif
2817 #endif
2818 } else if (dbt == VT_BOOL) {
2819 /* scalar to bool */
2820 vpushi(0);
2821 gen_op(TOK_NE);
2822 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2823 (dbt & VT_BTYPE) == VT_SHORT) {
2824 if (sbt == VT_PTR) {
2825 vtop->type.t = VT_INT;
2826 tcc_warning("nonportable conversion from pointer to char/short");
2828 force_charshort_cast(dbt);
2829 } else if ((dbt & VT_BTYPE) == VT_INT) {
2830 /* scalar to int */
2831 if ((sbt & VT_BTYPE) == VT_LLONG) {
2832 #if PTR_SIZE == 4
2833 /* from long long: just take low order word */
2834 lexpand();
2835 vpop();
2836 #else
2837 vpushi(0xffffffff);
2838 vtop->type.t |= VT_UNSIGNED;
2839 gen_op('&');
2840 #endif
2842 /* if lvalue and single word type, nothing to do because
2843 the lvalue already contains the real type size (see
2844 VT_LVAL_xxx constants) */
2847 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2848 /* if we are casting between pointer types,
2849 we must update the VT_LVAL_xxx size */
2850 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2851 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2853 vtop->type = *type;
2854 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
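/* Constant example: (char)300 is folded here to 44 (300 & 0xff, sign
   bit clear) and (unsigned char)-1 to 255, without emitting any code. */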
2857 /* return type size as known at compile time. Put alignment at 'a' */
2858 ST_FUNC int type_size(CType *type, int *a)
2860 Sym *s;
2861 int bt;
2863 bt = type->t & VT_BTYPE;
2864 if (bt == VT_STRUCT) {
2865 /* struct/union */
2866 s = type->ref;
2867 *a = s->r;
2868 return s->c;
2869 } else if (bt == VT_PTR) {
2870 if (type->t & VT_ARRAY) {
2871 int ts;
2873 s = type->ref;
2874 ts = type_size(&s->type, a);
2876 if (ts < 0 && s->c < 0)
2877 ts = -ts;
2879 return ts * s->c;
2880 } else {
2881 *a = PTR_SIZE;
2882 return PTR_SIZE;
2884 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2885 return -1; /* incomplete enum */
2886 } else if (bt == VT_LDOUBLE) {
2887 *a = LDOUBLE_ALIGN;
2888 return LDOUBLE_SIZE;
2889 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2890 #ifdef TCC_TARGET_I386
2891 #ifdef TCC_TARGET_PE
2892 *a = 8;
2893 #else
2894 *a = 4;
2895 #endif
2896 #elif defined(TCC_TARGET_ARM)
2897 #ifdef TCC_ARM_EABI
2898 *a = 8;
2899 #else
2900 *a = 4;
2901 #endif
2902 #else
2903 *a = 8;
2904 #endif
2905 return 8;
2906 } else if (bt == VT_INT || bt == VT_FLOAT) {
2907 *a = 4;
2908 return 4;
2909 } else if (bt == VT_SHORT) {
2910 *a = 2;
2911 return 2;
2912 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2913 *a = 8;
2914 return 16;
2915 } else {
2916 /* char, void, function, _Bool */
2917 *a = 1;
2918 return 1;
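/* For example, 'int [10]' yields 40 with *a == 4, char and _Bool yield
   1, and an incomplete struct or enum yields -1. */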
2922 /* push type size as known at runtime on top of value stack. Put
2923 alignment at 'a' */
2924 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2926 if (type->t & VT_VLA) {
2927 type_size(&type->ref->type, a);
2928 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2929 } else {
2930 vpushi(type_size(type, a));
2934 /* return the pointed type of t */
2935 static inline CType *pointed_type(CType *type)
2937 return &type->ref->type;
2940 /* modify type so that it is a pointer to type. */
2941 ST_FUNC void mk_pointer(CType *type)
2943 Sym *s;
2944 s = sym_push(SYM_FIELD, type, 0, -1);
2945 type->t = VT_PTR | (type->t & VT_STORAGE);
2946 type->ref = s;
2949 /* compare function types. OLD functions match any new functions */
2950 static int is_compatible_func(CType *type1, CType *type2)
2952 Sym *s1, *s2;
2954 s1 = type1->ref;
2955 s2 = type2->ref;
2956 if (s1->f.func_call != s2->f.func_call)
2957 return 0;
2958 if (s1->f.func_type != s2->f.func_type
2959 && s1->f.func_type != FUNC_OLD
2960 && s2->f.func_type != FUNC_OLD)
2961 return 0;
2962 /* we should check the function return type for FUNC_OLD too
2963 but that causes problems with the internally used support
2964 functions such as TOK_memmove */
2965 if (s1->f.func_type == FUNC_OLD && !s1->next)
2966 return 1;
2967 if (s2->f.func_type == FUNC_OLD && !s2->next)
2968 return 1;
2969 for (;;) {
2970 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2971 return 0;
2972 s1 = s1->next;
2973 s2 = s2->next;
2974 if (!s1)
2975 return !s2;
2976 if (!s2)
2977 return 0;
2981 /* return true if type1 and type2 are the same. If unqualified is
2982 true, qualifiers on the types are ignored.
2984 static int compare_types(CType *type1, CType *type2, int unqualified)
2986 int bt1, t1, t2;
2988 t1 = type1->t & VT_TYPE;
2989 t2 = type2->t & VT_TYPE;
2990 if (unqualified) {
2991 /* strip qualifiers before comparing */
2992 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2993 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2996 /* Default vs. explicit signedness only matters for char */
2997 if ((t1 & VT_BTYPE) != VT_BYTE) {
2998 t1 &= ~VT_DEFSIGN;
2999 t2 &= ~VT_DEFSIGN;
3001 /* XXX: bitfields ? */
3002 if (t1 != t2)
3003 return 0;
3005 if ((t1 & VT_ARRAY)
3006 && !(type1->ref->c < 0
3007 || type2->ref->c < 0
3008 || type1->ref->c == type2->ref->c))
3009 return 0;
3011 /* test more complicated cases */
3012 bt1 = t1 & VT_BTYPE;
3013 if (bt1 == VT_PTR) {
3014 type1 = pointed_type(type1);
3015 type2 = pointed_type(type2);
3016 return is_compatible_types(type1, type2);
3017 } else if (bt1 == VT_STRUCT) {
3018 return (type1->ref == type2->ref);
3019 } else if (bt1 == VT_FUNC) {
3020 return is_compatible_func(type1, type2);
3021 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3022 return type1->ref == type2->ref;
3023 } else {
3024 return 1;
3028 /* return true if type1 and type2 are exactly the same (including
3029 qualifiers).
3031 static int is_compatible_types(CType *type1, CType *type2)
3033 return compare_types(type1,type2,0);
3036 /* return true if type1 and type2 are the same (ignoring qualifiers).
3038 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3040 return compare_types(type1,type2,1);
3043 /* print a type. If 'varstr' is not NULL, then the variable is also
3044 printed in the type */
3045 /* XXX: union */
3046 /* XXX: add array and function pointers */
3047 static void type_to_str(char *buf, int buf_size,
3048 CType *type, const char *varstr)
3050 int bt, v, t;
3051 Sym *s, *sa;
3052 char buf1[256];
3053 const char *tstr;
3055 t = type->t;
3056 bt = t & VT_BTYPE;
3057 buf[0] = '\0';
3059 if (t & VT_EXTERN)
3060 pstrcat(buf, buf_size, "extern ");
3061 if (t & VT_STATIC)
3062 pstrcat(buf, buf_size, "static ");
3063 if (t & VT_TYPEDEF)
3064 pstrcat(buf, buf_size, "typedef ");
3065 if (t & VT_INLINE)
3066 pstrcat(buf, buf_size, "inline ");
3067 if (t & VT_VOLATILE)
3068 pstrcat(buf, buf_size, "volatile ");
3069 if (t & VT_CONSTANT)
3070 pstrcat(buf, buf_size, "const ");
3072 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3073 || ((t & VT_UNSIGNED)
3074 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3075 && !IS_ENUM(t)
3077 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3079 buf_size -= strlen(buf);
3080 buf += strlen(buf);
3082 switch(bt) {
3083 case VT_VOID:
3084 tstr = "void";
3085 goto add_tstr;
3086 case VT_BOOL:
3087 tstr = "_Bool";
3088 goto add_tstr;
3089 case VT_BYTE:
3090 tstr = "char";
3091 goto add_tstr;
3092 case VT_SHORT:
3093 tstr = "short";
3094 goto add_tstr;
3095 case VT_INT:
3096 tstr = "int";
3097 goto maybe_long;
3098 case VT_LLONG:
3099 tstr = "long long";
3100 maybe_long:
3101 if (t & VT_LONG)
3102 tstr = "long";
3103 if (!IS_ENUM(t))
3104 goto add_tstr;
3105 tstr = "enum ";
3106 goto tstruct;
3107 case VT_FLOAT:
3108 tstr = "float";
3109 goto add_tstr;
3110 case VT_DOUBLE:
3111 tstr = "double";
3112 goto add_tstr;
3113 case VT_LDOUBLE:
3114 tstr = "long double";
3115 add_tstr:
3116 pstrcat(buf, buf_size, tstr);
3117 break;
3118 case VT_STRUCT:
3119 tstr = "struct ";
3120 if (IS_UNION(t))
3121 tstr = "union ";
3122 tstruct:
3123 pstrcat(buf, buf_size, tstr);
3124 v = type->ref->v & ~SYM_STRUCT;
3125 if (v >= SYM_FIRST_ANOM)
3126 pstrcat(buf, buf_size, "<anonymous>");
3127 else
3128 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3129 break;
3130 case VT_FUNC:
3131 s = type->ref;
3132 buf1[0]=0;
3133 if (varstr && '*' == *varstr) {
3134 pstrcat(buf1, sizeof(buf1), "(");
3135 pstrcat(buf1, sizeof(buf1), varstr);
3136 pstrcat(buf1, sizeof(buf1), ")");
3138 pstrcat(buf1, buf_size, "(");
3139 sa = s->next;
3140 while (sa != NULL) {
3141 char buf2[256];
3142 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3143 pstrcat(buf1, sizeof(buf1), buf2);
3144 sa = sa->next;
3145 if (sa)
3146 pstrcat(buf1, sizeof(buf1), ", ");
3148 if (s->f.func_type == FUNC_ELLIPSIS)
3149 pstrcat(buf1, sizeof(buf1), ", ...");
3150 pstrcat(buf1, sizeof(buf1), ")");
3151 type_to_str(buf, buf_size, &s->type, buf1);
3152 goto no_var;
3153 case VT_PTR:
3154 s = type->ref;
3155 if (t & VT_ARRAY) {
3156 if (varstr && '*' == *varstr)
3157 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3158 else
3159 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3160 type_to_str(buf, buf_size, &s->type, buf1);
3161 goto no_var;
3163 pstrcpy(buf1, sizeof(buf1), "*");
3164 if (t & VT_CONSTANT)
3165 pstrcat(buf1, buf_size, "const ");
3166 if (t & VT_VOLATILE)
3167 pstrcat(buf1, buf_size, "volatile ");
3168 if (varstr)
3169 pstrcat(buf1, sizeof(buf1), varstr);
3170 type_to_str(buf, buf_size, &s->type, buf1);
3171 goto no_var;
3173 if (varstr) {
3174 pstrcat(buf, buf_size, " ");
3175 pstrcat(buf, buf_size, varstr);
3177 no_var: ;
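/* For example, a 'char *' variable named p is printed as "char *p",
   and an unsigned int is printed as "unsigned int". */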
3180 /* verify type compatibility to store vtop in 'dt' type, and generate
3181 casts if needed. */
3182 static void gen_assign_cast(CType *dt)
3184 CType *st, *type1, *type2;
3185 char buf1[256], buf2[256];
3186 int dbt, sbt, qualwarn, lvl;
3188 st = &vtop->type; /* source type */
3189 dbt = dt->t & VT_BTYPE;
3190 sbt = st->t & VT_BTYPE;
3191 if (sbt == VT_VOID || dbt == VT_VOID) {
3192 if (sbt == VT_VOID && dbt == VT_VOID)
3193 ; /* It is Ok if both are void */
3194 else
3195 tcc_error("cannot cast from/to void");
3197 if (dt->t & VT_CONSTANT)
3198 tcc_warning("assignment of read-only location");
3199 switch(dbt) {
3200 case VT_PTR:
3201 /* special cases for pointers */
3202 /* '0' can also be a pointer */
3203 if (is_null_pointer(vtop))
3204 break;
3205 /* accept implicit pointer to integer cast with warning */
3206 if (is_integer_btype(sbt)) {
3207 tcc_warning("assignment makes pointer from integer without a cast");
3208 break;
3210 type1 = pointed_type(dt);
3211 if (sbt == VT_PTR)
3212 type2 = pointed_type(st);
3213 else if (sbt == VT_FUNC)
3214 type2 = st; /* a function is implicitly a function pointer */
3215 else
3216 goto error;
3217 if (is_compatible_types(type1, type2))
3218 break;
3219 for (qualwarn = lvl = 0;; ++lvl) {
3220 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3221 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3222 qualwarn = 1;
3223 dbt = type1->t & (VT_BTYPE|VT_LONG);
3224 sbt = type2->t & (VT_BTYPE|VT_LONG);
3225 if (dbt != VT_PTR || sbt != VT_PTR)
3226 break;
3227 type1 = pointed_type(type1);
3228 type2 = pointed_type(type2);
3230 if (!is_compatible_unqualified_types(type1, type2)) {
3231 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3232 /* void * can match anything */
3233 } else if (dbt == sbt
3234 && is_integer_btype(sbt & VT_BTYPE)
3235 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3236 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3237 /* Like GCC, don't warn by default for mere changes
3238 in pointer target signedness. Do warn for different
3239 base types, though, in particular for unsigned enums
3240 and signed int targets. */
3241 } else {
3242 tcc_warning("assignment from incompatible pointer type");
3243 break;
3246 if (qualwarn)
3247 tcc_warning("assignment discards qualifiers from pointer target type");
3248 break;
3249 case VT_BYTE:
3250 case VT_SHORT:
3251 case VT_INT:
3252 case VT_LLONG:
3253 if (sbt == VT_PTR || sbt == VT_FUNC) {
3254 tcc_warning("assignment makes integer from pointer without a cast");
3255 } else if (sbt == VT_STRUCT) {
3256 goto case_VT_STRUCT;
3258 /* XXX: more tests */
3259 break;
3260 case VT_STRUCT:
3261 case_VT_STRUCT:
3262 if (!is_compatible_unqualified_types(dt, st)) {
3263 error:
3264 type_to_str(buf1, sizeof(buf1), st, NULL);
3265 type_to_str(buf2, sizeof(buf2), dt, NULL);
3266 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3268 break;
3270 gen_cast(dt);
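/* For example, 'int *p = 5;' warns "assignment makes pointer from
   integer without a cast", 'int i = p;' warns "assignment makes integer
   from pointer without a cast", and incompatible struct types are a
   hard error via the "cannot cast" message above. */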
3273 /* store vtop in lvalue pushed on stack */
3274 ST_FUNC void vstore(void)
3276 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3278 ft = vtop[-1].type.t;
3279 sbt = vtop->type.t & VT_BTYPE;
3280 dbt = ft & VT_BTYPE;
3281 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3282 (sbt == VT_INT && dbt == VT_SHORT))
3283 && !(vtop->type.t & VT_BITFIELD)) {
3284 /* optimize char/short casts */
3285 delayed_cast = VT_MUSTCAST;
3286 vtop->type.t = ft & VT_TYPE;
3287 /* XXX: factorize */
3288 if (ft & VT_CONSTANT)
3289 tcc_warning("assignment of read-only location");
3290 } else {
3291 delayed_cast = 0;
3292 if (!(ft & VT_BITFIELD))
3293 gen_assign_cast(&vtop[-1].type);
3296 if (sbt == VT_STRUCT) {
3297 /* if structure, only generate pointer */
3298 /* structure assignment : generate memcpy */
3299 /* XXX: optimize if small size */
3300 size = type_size(&vtop->type, &align);
3302 /* destination */
3303 vswap();
3304 vtop->type.t = VT_PTR;
3305 gaddrof();
3307 /* address of memcpy() */
3308 #ifdef TCC_ARM_EABI
3309 if(!(align & 7))
3310 vpush_global_sym(&func_old_type, TOK_memcpy8);
3311 else if(!(align & 3))
3312 vpush_global_sym(&func_old_type, TOK_memcpy4);
3313 else
3314 #endif
3315 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3316 vpush_global_sym(&func_old_type, TOK_memmove);
3318 vswap();
3319 /* source */
3320 vpushv(vtop - 2);
3321 vtop->type.t = VT_PTR;
3322 gaddrof();
3323 /* type size */
3324 vpushi(size);
3325 gfunc_call(3);
3327 /* leave source on stack */
3328 } else if (ft & VT_BITFIELD) {
3329 /* bitfield store handling */
3331 /* save lvalue as expression result (example: s.b = s.a = n;) */
3332 vdup(), vtop[-1] = vtop[-2];
3334 bit_pos = BIT_POS(ft);
3335 bit_size = BIT_SIZE(ft);
3336 /* remove bit field info to avoid loops */
3337 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3339 if ((ft & VT_BTYPE) == VT_BOOL) {
3340 gen_cast(&vtop[-1].type);
3341 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3344 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3345 if (r == VT_STRUCT) {
3346 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3347 store_packed_bf(bit_pos, bit_size);
3348 } else {
3349 unsigned long long mask = (1ULL << bit_size) - 1;
3350 if ((ft & VT_BTYPE) != VT_BOOL) {
3351 /* mask source */
3352 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3353 vpushll(mask);
3354 else
3355 vpushi((unsigned)mask);
3356 gen_op('&');
3358 /* shift source */
3359 vpushi(bit_pos);
3360 gen_op(TOK_SHL);
3361 vswap();
3362 /* duplicate destination */
3363 vdup();
3364 vrott(3);
3365 /* load destination, mask and or with source */
3366 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3367 vpushll(~(mask << bit_pos));
3368 else
3369 vpushi(~((unsigned)mask << bit_pos));
3370 gen_op('&');
3371 gen_op('|');
3372 /* store result */
3373 vstore();
3374 /* ... and discard */
3375 vpop();
3377 } else if (dbt == VT_VOID) {
3378 --vtop;
3379 } else {
3380 #ifdef CONFIG_TCC_BCHECK
3381 /* bound check case */
3382 if (vtop[-1].r & VT_MUSTBOUND) {
3383 vswap();
3384 gbound();
3385 vswap();
3387 #endif
3388 rc = RC_INT;
3389 if (is_float(ft)) {
3390 rc = RC_FLOAT;
3391 #ifdef TCC_TARGET_X86_64
3392 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3393 rc = RC_ST0;
3394 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3395 rc = RC_FRET;
3397 #endif
3399 r = gv(rc); /* generate value */
3400 /* if lvalue was saved on stack, must read it */
3401 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3402 SValue sv;
3403 t = get_reg(RC_INT);
3404 #if PTR_SIZE == 8
3405 sv.type.t = VT_PTR;
3406 #else
3407 sv.type.t = VT_INT;
3408 #endif
3409 sv.r = VT_LOCAL | VT_LVAL;
3410 sv.c.i = vtop[-1].c.i;
3411 load(t, &sv);
3412 vtop[-1].r = t | VT_LVAL;
3414 /* two-word case handling: store second register at word + 4 (or +8 for x86-64) */
3415 #if PTR_SIZE == 8
3416 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3417 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3418 #else
3419 if ((ft & VT_BTYPE) == VT_LLONG) {
3420 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3421 #endif
3422 vtop[-1].type.t = load_type;
3423 store(r, vtop - 1);
3424 vswap();
3425 /* convert to int to increment easily */
3426 vtop->type.t = addr_type;
3427 gaddrof();
3428 vpushi(load_size);
3429 gen_op('+');
3430 vtop->r |= VT_LVAL;
3431 vswap();
3432 vtop[-1].type.t = load_type;
3433 /* XXX: it works because r2 is spilled last ! */
3434 store(vtop->r2, vtop - 1);
3435 } else {
3436 store(r, vtop - 1);
3439 vswap();
3440 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3441 vtop->r |= delayed_cast;
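/* Note the three main paths above: struct assignment becomes a
   memmove(dest, src, size) call (or an aligned memcpy variant on ARM
   EABI), bit-field stores mask/shift/or the value into the destination
   word, and plain scalars go through store(). */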
3445 /* 'post' selects post- vs. pre-increment/decrement. c is the token ++ or -- */
3446 ST_FUNC void inc(int post, int c)
3448 test_lvalue();
3449 vdup(); /* save lvalue */
3450 if (post) {
3451 gv_dup(); /* duplicate value */
3452 vrotb(3);
3453 vrotb(3);
3455 /* add constant */
3456 vpushi(c - TOK_MID);
3457 gen_op('+');
3458 vstore(); /* store value */
3459 if (post)
3460 vpop(); /* if post op, return saved value */
3463 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3465 /* read the string */
3466 if (tok != TOK_STR)
3467 expect(msg);
3468 cstr_new(astr);
3469 while (tok == TOK_STR) {
3470 /* XXX: add \0 handling too ? */
3471 cstr_cat(astr, tokc.str.data, -1);
3472 next();
3474 cstr_ccat(astr, '\0');
3477 /* If I is >= 1 and a power of two, returns log2(i)+1.
3478 If I is 0 returns 0. */
3479 static int exact_log2p1(int i)
3481 int ret;
3482 if (!i)
3483 return 0;
3484 for (ret = 1; i >= 1 << 8; ret += 8)
3485 i >>= 8;
3486 if (i >= 1 << 4)
3487 ret += 4, i >>= 4;
3488 if (i >= 1 << 2)
3489 ret += 2, i >>= 2;
3490 if (i >= 1 << 1)
3491 ret++;
3492 return ret;
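/* For example: exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(0) == 0. */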
3495 /* Parse __attribute__((...)) GNUC extension. */
3496 static void parse_attribute(AttributeDef *ad)
3498 int t, n;
3499 CString astr;
3501 redo:
3502 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3503 return;
3504 next();
3505 skip('(');
3506 skip('(');
3507 while (tok != ')') {
3508 if (tok < TOK_IDENT)
3509 expect("attribute name");
3510 t = tok;
3511 next();
3512 switch(t) {
3513 case TOK_CLEANUP1:
3514 case TOK_CLEANUP2:
3516 Sym *s;
3518 skip('(');
3519 s = sym_find(tok);
3520 if (!s) {
3521 tcc_warning("implicit declaration of function '%s'",
3522 get_tok_str(tok, &tokc));
3523 s = external_global_sym(tok, &func_old_type);
3525 ad->cleanup_func = s;
3526 next();
3527 skip(')');
3528 break;
3530 case TOK_SECTION1:
3531 case TOK_SECTION2:
3532 skip('(');
3533 parse_mult_str(&astr, "section name");
3534 ad->section = find_section(tcc_state, (char *)astr.data);
3535 skip(')');
3536 cstr_free(&astr);
3537 break;
3538 case TOK_ALIAS1:
3539 case TOK_ALIAS2:
3540 skip('(');
3541 parse_mult_str(&astr, "alias(\"target\")");
3542 ad->alias_target = /* save string as token, for later */
3543 tok_alloc((char*)astr.data, astr.size-1)->tok;
3544 skip(')');
3545 cstr_free(&astr);
3546 break;
3547 case TOK_VISIBILITY1:
3548 case TOK_VISIBILITY2:
3549 skip('(');
3550 parse_mult_str(&astr,
3551 "visibility(\"default|hidden|internal|protected\")");
3552 if (!strcmp (astr.data, "default"))
3553 ad->a.visibility = STV_DEFAULT;
3554 else if (!strcmp (astr.data, "hidden"))
3555 ad->a.visibility = STV_HIDDEN;
3556 else if (!strcmp (astr.data, "internal"))
3557 ad->a.visibility = STV_INTERNAL;
3558 else if (!strcmp (astr.data, "protected"))
3559 ad->a.visibility = STV_PROTECTED;
3560 else
3561 expect("visibility(\"default|hidden|internal|protected\")");
3562 skip(')');
3563 cstr_free(&astr);
3564 break;
3565 case TOK_ALIGNED1:
3566 case TOK_ALIGNED2:
3567 if (tok == '(') {
3568 next();
3569 n = expr_const();
3570 if (n <= 0 || (n & (n - 1)) != 0)
3571 tcc_error("alignment must be a positive power of two");
3572 skip(')');
3573 } else {
3574 n = MAX_ALIGN;
3576 ad->a.aligned = exact_log2p1(n);
3577 if (n != 1 << (ad->a.aligned - 1))
3578 tcc_error("alignment of %d is larger than implemented", n);
3579 break;
3580 case TOK_PACKED1:
3581 case TOK_PACKED2:
3582 ad->a.packed = 1;
3583 break;
3584 case TOK_WEAK1:
3585 case TOK_WEAK2:
3586 ad->a.weak = 1;
3587 break;
3588 case TOK_UNUSED1:
3589 case TOK_UNUSED2:
3590 /* currently, no need to handle it because tcc does not
3591 track unused objects */
3592 break;
3593 case TOK_NORETURN1:
3594 case TOK_NORETURN2:
3595 ad->f.func_noreturn = 1;
3596 break;
3597 case TOK_CDECL1:
3598 case TOK_CDECL2:
3599 case TOK_CDECL3:
3600 ad->f.func_call = FUNC_CDECL;
3601 break;
3602 case TOK_STDCALL1:
3603 case TOK_STDCALL2:
3604 case TOK_STDCALL3:
3605 ad->f.func_call = FUNC_STDCALL;
3606 break;
3607 #ifdef TCC_TARGET_I386
3608 case TOK_REGPARM1:
3609 case TOK_REGPARM2:
3610 skip('(');
3611 n = expr_const();
3612 if (n > 3)
3613 n = 3;
3614 else if (n < 0)
3615 n = 0;
3616 if (n > 0)
3617 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3618 skip(')');
3619 break;
3620 case TOK_FASTCALL1:
3621 case TOK_FASTCALL2:
3622 case TOK_FASTCALL3:
3623 ad->f.func_call = FUNC_FASTCALLW;
3624 break;
3625 #endif
3626 case TOK_MODE:
3627 skip('(');
3628 switch(tok) {
3629 case TOK_MODE_DI:
3630 ad->attr_mode = VT_LLONG + 1;
3631 break;
3632 case TOK_MODE_QI:
3633 ad->attr_mode = VT_BYTE + 1;
3634 break;
3635 case TOK_MODE_HI:
3636 ad->attr_mode = VT_SHORT + 1;
3637 break;
3638 case TOK_MODE_SI:
3639 case TOK_MODE_word:
3640 ad->attr_mode = VT_INT + 1;
3641 break;
3642 default:
3643 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3644 break;
3646 next();
3647 skip(')');
3648 break;
3649 case TOK_DLLEXPORT:
3650 ad->a.dllexport = 1;
3651 break;
3652 case TOK_NODECORATE:
3653 ad->a.nodecorate = 1;
3654 break;
3655 case TOK_DLLIMPORT:
3656 ad->a.dllimport = 1;
3657 break;
3658 default:
3659 if (tcc_state->warn_unsupported)
3660 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3661 /* skip parameters */
3662 if (tok == '(') {
3663 int parenthesis = 0;
3664 do {
3665 if (tok == '(')
3666 parenthesis++;
3667 else if (tok == ')')
3668 parenthesis--;
3669 next();
3670 } while (parenthesis && tok != -1);
3672 break;
3674 if (tok != ',')
3675 break;
3676 next();
3678 skip(')');
3679 skip(')');
3680 goto redo;
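/* Handles declarations such as
     int buf[64] __attribute__((aligned(16), section(".mydata")));
   unknown attributes are skipped, with a warning when warn_unsupported
   is set. */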
3683 static Sym * find_field (CType *type, int v, int *cumofs)
3685 Sym *s = type->ref;
3686 v |= SYM_FIELD;
3687 while ((s = s->next) != NULL) {
3688 if ((s->v & SYM_FIELD) &&
3689 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3690 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3691 Sym *ret = find_field (&s->type, v, cumofs);
3692 if (ret) {
3693 *cumofs += s->c;
3694 return ret;
3697 if (s->v == v)
3698 break;
3700 return s;
3703 static void struct_layout(CType *type, AttributeDef *ad)
3705 int size, align, maxalign, offset, c, bit_pos, bit_size;
3706 int packed, a, bt, prevbt, prev_bit_size;
3707 int pcc = !tcc_state->ms_bitfields;
3708 int pragma_pack = *tcc_state->pack_stack_ptr;
3709 Sym *f;
3711 maxalign = 1;
3712 offset = 0;
3713 c = 0;
3714 bit_pos = 0;
3715 prevbt = VT_STRUCT; /* make it never match */
3716 prev_bit_size = 0;
3718 //#define BF_DEBUG
3720 for (f = type->ref->next; f; f = f->next) {
3721 if (f->type.t & VT_BITFIELD)
3722 bit_size = BIT_SIZE(f->type.t);
3723 else
3724 bit_size = -1;
3725 size = type_size(&f->type, &align);
3726 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3727 packed = 0;
3729 if (pcc && bit_size == 0) {
3730 /* in pcc mode, packing does not affect zero-width bitfields */
3732 } else {
3733 /* in pcc mode, attribute packed overrides if set. */
3734 if (pcc && (f->a.packed || ad->a.packed))
3735 align = packed = 1;
3737 /* pragma pack overrides align if smaller, and always packs bitfields */
3738 if (pragma_pack) {
3739 packed = 1;
3740 if (pragma_pack < align)
3741 align = pragma_pack;
3742 /* in pcc mode pragma pack also overrides individual align */
3743 if (pcc && pragma_pack < a)
3744 a = 0;
3747 /* some individual align was specified */
3748 if (a)
3749 align = a;
3751 if (type->ref->type.t == VT_UNION) {
3752 if (pcc && bit_size >= 0)
3753 size = (bit_size + 7) >> 3;
3754 offset = 0;
3755 if (size > c)
3756 c = size;
3758 } else if (bit_size < 0) {
3759 if (pcc)
3760 c += (bit_pos + 7) >> 3;
3761 c = (c + align - 1) & -align;
3762 offset = c;
3763 if (size > 0)
3764 c += size;
3765 bit_pos = 0;
3766 prevbt = VT_STRUCT;
3767 prev_bit_size = 0;
3769 } else {
3770 /* A bit-field. Layout is more complicated. There are two
3771 options: PCC (GCC) compatible and MS compatible */
3772 if (pcc) {
3773 /* In PCC layout a bit-field is placed adjacent to the
3774 preceding bit-fields, except if:
3775 - it has zero-width
3776 - an individual alignment was given
3777 - it would overflow its base type container and
3778 there is no packing */
3779 if (bit_size == 0) {
3780 new_field:
3781 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3782 bit_pos = 0;
3783 } else if (f->a.aligned) {
3784 goto new_field;
3785 } else if (!packed) {
3786 int a8 = align * 8;
3787 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3788 if (ofs > size / align)
3789 goto new_field;
3792 /* in pcc mode, long long bitfields have type int if they fit */
3793 if (size == 8 && bit_size <= 32)
3794 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3796 while (bit_pos >= align * 8)
3797 c += align, bit_pos -= align * 8;
3798 offset = c;
3800 /* In PCC layout named bit-fields influence the alignment
3801 of the containing struct using the base type's alignment,
3802 except for packed fields (which here have correct align). */
3803 if (f->v & SYM_FIRST_ANOM
3804 // && bit_size // ??? gcc on ARM/rpi does that
3806 align = 1;
3808 } else {
3809 bt = f->type.t & VT_BTYPE;
3810 if ((bit_pos + bit_size > size * 8)
3811 || (bit_size > 0) == (bt != prevbt)
3813 c = (c + align - 1) & -align;
3814 offset = c;
3815 bit_pos = 0;
3816 /* In MS bitfield mode a bit-field run always uses
3817 at least as many bits as the underlying type.
3818 To start a new run it's also required that this
3819 or the last bit-field had non-zero width. */
3820 if (bit_size || prev_bit_size)
3821 c += size;
3823 /* In MS layout the record's alignment is normally
3824 influenced by the field, except for a zero-width
3825 field at the start of a run (but by further zero-width
3826 fields it is again). */
3827 if (bit_size == 0 && prevbt != bt)
3828 align = 1;
3829 prevbt = bt;
3830 prev_bit_size = bit_size;
3833 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3834 | (bit_pos << VT_STRUCT_SHIFT);
3835 bit_pos += bit_size;
3837 if (align > maxalign)
3838 maxalign = align;
3840 #ifdef BF_DEBUG
3841 printf("set field %s offset %-2d size %-2d align %-2d",
3842 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3843 if (f->type.t & VT_BITFIELD) {
3844 printf(" pos %-2d bits %-2d",
3845 BIT_POS(f->type.t),
3846 BIT_SIZE(f->type.t)
3849 printf("\n");
3850 #endif
3852 f->c = offset;
3853 f->r = 0;
3856 if (pcc)
3857 c += (bit_pos + 7) >> 3;
3859 /* store size and alignment */
3860 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3861 if (a < maxalign)
3862 a = maxalign;
3863 type->ref->r = a;
3864 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3865 /* can happen if individual align for some member was given. In
3866 this case MSVC ignores maxalign when aligning the size */
3867 a = pragma_pack;
3868 if (a < bt)
3869 a = bt;
3871 c = (c + a - 1) & -a;
3872 type->ref->c = c;
3874 #ifdef BF_DEBUG
3875 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3876 #endif
3878 /* check whether we can access bitfields by their type */
3879 for (f = type->ref->next; f; f = f->next) {
3880 int s, px, cx, c0;
3881 CType t;
3883 if (0 == (f->type.t & VT_BITFIELD))
3884 continue;
3885 f->type.ref = f;
3886 f->auxtype = -1;
3887 bit_size = BIT_SIZE(f->type.t);
3888 if (bit_size == 0)
3889 continue;
3890 bit_pos = BIT_POS(f->type.t);
3891 size = type_size(&f->type, &align);
3892 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3893 continue;
3895 /* try to access the field using a different type */
3896 c0 = -1, s = align = 1;
3897 for (;;) {
3898 px = f->c * 8 + bit_pos;
3899 cx = (px >> 3) & -align;
3900 px = px - (cx << 3);
3901 if (c0 == cx)
3902 break;
3903 s = (px + bit_size + 7) >> 3;
3904 if (s > 4) {
3905 t.t = VT_LLONG;
3906 } else if (s > 2) {
3907 t.t = VT_INT;
3908 } else if (s > 1) {
3909 t.t = VT_SHORT;
3910 } else {
3911 t.t = VT_BYTE;
3913 s = type_size(&t, &align);
3914 c0 = cx;
3917 if (px + bit_size <= s * 8 && cx + s <= c) {
3918 /* update offset and bit position */
3919 f->c = cx;
3920 bit_pos = px;
3921 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3922 | (bit_pos << VT_STRUCT_SHIFT);
3923 if (s != size)
3924 f->auxtype = t.t;
3925 #ifdef BF_DEBUG
3926 printf("FIX field %s offset %-2d size %-2d align %-2d "
3927 "pos %-2d bits %-2d\n",
3928 get_tok_str(f->v & ~SYM_FIELD, NULL),
3929 cx, s, align, px, bit_size);
3930 #endif
3931 } else {
3932 /* fall back to load/store single-byte wise */
3933 f->auxtype = VT_STRUCT;
3934 #ifdef BF_DEBUG
3935 printf("FIX field %s : load byte-wise\n",
3936 get_tok_str(f->v & ~SYM_FIELD, NULL));
3937 #endif
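/* Illustrative result in the PCC/gcc-compatible mode: for
     struct { int a:3, b:5; char c; }
   a and b share bits 0..7 of the first int, c is placed at offset 1,
   and the struct is padded to sizeof == 4 (the int alignment). */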
3942 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3943 static void struct_decl(CType *type, int u)
3945 int v, c, size, align, flexible;
3946 int bit_size, bsize, bt;
3947 Sym *s, *ss, **ps;
3948 AttributeDef ad, ad1;
3949 CType type1, btype;
3951 memset(&ad, 0, sizeof ad);
3952 next();
3953 parse_attribute(&ad);
3954 if (tok != '{') {
3955 v = tok;
3956 next();
3957 /* struct already defined ? return it */
3958 if (v < TOK_IDENT)
3959 expect("struct/union/enum name");
3960 s = struct_find(v);
3961 if (s && (s->sym_scope == local_scope || tok != '{')) {
3962 if (u == s->type.t)
3963 goto do_decl;
3964 if (u == VT_ENUM && IS_ENUM(s->type.t))
3965 goto do_decl;
3966 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3968 } else {
3969 v = anon_sym++;
3971 /* Record the original enum/struct/union token. */
3972 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3973 type1.ref = NULL;
3974 /* we put an undefined size for struct/union */
3975 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3976 s->r = 0; /* default alignment is zero as gcc */
3977 do_decl:
3978 type->t = s->type.t;
3979 type->ref = s;
3981 if (tok == '{') {
3982 next();
3983 if (s->c != -1)
3984 tcc_error("struct/union/enum already defined");
3985 s->c = -2;
3986 /* cannot be empty */
3987 /* empty enums are not allowed */
3988 ps = &s->next;
3989 if (u == VT_ENUM) {
3990 long long ll = 0, pl = 0, nl = 0;
3991 CType t;
3992 t.ref = s;
3993 /* enum symbols have static storage */
3994 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3995 for(;;) {
3996 v = tok;
3997 if (v < TOK_UIDENT)
3998 expect("identifier");
3999 ss = sym_find(v);
4000 if (ss && !local_stack)
4001 tcc_error("redefinition of enumerator '%s'",
4002 get_tok_str(v, NULL));
4003 next();
4004 if (tok == '=') {
4005 next();
4006 ll = expr_const64();
4008 ss = sym_push(v, &t, VT_CONST, 0);
4009 ss->enum_val = ll;
4010 *ps = ss, ps = &ss->next;
4011 if (ll < nl)
4012 nl = ll;
4013 if (ll > pl)
4014 pl = ll;
4015 if (tok != ',')
4016 break;
4017 next();
4018 ll++;
4019 /* NOTE: we accept a trailing comma */
4020 if (tok == '}')
4021 break;
4023 skip('}');
4024 /* set integral type of the enum */
4025 t.t = VT_INT;
4026 if (nl >= 0) {
4027 if (pl != (unsigned)pl)
4028 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4029 t.t |= VT_UNSIGNED;
4030 } else if (pl != (int)pl || nl != (int)nl)
4031 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4032 s->type.t = type->t = t.t | VT_ENUM;
4033 s->c = 0;
4034 /* set type for enum members */
4035 for (ss = s->next; ss; ss = ss->next) {
4036 ll = ss->enum_val;
4037 if (ll == (int)ll) /* default is int if it fits */
4038 continue;
4039 if (t.t & VT_UNSIGNED) {
4040 ss->type.t |= VT_UNSIGNED;
4041 if (ll == (unsigned)ll)
4042 continue;
4044 ss->type.t = (ss->type.t & ~VT_BTYPE)
4045 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4047 } else {
4048 c = 0;
4049 flexible = 0;
4050 while (tok != '}') {
4051 if (!parse_btype(&btype, &ad1)) {
4052 skip(';');
4053 continue;
4055 while (1) {
4056 if (flexible)
4057 tcc_error("flexible array member '%s' not at the end of struct",
4058 get_tok_str(v, NULL));
4059 bit_size = -1;
4060 v = 0;
4061 type1 = btype;
4062 if (tok != ':') {
4063 if (tok != ';')
4064 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4065 if (v == 0) {
4066 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4067 expect("identifier");
4068 else {
4069 int v = btype.ref->v;
4070 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4071 if (tcc_state->ms_extensions == 0)
4072 expect("identifier");
4076 if (type_size(&type1, &align) < 0) {
4077 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4078 flexible = 1;
4079 else
4080 tcc_error("field '%s' has incomplete type",
4081 get_tok_str(v, NULL));
4083 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4084 (type1.t & VT_BTYPE) == VT_VOID ||
4085 (type1.t & VT_STORAGE))
4086 tcc_error("invalid type for '%s'",
4087 get_tok_str(v, NULL));
4089 if (tok == ':') {
4090 next();
4091 bit_size = expr_const();
4092 /* XXX: handle v = 0 case for messages */
4093 if (bit_size < 0)
4094 tcc_error("negative width in bit-field '%s'",
4095 get_tok_str(v, NULL));
4096 if (v && bit_size == 0)
4097 tcc_error("zero width for bit-field '%s'",
4098 get_tok_str(v, NULL));
4099 parse_attribute(&ad1);
4101 size = type_size(&type1, &align);
4102 if (bit_size >= 0) {
4103 bt = type1.t & VT_BTYPE;
4104 if (bt != VT_INT &&
4105 bt != VT_BYTE &&
4106 bt != VT_SHORT &&
4107 bt != VT_BOOL &&
4108 bt != VT_LLONG)
4109 tcc_error("bitfields must have scalar type");
4110 bsize = size * 8;
4111 if (bit_size > bsize) {
4112 tcc_error("width of '%s' exceeds its type",
4113 get_tok_str(v, NULL));
4114 } else if (bit_size == bsize
4115 && !ad.a.packed && !ad1.a.packed) {
4116 /* no need for bit fields */
4118 } else if (bit_size == 64) {
4119 tcc_error("field width 64 not implemented");
4120 } else {
4121 type1.t = (type1.t & ~VT_STRUCT_MASK)
4122 | VT_BITFIELD
4123 | (bit_size << (VT_STRUCT_SHIFT + 6));
4126 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4127 /* Remember we've seen a real field to check
4128 for placement of flexible array member. */
4129 c = 1;
4131 /* If member is a struct or bit-field, enforce
4132 placing into the struct (as anonymous). */
4133 if (v == 0 &&
4134 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4135 bit_size >= 0)) {
4136 v = anon_sym++;
4138 if (v) {
4139 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4140 ss->a = ad1.a;
4141 *ps = ss;
4142 ps = &ss->next;
4144 if (tok == ';' || tok == TOK_EOF)
4145 break;
4146 skip(',');
4148 skip(';');
4150 skip('}');
4151 parse_attribute(&ad);
4152 struct_layout(type, &ad);
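/* Enum sizing above: an enum whose members are all non-negative gets an
   unsigned type (unsigned int, or a 64-bit unsigned type if a value does
   not fit in 32 bits); with negative members it stays int unless a value
   does not fit in int, in which case it becomes a signed 64-bit type. */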
4157 static void sym_to_attr(AttributeDef *ad, Sym *s)
4159 merge_symattr(&ad->a, &s->a);
4160 merge_funcattr(&ad->f, &s->f);
4163 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4164 are added to the element type, copied because it could be a typedef. */
4165 static void parse_btype_qualify(CType *type, int qualifiers)
4167 while (type->t & VT_ARRAY) {
4168 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4169 type = &type->ref->type;
4171 type->t |= qualifiers;
4174 /* return 0 if no type declaration. otherwise, return the basic type
4175 and skip it.
4177 static int parse_btype(CType *type, AttributeDef *ad)
4179 int t, u, bt, st, type_found, typespec_found, g, n;
4180 Sym *s;
4181 CType type1;
4183 memset(ad, 0, sizeof(AttributeDef));
4184 type_found = 0;
4185 typespec_found = 0;
4186 t = VT_INT;
4187 bt = st = -1;
4188 type->ref = NULL;
4190 while(1) {
4191 switch(tok) {
4192 case TOK_EXTENSION:
4193 /* currently, we really ignore extension */
4194 next();
4195 continue;
4197 /* basic types */
4198 case TOK_CHAR:
4199 u = VT_BYTE;
4200 basic_type:
4201 next();
4202 basic_type1:
4203 if (u == VT_SHORT || u == VT_LONG) {
4204 if (st != -1 || (bt != -1 && bt != VT_INT))
4205 tmbt: tcc_error("too many basic types");
4206 st = u;
4207 } else {
4208 if (bt != -1 || (st != -1 && u != VT_INT))
4209 goto tmbt;
4210 bt = u;
4212 if (u != VT_INT)
4213 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4214 typespec_found = 1;
4215 break;
4216 case TOK_VOID:
4217 u = VT_VOID;
4218 goto basic_type;
4219 case TOK_SHORT:
4220 u = VT_SHORT;
4221 goto basic_type;
4222 case TOK_INT:
4223 u = VT_INT;
4224 goto basic_type;
4225 case TOK_ALIGNAS:
4226 { int n;
4227 AttributeDef ad1;
4228 next();
4229 skip('(');
4230 memset(&ad1, 0, sizeof(AttributeDef));
4231 if (parse_btype(&type1, &ad1)) {
4232 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4233 if (ad1.a.aligned)
4234 n = 1 << (ad1.a.aligned - 1);
4235 else
4236 type_size(&type1, &n);
4237 } else {
4238 n = expr_const();
4239 if (n <= 0 || (n & (n - 1)) != 0)
4240 tcc_error("alignment must be a positive power of two");
4242 skip(')');
4243 ad->a.aligned = exact_log2p1(n);
4245 continue;
4246 case TOK_LONG:
4247 if ((t & VT_BTYPE) == VT_DOUBLE) {
4248 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4249 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4250 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4251 } else {
4252 u = VT_LONG;
4253 goto basic_type;
4255 next();
4256 break;
4257 #ifdef TCC_TARGET_ARM64
4258 case TOK_UINT128:
4259 /* GCC's __uint128_t appears in some Linux header files. Make it a
4260 synonym for long double to get the size and alignment right. */
4261 u = VT_LDOUBLE;
4262 goto basic_type;
4263 #endif
4264 case TOK_BOOL:
4265 u = VT_BOOL;
4266 goto basic_type;
4267 case TOK_FLOAT:
4268 u = VT_FLOAT;
4269 goto basic_type;
4270 case TOK_DOUBLE:
4271 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4272 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4273 } else {
4274 u = VT_DOUBLE;
4275 goto basic_type;
4277 next();
4278 break;
4279 case TOK_ENUM:
4280 struct_decl(&type1, VT_ENUM);
4281 basic_type2:
4282 u = type1.t;
4283 type->ref = type1.ref;
4284 goto basic_type1;
4285 case TOK_STRUCT:
4286 struct_decl(&type1, VT_STRUCT);
4287 goto basic_type2;
4288 case TOK_UNION:
4289 struct_decl(&type1, VT_UNION);
4290 goto basic_type2;
4292 /* type modifiers */
4293 case TOK_CONST1:
4294 case TOK_CONST2:
4295 case TOK_CONST3:
4296 type->t = t;
4297 parse_btype_qualify(type, VT_CONSTANT);
4298 t = type->t;
4299 next();
4300 break;
4301 case TOK_VOLATILE1:
4302 case TOK_VOLATILE2:
4303 case TOK_VOLATILE3:
4304 type->t = t;
4305 parse_btype_qualify(type, VT_VOLATILE);
4306 t = type->t;
4307 next();
4308 break;
4309 case TOK_SIGNED1:
4310 case TOK_SIGNED2:
4311 case TOK_SIGNED3:
4312 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4313 tcc_error("signed and unsigned modifier");
4314 t |= VT_DEFSIGN;
4315 next();
4316 typespec_found = 1;
4317 break;
4318 case TOK_REGISTER:
4319 case TOK_AUTO:
4320 case TOK_RESTRICT1:
4321 case TOK_RESTRICT2:
4322 case TOK_RESTRICT3:
4323 next();
4324 break;
4325 case TOK_UNSIGNED:
4326 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4327 tcc_error("signed and unsigned modifier");
4328 t |= VT_DEFSIGN | VT_UNSIGNED;
4329 next();
4330 typespec_found = 1;
4331 break;
4333 /* storage */
4334 case TOK_EXTERN:
4335 g = VT_EXTERN;
4336 goto storage;
4337 case TOK_STATIC:
4338 g = VT_STATIC;
4339 goto storage;
4340 case TOK_TYPEDEF:
4341 g = VT_TYPEDEF;
4342 goto storage;
4343 storage:
4344 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4345 tcc_error("multiple storage classes");
4346 t |= g;
4347 next();
4348 break;
4349 case TOK_INLINE1:
4350 case TOK_INLINE2:
4351 case TOK_INLINE3:
4352 t |= VT_INLINE;
4353 next();
4354 break;
4355 case TOK_NORETURN3:
4356 /* currently, no need to handle it because tcc does not
4357 track unused objects */
4358 next();
4359 break;
4360 /* GNUC attribute */
4361 case TOK_ATTRIBUTE1:
4362 case TOK_ATTRIBUTE2:
4363 parse_attribute(ad);
4364 if (ad->attr_mode) {
4365 u = ad->attr_mode - 1;
4366 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4368 continue;
4369 /* GNUC typeof */
4370 case TOK_TYPEOF1:
4371 case TOK_TYPEOF2:
4372 case TOK_TYPEOF3:
4373 next();
4374 parse_expr_type(&type1);
4375 /* remove all storage modifiers except typedef */
4376 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4377 if (type1.ref)
4378 sym_to_attr(ad, type1.ref);
4379 goto basic_type2;
4380 default:
4381 if (typespec_found)
4382 goto the_end;
4383 s = sym_find(tok);
4384 if (!s || !(s->type.t & VT_TYPEDEF))
4385 goto the_end;
4387 n = tok, next();
4388 if (tok == ':' && !in_generic) {
4389 /* ignore if it's a label */
4390 unget_tok(n);
4391 goto the_end;
4394 t &= ~(VT_BTYPE|VT_LONG);
4395 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4396 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4397 type->ref = s->type.ref;
4398 if (t)
4399 parse_btype_qualify(type, t);
4400 t = type->t;
4401 /* get attributes from typedef */
4402 sym_to_attr(ad, s);
4403 typespec_found = 1;
4404 st = bt = -2;
4405 break;
4407 type_found = 1;
4409 the_end:
4410 if (tcc_state->char_is_unsigned) {
4411 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4412 t |= VT_UNSIGNED;
4414 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4415 bt = t & (VT_BTYPE|VT_LONG);
4416 if (bt == VT_LONG)
4417 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4418 #ifdef TCC_TARGET_PE
4419 if (bt == VT_LDOUBLE)
4420 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4421 #endif
4422 type->t = t;
4423 return type_found;
4426 /* convert a function parameter type (array to pointer and function to
4427 function pointer) */
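/* For example, a parameter declared as 'int a[10]' or 'int f(void)' is
   adjusted below to 'int *a' resp. 'int (*f)(void)'. */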
4428 static inline void convert_parameter_type(CType *pt)
4430 /* remove const and volatile qualifiers (XXX: const could be used
4431 to indicate a const function parameter) */
4432 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4433 /* array must be transformed to pointer according to ANSI C */
4434 pt->t &= ~VT_ARRAY;
4435 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4436 mk_pointer(pt);
4440 ST_FUNC void parse_asm_str(CString *astr)
4442 skip('(');
4443 parse_mult_str(astr, "string constant");
4446 /* Parse an asm label and return the token */
4447 static int asm_label_instr(void)
4449 int v;
4450 CString astr;
4452 next();
4453 parse_asm_str(&astr);
4454 skip(')');
4455 #ifdef ASM_DEBUG
4456 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4457 #endif
4458 v = tok_alloc(astr.data, astr.size - 1)->tok;
4459 cstr_free(&astr);
4460 return v;
4463 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4465 int n, l, t1, arg_size, align, unused_align;
4466 Sym **plast, *s, *first;
4467 AttributeDef ad1;
4468 CType pt;
4470 if (tok == '(') {
4471 /* function type, or recursive declarator (return if so) */
4472 next();
4473 if (td && !(td & TYPE_ABSTRACT))
4474 return 0;
4475 if (tok == ')')
4476 l = 0;
4477 else if (parse_btype(&pt, &ad1))
4478 l = FUNC_NEW;
4479 else if (td) {
4480 merge_attr (ad, &ad1);
4481 return 0;
4482 } else
4483 l = FUNC_OLD;
4484 first = NULL;
4485 plast = &first;
4486 arg_size = 0;
4487 if (l) {
4488 for(;;) {
4489 /* read param name and compute offset */
4490 if (l != FUNC_OLD) {
4491 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4492 break;
4493 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4494 if ((pt.t & VT_BTYPE) == VT_VOID)
4495 tcc_error("parameter declared as void");
4496 } else {
4497 n = tok;
4498 if (n < TOK_UIDENT)
4499 expect("identifier");
4500 pt.t = VT_VOID; /* invalid type */
4501 pt.ref = NULL;
4502 next();
4504 convert_parameter_type(&pt);
4505 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4506 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4507 *plast = s;
4508 plast = &s->next;
4509 if (tok == ')')
4510 break;
4511 skip(',');
4512 if (l == FUNC_NEW && tok == TOK_DOTS) {
4513 l = FUNC_ELLIPSIS;
4514 next();
4515 break;
4517 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4518 tcc_error("invalid type");
4520 } else
4521 /* if no parameters, then old type prototype */
4522 l = FUNC_OLD;
4523 skip(')');
4524 /* NOTE: const is ignored in returned type as it has a special
4525 meaning in gcc / C++ */
4526 type->t &= ~VT_CONSTANT;
4527 /* some ancient pre-K&R C allows a function to return an array
4528 and the array brackets to be put after the arguments, such
4529 that "int c()[]" means something like "int[] c()" */
4530 if (tok == '[') {
4531 next();
4532 skip(']'); /* only handle simple "[]" */
4533 mk_pointer(type);
4535 /* we push an anonymous symbol which will contain the function prototype */
4536 ad->f.func_args = arg_size;
4537 ad->f.func_type = l;
4538 s = sym_push(SYM_FIELD, type, 0, 0);
4539 s->a = ad->a;
4540 s->f = ad->f;
4541 s->next = first;
4542 type->t = VT_FUNC;
4543 type->ref = s;
4544 } else if (tok == '[') {
4545 int saved_nocode_wanted = nocode_wanted;
4546 /* array definition */
4547 next();
4548 while (1) {
4549 /* XXX The optional type-quals and static should only be accepted
4550 in parameter decls. The '*' as well, and then even only
4551 in prototypes (not function defs). */
4552 switch (tok) {
4553 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4554 case TOK_CONST1:
4555 case TOK_VOLATILE1:
4556 case TOK_STATIC:
4557 case '*':
4558 next();
4559 continue;
4560 default:
4561 break;
4563 break;
4565 n = -1;
4566 t1 = 0;
4567 if (tok != ']') {
4568 if (!local_stack || (storage & VT_STATIC))
4569 vpushi(expr_const());
4570 else {
4571 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4572 length must always be evaluated, even under nocode_wanted,
4573 so that its size slot is initialized (e.g. under sizeof
4574 or typeof). */
4575 nocode_wanted = 0;
4576 gexpr();
4578 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4579 n = vtop->c.i;
4580 if (n < 0)
4581 tcc_error("invalid array size");
4582 } else {
4583 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4584 tcc_error("size of variable length array should be an integer");
4585 n = 0;
4586 t1 = VT_VLA;
4589 skip(']');
4590 /* parse next post type */
4591 post_type(type, ad, storage, 0);
4593 if ((type->t & VT_BTYPE) == VT_FUNC)
4594 tcc_error("declaration of an array of functions");
4595 if ((type->t & VT_BTYPE) == VT_VOID
4596 || type_size(type, &unused_align) < 0)
4597 tcc_error("declaration of an array of incomplete type elements");
4599 t1 |= type->t & VT_VLA;
4601 if (t1 & VT_VLA) {
4602 if (n < 0)
4603 tcc_error("need explicit inner array size in VLAs");
4604 loc -= type_size(&int_type, &align);
4605 loc &= -align;
4606 n = loc;
4608 vla_runtime_type_size(type, &align);
4609 gen_op('*');
4610 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4611 vswap();
4612 vstore();
4614 if (n != -1)
4615 vpop();
4616 nocode_wanted = saved_nocode_wanted;
4618 /* we push an anonymous symbol which will contain the array
4619 element type */
4620 s = sym_push(SYM_FIELD, type, 0, n);
4621 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4622 type->ref = s;
4624 return 1;
4627 /* Parse a type declarator (except basic type), and return the type
4628 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4629 expected. 'type' should contain the basic type. 'ad' is the
4630 attribute definition of the basic type. It can be modified by
4631 type_decl(). If this (possibly abstract) declarator is a pointer chain
4632 it returns the innermost pointed-to type (equal to *type, but a different
4633 pointer); otherwise it returns 'type' itself. That is used for recursive calls. */
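/* A clarifying example: for the declaration 'int *a[3]', type_decl() is
   entered with basic type 'int'; it parses '*' and then the direct
   declarator 'a[3]', yielding "array of 3 pointers to int" for 'a'. */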
4634 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4636 CType *post, *ret;
4637 int qualifiers, storage;
4639 /* recursive type, remove storage bits first, apply them later again */
4640 storage = type->t & VT_STORAGE;
4641 type->t &= ~VT_STORAGE;
4642 post = ret = type;
4644 while (tok == '*') {
4645 qualifiers = 0;
4646 redo:
4647 next();
4648 switch(tok) {
4649 case TOK_CONST1:
4650 case TOK_CONST2:
4651 case TOK_CONST3:
4652 qualifiers |= VT_CONSTANT;
4653 goto redo;
4654 case TOK_VOLATILE1:
4655 case TOK_VOLATILE2:
4656 case TOK_VOLATILE3:
4657 qualifiers |= VT_VOLATILE;
4658 goto redo;
4659 case TOK_RESTRICT1:
4660 case TOK_RESTRICT2:
4661 case TOK_RESTRICT3:
4662 goto redo;
4663 /* XXX: clarify attribute handling */
4664 case TOK_ATTRIBUTE1:
4665 case TOK_ATTRIBUTE2:
4666 parse_attribute(ad);
4667 break;
4669 mk_pointer(type);
4670 type->t |= qualifiers;
4671 if (ret == type)
4672 /* innermost pointed to type is the one for the first derivation */
4673 ret = pointed_type(type);
4676 if (tok == '(') {
4677 /* This is possibly a parameter type list for abstract declarators
4678 ('int ()'), use post_type for testing this. */
4679 if (!post_type(type, ad, 0, td)) {
4680 /* It's not, so it's a nested declarator, and the post operations
4681 apply to the innermost pointed to type (if any). */
4682 /* XXX: this is not correct to modify 'ad' at this point, but
4683 the syntax is not clear */
4684 parse_attribute(ad);
4685 post = type_decl(type, ad, v, td);
4686 skip(')');
4687 } else
4688 goto abstract;
4689 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4690 /* type identifier */
4691 *v = tok;
4692 next();
4693 } else {
4694 abstract:
4695 if (!(td & TYPE_ABSTRACT))
4696 expect("identifier");
4697 *v = 0;
4699 post_type(post, ad, storage, 0);
4700 parse_attribute(ad);
4701 type->t |= storage;
4702 return ret;
4705 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4706 ST_FUNC int lvalue_type(int t)
4708 int bt, r;
4709 r = VT_LVAL;
4710 bt = t & VT_BTYPE;
4711 if (bt == VT_BYTE || bt == VT_BOOL)
4712 r |= VT_LVAL_BYTE;
4713 else if (bt == VT_SHORT)
4714 r |= VT_LVAL_SHORT;
4715 else
4716 return r;
4717 if (t & VT_UNSIGNED)
4718 r |= VT_LVAL_UNSIGNED;
4719 return r;
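/* e.g. an 'unsigned short' lvalue yields VT_LVAL|VT_LVAL_SHORT|VT_LVAL_UNSIGNED,
   while a plain 'int' lvalue is just VT_LVAL. */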
4722 /* indirection with full error checking and bound check */
4723 ST_FUNC void indir(void)
4725 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4726 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4727 return;
4728 expect("pointer");
4730 if (vtop->r & VT_LVAL)
4731 gv(RC_INT);
4732 vtop->type = *pointed_type(&vtop->type);
4733 /* Arrays and functions are never lvalues */
4734 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4735 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4736 vtop->r |= lvalue_type(vtop->type.t);
4737 /* if bound checking, the referenced pointer must be checked */
4738 #ifdef CONFIG_TCC_BCHECK
4739 if (tcc_state->do_bounds_check)
4740 vtop->r |= VT_MUSTBOUND;
4741 #endif
4745 /* pass a parameter to a function and do type checking and casting */
4746 static void gfunc_param_typed(Sym *func, Sym *arg)
4748 int func_type;
4749 CType type;
4751 func_type = func->f.func_type;
4752 if (func_type == FUNC_OLD ||
4753 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4754 /* default casting : only need to convert float to double */
4755 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4756 gen_cast_s(VT_DOUBLE);
4757 } else if (vtop->type.t & VT_BITFIELD) {
4758 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4759 type.ref = vtop->type.ref;
4760 gen_cast(&type);
4762 } else if (arg == NULL) {
4763 tcc_error("too many arguments to function");
4764 } else {
4765 type = arg->type;
4766 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4767 gen_assign_cast(&type);
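/* i.e. for old-style or variadic argument positions only the default
   float -> double promotion is applied; arguments matching a prototyped
   parameter get a normal assignment-style conversion instead. */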
4771 /* parse an expression and return its type without any side effect. */
4772 static void expr_type(CType *type, void (*expr_fn)(void))
4774 nocode_wanted++;
4775 expr_fn();
4776 *type = vtop->type;
4777 vpop();
4778 nocode_wanted--;
4781 /* parse an expression of the form '(type)' or '(expr)' and return its
4782 type */
4783 static void parse_expr_type(CType *type)
4785 int n;
4786 AttributeDef ad;
4788 skip('(');
4789 if (parse_btype(type, &ad)) {
4790 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4791 } else {
4792 expr_type(type, gexpr);
4794 skip(')');
4797 static void parse_type(CType *type)
4799 AttributeDef ad;
4800 int n;
4802 if (!parse_btype(type, &ad)) {
4803 expect("type");
4805 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4808 static void parse_builtin_params(int nc, const char *args)
4810 char c, sep = '(';
4811 CType t;
4812 if (nc)
4813 nocode_wanted++;
4814 next();
4815 while ((c = *args++)) {
4816 skip(sep);
4817 sep = ',';
4818 switch (c) {
4819 case 'e': expr_eq(); continue;
4820 case 't': parse_type(&t); vpush(&t); continue;
4821 default: tcc_error("internal error"); break;
4824 skip(')');
4825 if (nc)
4826 nocode_wanted--;
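/* 'args' is a small format string: 'e' parses one assignment expression,
   't' parses a type name; e.g. "ee" is used below for __builtin_expect. */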
4829 ST_FUNC void unary(void)
4831 int n, t, align, size, r, sizeof_caller;
4832 CType type;
4833 Sym *s;
4834 AttributeDef ad;
4836 sizeof_caller = in_sizeof;
4837 in_sizeof = 0;
4838 type.ref = NULL;
4839 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4840 although it would be better here */
4841 tok_next:
4842 switch(tok) {
4843 case TOK_EXTENSION:
4844 next();
4845 goto tok_next;
4846 case TOK_LCHAR:
4847 #ifdef TCC_TARGET_PE
4848 t = VT_SHORT|VT_UNSIGNED;
4849 goto push_tokc;
4850 #endif
4851 case TOK_CINT:
4852 case TOK_CCHAR:
4853 t = VT_INT;
4854 push_tokc:
4855 type.t = t;
4856 vsetc(&type, VT_CONST, &tokc);
4857 next();
4858 break;
4859 case TOK_CUINT:
4860 t = VT_INT | VT_UNSIGNED;
4861 goto push_tokc;
4862 case TOK_CLLONG:
4863 t = VT_LLONG;
4864 goto push_tokc;
4865 case TOK_CULLONG:
4866 t = VT_LLONG | VT_UNSIGNED;
4867 goto push_tokc;
4868 case TOK_CFLOAT:
4869 t = VT_FLOAT;
4870 goto push_tokc;
4871 case TOK_CDOUBLE:
4872 t = VT_DOUBLE;
4873 goto push_tokc;
4874 case TOK_CLDOUBLE:
4875 t = VT_LDOUBLE;
4876 goto push_tokc;
4877 case TOK_CLONG:
4878 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4879 goto push_tokc;
4880 case TOK_CULONG:
4881 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4882 goto push_tokc;
4883 case TOK___FUNCTION__:
4884 if (!gnu_ext)
4885 goto tok_identifier;
4886 /* fall thru */
4887 case TOK___FUNC__:
4889 void *ptr;
4890 int len;
4891 /* special function name identifier */
4892 len = strlen(funcname) + 1;
4893 /* generate char[len] type */
4894 type.t = VT_BYTE;
4895 mk_pointer(&type);
4896 type.t |= VT_ARRAY;
4897 type.ref->c = len;
4898 vpush_ref(&type, data_section, data_section->data_offset, len);
4899 if (!NODATA_WANTED) {
4900 ptr = section_ptr_add(data_section, len);
4901 memcpy(ptr, funcname, len);
4903 next();
4905 break;
4906 case TOK_LSTR:
4907 #ifdef TCC_TARGET_PE
4908 t = VT_SHORT | VT_UNSIGNED;
4909 #else
4910 t = VT_INT;
4911 #endif
4912 goto str_init;
4913 case TOK_STR:
4914 /* string parsing */
4915 t = VT_BYTE;
4916 if (tcc_state->char_is_unsigned)
4917 t = VT_BYTE | VT_UNSIGNED;
4918 str_init:
4919 if (tcc_state->warn_write_strings)
4920 t |= VT_CONSTANT;
4921 type.t = t;
4922 mk_pointer(&type);
4923 type.t |= VT_ARRAY;
4924 memset(&ad, 0, sizeof(AttributeDef));
4925 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4926 break;
4927 case '(':
4928 next();
4929 /* cast ? */
4930 if (parse_btype(&type, &ad)) {
4931 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4932 skip(')');
4933 /* check ISOC99 compound literal */
4934 if (tok == '{') {
4935 /* data is allocated locally by default */
4936 if (global_expr)
4937 r = VT_CONST;
4938 else
4939 r = VT_LOCAL;
4940 /* all except arrays are lvalues */
4941 if (!(type.t & VT_ARRAY))
4942 r |= lvalue_type(type.t);
4943 memset(&ad, 0, sizeof(AttributeDef));
4944 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4945 } else {
4946 if (sizeof_caller) {
4947 vpush(&type);
4948 return;
4950 unary();
4951 gen_cast(&type);
4953 } else if (tok == '{') {
4954 int saved_nocode_wanted = nocode_wanted;
4955 if (const_wanted)
4956 tcc_error("expected constant");
4957 /* save all registers */
4958 save_regs(0);
4959 /* statement expression: unlike GCC, we do not accept break/continue
4960 inside it. We do retain the nocode_wanted state, as statement
4961 expressions can never be entered from the outside, so any
4962 reactivation of code emission (from labels or loop heads) can be
4963 disabled again after the end of it. */
4964 block(1);
4965 nocode_wanted = saved_nocode_wanted;
4966 skip(')');
4967 } else {
4968 gexpr();
4969 skip(')');
4971 break;
4972 case '*':
4973 next();
4974 unary();
4975 indir();
4976 break;
4977 case '&':
4978 next();
4979 unary();
4980 /* function names must be treated as function pointers,
4981 except for unary '&' and sizeof. Since we consider that
4982 functions are not lvalues, we only have to handle it
4983 there and in function calls. */
4984 /* arrays can also be used although they are not lvalues */
4985 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4986 !(vtop->type.t & VT_ARRAY))
4987 test_lvalue();
4988 mk_pointer(&vtop->type);
4989 gaddrof();
4990 break;
4991 case '!':
4992 next();
4993 unary();
4994 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4995 gen_cast_s(VT_BOOL);
4996 vtop->c.i = !vtop->c.i;
4997 } else if (vtop->r == VT_CMP) {
4998 vtop->cmp_op ^= 1;
4999 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5000 } else {
5001 vpushi(0);
5002 gen_op(TOK_EQ);
5004 break;
5005 case '~':
5006 next();
5007 unary();
5008 vpushi(-1);
5009 gen_op('^');
5010 break;
5011 case '+':
5012 next();
5013 unary();
5014 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5015 tcc_error("pointer not accepted for unary plus");
5016 /* In order to force a cast, we add zero, except for floating point
5017 where we really need a no-op (otherwise -0.0 would be transformed
5018 into +0.0). */
5019 if (!is_float(vtop->type.t)) {
5020 vpushi(0);
5021 gen_op('+');
5023 break;
5024 case TOK_SIZEOF:
5025 case TOK_ALIGNOF1:
5026 case TOK_ALIGNOF2:
5027 case TOK_ALIGNOF3:
5028 t = tok;
5029 next();
5030 in_sizeof++;
5031 expr_type(&type, unary); /* calling unary() resets in_sizeof to 0 */
5032 s = NULL;
5033 if (vtop[1].r & VT_SYM)
5034 s = vtop[1].sym; /* hack: accessing previous vtop */
5035 size = type_size(&type, &align);
5036 if (s && s->a.aligned)
5037 align = 1 << (s->a.aligned - 1);
5038 if (t == TOK_SIZEOF) {
5039 if (!(type.t & VT_VLA)) {
5040 if (size < 0)
5041 tcc_error("sizeof applied to an incomplete type");
5042 vpushs(size);
5043 } else {
5044 vla_runtime_type_size(&type, &align);
5046 } else {
5047 vpushs(align);
5049 vtop->type.t |= VT_UNSIGNED;
5050 break;
5052 case TOK_builtin_expect:
5053 /* __builtin_expect is a no-op for now */
5054 parse_builtin_params(0, "ee");
5055 vpop();
5056 break;
5057 case TOK_builtin_types_compatible_p:
5058 parse_builtin_params(0, "tt");
5059 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5060 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5061 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5062 vtop -= 2;
5063 vpushi(n);
5064 break;
5065 case TOK_builtin_choose_expr:
5067 int64_t c;
5068 next();
5069 skip('(');
5070 c = expr_const64();
5071 skip(',');
5072 if (!c) {
5073 nocode_wanted++;
5075 expr_eq();
5076 if (!c) {
5077 vpop();
5078 nocode_wanted--;
5080 skip(',');
5081 if (c) {
5082 nocode_wanted++;
5084 expr_eq();
5085 if (c) {
5086 vpop();
5087 nocode_wanted--;
5089 skip(')');
5091 break;
5092 case TOK_builtin_constant_p:
5093 parse_builtin_params(1, "e");
5094 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5095 vtop--;
5096 vpushi(n);
5097 break;
5098 case TOK_builtin_frame_address:
5099 case TOK_builtin_return_address:
5101 int tok1 = tok;
5102 int level;
5103 next();
5104 skip('(');
5105 if (tok != TOK_CINT) {
5106 tcc_error("%s only takes positive integers",
5107 tok1 == TOK_builtin_return_address ?
5108 "__builtin_return_address" :
5109 "__builtin_frame_address");
5111 level = (uint32_t)tokc.i;
5112 next();
5113 skip(')');
5114 type.t = VT_VOID;
5115 mk_pointer(&type);
5116 vset(&type, VT_LOCAL, 0); /* local frame */
5117 while (level--) {
5118 mk_pointer(&vtop->type);
5119 indir(); /* -> parent frame */
5121 if (tok1 == TOK_builtin_return_address) {
5122 // assume return address is just above frame pointer on stack
5123 vpushi(PTR_SIZE);
5124 gen_op('+');
5125 mk_pointer(&vtop->type);
5126 indir();
5129 break;
5130 #ifdef TCC_TARGET_X86_64
5131 #ifdef TCC_TARGET_PE
5132 case TOK_builtin_va_start:
5133 parse_builtin_params(0, "ee");
5134 r = vtop->r & VT_VALMASK;
5135 if (r == VT_LLOCAL)
5136 r = VT_LOCAL;
5137 if (r != VT_LOCAL)
5138 tcc_error("__builtin_va_start expects a local variable");
5139 vtop->r = r;
5140 vtop->type = char_pointer_type;
5141 vtop->c.i += 8;
5142 vstore();
5143 break;
5144 #else
5145 case TOK_builtin_va_arg_types:
5146 parse_builtin_params(0, "t");
5147 vpushi(classify_x86_64_va_arg(&vtop->type));
5148 vswap();
5149 vpop();
5150 break;
5151 #endif
5152 #endif
5154 #ifdef TCC_TARGET_ARM64
5155 case TOK___va_start: {
5156 parse_builtin_params(0, "ee");
5157 //xx check types
5158 gen_va_start();
5159 vpushi(0);
5160 vtop->type.t = VT_VOID;
5161 break;
5163 case TOK___va_arg: {
5164 parse_builtin_params(0, "et");
5165 type = vtop->type;
5166 vpop();
5167 //xx check types
5168 gen_va_arg(&type);
5169 vtop->type = type;
5170 break;
5172 case TOK___arm64_clear_cache: {
5173 parse_builtin_params(0, "ee");
5174 gen_clear_cache();
5175 vpushi(0);
5176 vtop->type.t = VT_VOID;
5177 break;
5179 #endif
5180 /* pre operations */
5181 case TOK_INC:
5182 case TOK_DEC:
5183 t = tok;
5184 next();
5185 unary();
5186 inc(0, t);
5187 break;
5188 case '-':
5189 next();
5190 unary();
5191 t = vtop->type.t & VT_BTYPE;
5192 if (is_float(t)) {
5193 /* In IEEE negate(x) isn't subtract(0,x), but rather
5194 subtract(-0, x). */
5195 vpush(&vtop->type);
5196 if (t == VT_FLOAT)
5197 vtop->c.f = -1.0 * 0.0;
5198 else if (t == VT_DOUBLE)
5199 vtop->c.d = -1.0 * 0.0;
5200 else
5201 vtop->c.ld = -1.0 * 0.0;
5202 } else
5203 vpushi(0);
5204 vswap();
5205 gen_op('-');
5206 break;
5207 case TOK_LAND:
5208 if (!gnu_ext)
5209 goto tok_identifier;
5210 next();
5211 /* allow taking the address of a label */
5212 if (tok < TOK_UIDENT)
5213 expect("label identifier");
5214 s = label_find(tok);
5215 if (!s) {
5216 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5217 } else {
5218 if (s->r == LABEL_DECLARED)
5219 s->r = LABEL_FORWARD;
5221 if (!s->type.t) {
5222 s->type.t = VT_VOID;
5223 mk_pointer(&s->type);
5224 s->type.t |= VT_STATIC;
5226 vpushsym(&s->type, s);
5227 next();
5228 break;
5230 case TOK_GENERIC:
5232 CType controlling_type;
5233 int has_default = 0;
5234 int has_match = 0;
5235 int learn = 0;
5236 TokenString *str = NULL;
5237 int saved_const_wanted = const_wanted;
5239 next();
5240 skip('(');
5241 const_wanted = 0;
5242 expr_type(&controlling_type, expr_eq);
5243 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5244 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5245 mk_pointer(&controlling_type);
5246 const_wanted = saved_const_wanted;
5247 for (;;) {
5248 learn = 0;
5249 skip(',');
5250 if (tok == TOK_DEFAULT) {
5251 if (has_default)
5252 tcc_error("too many 'default'");
5253 has_default = 1;
5254 if (!has_match)
5255 learn = 1;
5256 next();
5257 } else {
5258 AttributeDef ad_tmp;
5259 int itmp;
5260 CType cur_type;
5262 in_generic++;
5263 parse_btype(&cur_type, &ad_tmp);
5264 in_generic--;
5266 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5267 if (compare_types(&controlling_type, &cur_type, 0)) {
5268 if (has_match) {
5269 tcc_error("type match twice");
5271 has_match = 1;
5272 learn = 1;
5275 skip(':');
5276 if (learn) {
5277 if (str)
5278 tok_str_free(str);
5279 skip_or_save_block(&str);
5280 } else {
5281 skip_or_save_block(NULL);
5283 if (tok == ')')
5284 break;
5286 if (!str) {
5287 char buf[60];
5288 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5289 tcc_error("type '%s' does not match any association", buf);
5291 begin_macro(str, 1);
5292 next();
5293 expr_eq();
5294 if (tok != TOK_EOF)
5295 expect(",");
5296 end_macro();
5297 next();
5298 break;
5300 // special qNaN, sNaN and infinity values
5301 case TOK___NAN__:
5302 n = 0x7fc00000;
5303 special_math_val:
5304 vpushi(n);
5305 vtop->type.t = VT_FLOAT;
5306 next();
5307 break;
5308 case TOK___SNAN__:
5309 n = 0x7f800001;
5310 goto special_math_val;
5311 case TOK___INF__:
5312 n = 0x7f800000;
5313 goto special_math_val;
5315 default:
5316 tok_identifier:
5317 t = tok;
5318 next();
5319 if (t < TOK_UIDENT)
5320 expect("identifier");
5321 s = sym_find(t);
5322 if (!s || IS_ASM_SYM(s)) {
5323 const char *name = get_tok_str(t, NULL);
5324 if (tok != '(')
5325 tcc_error("'%s' undeclared", name);
5326 /* for simple function calls, we tolerate an undeclared
5327 external reference to an int() function */
5328 if (tcc_state->warn_implicit_function_declaration
5329 #ifdef TCC_TARGET_PE
5330 /* people must be warned about using undeclared WINAPI functions
5331 (which usually start with an uppercase letter) */
5332 || (name[0] >= 'A' && name[0] <= 'Z')
5333 #endif
5335 tcc_warning("implicit declaration of function '%s'", name);
5336 s = external_global_sym(t, &func_old_type);
5339 r = s->r;
5340 /* A symbol that has a register is a local register variable,
5341 which starts out as a VT_LOCAL value. */
5342 if ((r & VT_VALMASK) < VT_CONST)
5343 r = (r & ~VT_VALMASK) | VT_LOCAL;
5345 vset(&s->type, r, s->c);
5346 /* Point to s as backpointer (even without r&VT_SYM).
5347 Will be used by at least the x86 inline asm parser for
5348 regvars. */
5349 vtop->sym = s;
5351 if (r & VT_SYM) {
5352 vtop->c.i = 0;
5353 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5354 vtop->c.i = s->enum_val;
5356 break;
5359 /* post operations */
5360 while (1) {
5361 if (tok == TOK_INC || tok == TOK_DEC) {
5362 inc(1, tok);
5363 next();
5364 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5365 int qualifiers, cumofs = 0;
5366 /* field */
5367 if (tok == TOK_ARROW)
5368 indir();
5369 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5370 test_lvalue();
5371 gaddrof();
5372 /* expect pointer on structure */
5373 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5374 expect("struct or union");
5375 if (tok == TOK_CDOUBLE)
5376 expect("field name");
5377 next();
5378 if (tok == TOK_CINT || tok == TOK_CUINT)
5379 expect("field name");
5380 s = find_field(&vtop->type, tok, &cumofs);
5381 if (!s)
5382 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5383 /* add field offset to pointer */
5384 vtop->type = char_pointer_type; /* change type to 'char *' */
5385 vpushi(cumofs + s->c);
5386 gen_op('+');
5387 /* change type to field type, and set to lvalue */
5388 vtop->type = s->type;
5389 vtop->type.t |= qualifiers;
5390 /* an array is never an lvalue */
5391 if (!(vtop->type.t & VT_ARRAY)) {
5392 vtop->r |= lvalue_type(vtop->type.t);
5393 #ifdef CONFIG_TCC_BCHECK
5394 /* if bound checking, the referenced pointer must be checked */
5395 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5396 vtop->r |= VT_MUSTBOUND;
5397 #endif
5399 next();
5400 } else if (tok == '[') {
5401 next();
5402 gexpr();
5403 gen_op('+');
5404 indir();
5405 skip(']');
5406 } else if (tok == '(') {
5407 SValue ret;
5408 Sym *sa;
5409 int nb_args, ret_nregs, ret_align, regsize, variadic;
5411 /* function call */
5412 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5413 /* pointer test (no array accepted) */
5414 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5415 vtop->type = *pointed_type(&vtop->type);
5416 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5417 goto error_func;
5418 } else {
5419 error_func:
5420 expect("function pointer");
5422 } else {
5423 vtop->r &= ~VT_LVAL; /* no lvalue */
5425 /* get return type */
5426 s = vtop->type.ref;
5427 next();
5428 sa = s->next; /* first parameter */
5429 nb_args = regsize = 0;
5430 ret.r2 = VT_CONST;
5431 /* compute first implicit argument if a structure is returned */
5432 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5433 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5434 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5435 &ret_align, &regsize);
5436 if (!ret_nregs) {
5437 /* get some space for the returned structure */
5438 size = type_size(&s->type, &align);
5439 #ifdef TCC_TARGET_ARM64
5440 /* On arm64, a small struct is returned in registers.
5441 It is much easier to write it to memory if we know
5442 that we are allowed to write some extra bytes, so
5443 round the allocated space up to a power of 2: */
5444 if (size < 16)
5445 while (size & (size - 1))
5446 size = (size | (size - 1)) + 1;
5447 #endif
5448 loc = (loc - size) & -align;
5449 ret.type = s->type;
5450 ret.r = VT_LOCAL | VT_LVAL;
5451 /* pass it as 'int' to avoid structure arg passing
5452 problems */
5453 vseti(VT_LOCAL, loc);
5454 ret.c = vtop->c;
5455 nb_args++;
5457 } else {
5458 ret_nregs = 1;
5459 ret.type = s->type;
5462 if (ret_nregs) {
5463 /* return in register */
5464 if (is_float(ret.type.t)) {
5465 ret.r = reg_fret(ret.type.t);
5466 #ifdef TCC_TARGET_X86_64
5467 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5468 ret.r2 = REG_QRET;
5469 #endif
5470 } else {
5471 #ifndef TCC_TARGET_ARM64
5472 #ifdef TCC_TARGET_X86_64
5473 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5474 #else
5475 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5476 #endif
5477 ret.r2 = REG_LRET;
5478 #endif
5479 ret.r = REG_IRET;
5481 ret.c.i = 0;
5483 if (tok != ')') {
5484 for(;;) {
5485 expr_eq();
5486 gfunc_param_typed(s, sa);
5487 nb_args++;
5488 if (sa)
5489 sa = sa->next;
5490 if (tok == ')')
5491 break;
5492 skip(',');
5495 if (sa)
5496 tcc_error("too few arguments to function");
5497 skip(')');
5498 gfunc_call(nb_args);
5500 /* return value */
5501 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5502 vsetc(&ret.type, r, &ret.c);
5503 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5506 /* handle packed struct return */
5507 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5508 int addr, offset;
5510 size = type_size(&s->type, &align);
5511 /* We often write whole registers, so make sure there's enough
5512 space. Assume the register size is a power of 2. */
5513 if (regsize > align)
5514 align = regsize;
5515 loc = (loc - size) & -align;
5516 addr = loc;
5517 offset = 0;
5518 for (;;) {
5519 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5520 vswap();
5521 vstore();
5522 vtop--;
5523 if (--ret_nregs == 0)
5524 break;
5525 offset += regsize;
5527 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5529 if (s->f.func_noreturn)
5530 CODE_OFF();
5531 } else {
5532 break;
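/* The functions below form a hand-written precedence ladder:
   unary -> expr_prod (* / %) -> expr_sum (+ -) -> expr_shift (<< >>)
   -> expr_cmp -> expr_cmpeq -> expr_and -> expr_xor -> expr_or
   -> expr_land (&&) -> expr_lor (||) -> expr_cond (?:) -> expr_eq (=, op=). */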
5537 ST_FUNC void expr_prod(void)
5539 int t;
5541 unary();
5542 while (tok == '*' || tok == '/' || tok == '%') {
5543 t = tok;
5544 next();
5545 unary();
5546 gen_op(t);
5550 ST_FUNC void expr_sum(void)
5552 int t;
5554 expr_prod();
5555 while (tok == '+' || tok == '-') {
5556 t = tok;
5557 next();
5558 expr_prod();
5559 gen_op(t);
5563 static void expr_shift(void)
5565 int t;
5567 expr_sum();
5568 while (tok == TOK_SHL || tok == TOK_SAR) {
5569 t = tok;
5570 next();
5571 expr_sum();
5572 gen_op(t);
5576 static void expr_cmp(void)
5578 int t;
5580 expr_shift();
5581 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5582 tok == TOK_ULT || tok == TOK_UGE) {
5583 t = tok;
5584 next();
5585 expr_shift();
5586 gen_op(t);
5590 static void expr_cmpeq(void)
5592 int t;
5594 expr_cmp();
5595 while (tok == TOK_EQ || tok == TOK_NE) {
5596 t = tok;
5597 next();
5598 expr_cmp();
5599 gen_op(t);
5603 static void expr_and(void)
5605 expr_cmpeq();
5606 while (tok == '&') {
5607 next();
5608 expr_cmpeq();
5609 gen_op('&');
5613 static void expr_xor(void)
5615 expr_and();
5616 while (tok == '^') {
5617 next();
5618 expr_and();
5619 gen_op('^');
5623 static void expr_or(void)
5625 expr_xor();
5626 while (tok == '|') {
5627 next();
5628 expr_xor();
5629 gen_op('|');
5633 static int condition_3way(void);
5635 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5637 int t = 0, cc = 1, f = 0, c;
5638 for(;;) {
5639 c = f ? i : condition_3way();
5640 if (c < 0) {
5641 save_regs(1), cc = 0;
5642 } else if (c != i) {
5643 nocode_wanted++, f = 1;
5645 if (tok != e_op) {
5646 if (cc || f) {
5647 vpop();
5648 vpushi(i ^ f);
5649 gsym(t);
5650 nocode_wanted -= f;
5651 } else {
5652 gvtst_set(i, t);
5654 break;
5656 if (c < 0)
5657 t = gvtst(i, t);
5658 else
5659 vpop();
5660 next();
5661 e_fn();
5665 static void expr_land(void)
5667 expr_or();
5668 if (tok == TOK_LAND)
5669 expr_landor(expr_or, TOK_LAND, 1);
5672 static void expr_lor(void)
5674 expr_land();
5675 if (tok == TOK_LOR)
5676 expr_landor(expr_land, TOK_LOR, 0);
5679 /* Assuming vtop is a value used in a conditional context
5680 (i.e. compared with zero) return 0 if it's false, 1 if
5681 true and -1 if it can't be statically determined. */
5682 static int condition_3way(void)
5684 int c = -1;
5685 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5686 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5687 vdup();
5688 gen_cast_s(VT_BOOL);
5689 c = vtop->c.i;
5690 vpop();
5692 return c;
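/* e.g. 'if (2)' gives 1, 'if (0)' gives 0, and a value only known at run
   time (or the address of a weak symbol) gives -1. */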
5695 static int is_cond_bool(SValue *sv)
5697 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5698 && (sv->type.t & VT_BTYPE) == VT_INT)
5699 return (unsigned)sv->c.i < 2;
5700 if (sv->r == VT_CMP)
5701 return 1;
5702 return 0;
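/* true if the value is a constant 0/1 or a pending comparison; expr_cond()
   uses this, presumably so that 'cond ? boolish : boolish' can stay a jump
   network instead of materializing 0/1 in a register. */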
5705 static void expr_cond(void)
5707 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5708 SValue sv;
5709 CType type, type1, type2;
5710 int ncw_prev;
5712 expr_lor();
5713 if (tok == '?') {
5714 next();
5715 c = condition_3way();
5716 g = (tok == ':' && gnu_ext);
5717 tt = 0;
5718 if (!g) {
5719 if (c < 0) {
5720 save_regs(1);
5721 tt = gvtst(1, 0);
5722 } else {
5723 vpop();
5725 } else if (c < 0) {
5726 /* needed to avoid having different registers saved in
5727 each branch */
5728 rc = RC_INT;
5729 if (is_float(vtop->type.t)) {
5730 rc = RC_FLOAT;
5731 #ifdef TCC_TARGET_X86_64
5732 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5733 rc = RC_ST0;
5735 #endif
5737 gv(rc);
5738 save_regs(1);
5739 gv_dup();
5740 tt = gvtst(0, 0);
5743 ncw_prev = nocode_wanted;
5744 if (1) {
5745 if (c == 0)
5746 nocode_wanted++;
5747 if (!g)
5748 gexpr();
5750 if (c < 0 && vtop->r == VT_CMP) {
5751 t1 = gvtst(0, 0);
5752 vpushi(0);
5753 gvtst_set(0, t1);
5756 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5757 mk_pointer(&vtop->type);
5758 type1 = vtop->type;
5759 sv = *vtop; /* save value to handle it later */
5760 vtop--; /* no vpop so that FP stack is not flushed */
5762 if (g) {
5763 u = tt;
5764 } else if (c < 0) {
5765 u = gjmp(0);
5766 gsym(tt);
5767 } else
5768 u = 0;
5770 nocode_wanted = ncw_prev;
5771 if (c == 1)
5772 nocode_wanted++;
5773 skip(':');
5774 expr_cond();
5776 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5777 if (sv.r == VT_CMP) {
5778 t1 = sv.jtrue;
5779 t2 = u;
5780 } else {
5781 t1 = gvtst(0, 0);
5782 t2 = gjmp(0);
5783 gsym(u);
5784 vpushv(&sv);
5786 gvtst_set(0, t1);
5787 gvtst_set(1, t2);
5788 nocode_wanted = ncw_prev;
5789 // tcc_warning("two conditions expr_cond");
5790 return;
5793 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5794 mk_pointer(&vtop->type);
5795 type2 = vtop->type;
5796 t1 = type1.t;
5797 bt1 = t1 & VT_BTYPE;
5798 t2 = type2.t;
5799 bt2 = t2 & VT_BTYPE;
5800 type.ref = NULL;
5802 /* cast operands to correct type according to ISOC rules */
5803 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5804 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5805 } else if (is_float(bt1) || is_float(bt2)) {
5806 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5807 type.t = VT_LDOUBLE;
5809 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5810 type.t = VT_DOUBLE;
5811 } else {
5812 type.t = VT_FLOAT;
5814 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5815 /* cast to biggest op */
5816 type.t = VT_LLONG | VT_LONG;
5817 if (bt1 == VT_LLONG)
5818 type.t &= t1;
5819 if (bt2 == VT_LLONG)
5820 type.t &= t2;
5821 /* convert to unsigned if it does not fit in a long long */
5822 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5823 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5824 type.t |= VT_UNSIGNED;
5825 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5826 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5827 /* If one is a null ptr constant the result type
5828 is the other. */
5829 if (is_null_pointer (vtop)) type = type1;
5830 else if (is_null_pointer (&sv)) type = type2;
5831 else if (bt1 != bt2)
5832 tcc_error("incompatible types in conditional expressions");
5833 else {
5834 CType *pt1 = pointed_type(&type1);
5835 CType *pt2 = pointed_type(&type2);
5836 int pbt1 = pt1->t & VT_BTYPE;
5837 int pbt2 = pt2->t & VT_BTYPE;
5838 int newquals, copied = 0;
5839 /* pointers to void get preferred, otherwise the
5840 pointed to types minus qualifs should be compatible */
5841 type = (pbt1 == VT_VOID) ? type1 : type2;
5842 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5843 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5844 tcc_warning("pointer type mismatch in conditional expression");
5846 /* combine qualifs */
5847 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5848 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5849 & newquals)
5851 /* copy the pointer target symbol */
5852 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5853 0, type.ref->c);
5854 copied = 1;
5855 pointed_type(&type)->t |= newquals;
5857 /* pointers to incomplete arrays get converted to
5858 pointers to completed ones if possible */
5859 if (pt1->t & VT_ARRAY
5860 && pt2->t & VT_ARRAY
5861 && pointed_type(&type)->ref->c < 0
5862 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5864 if (!copied)
5865 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5866 0, type.ref->c);
5867 pointed_type(&type)->ref =
5868 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5869 0, pointed_type(&type)->ref->c);
5870 pointed_type(&type)->ref->c =
5871 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5874 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5875 /* XXX: test structure compatibility */
5876 type = bt1 == VT_STRUCT ? type1 : type2;
5877 } else {
5878 /* integer operations */
5879 type.t = VT_INT | (VT_LONG & (t1 | t2));
5880 /* convert to unsigned if it does not fit in an integer */
5881 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5882 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5883 type.t |= VT_UNSIGNED;
5885 /* keep structs as lvalues by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5886 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5887 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5889 /* now we convert second operand */
5890 if (c != 1) {
5891 gen_cast(&type);
5892 if (islv) {
5893 mk_pointer(&vtop->type);
5894 gaddrof();
5895 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5896 gaddrof();
5899 rc = RC_INT;
5900 if (is_float(type.t)) {
5901 rc = RC_FLOAT;
5902 #ifdef TCC_TARGET_X86_64
5903 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5904 rc = RC_ST0;
5906 #endif
5907 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5908 /* for long longs, we use fixed registers to avoid having
5909 to handle a complicated move */
5910 rc = RC_IRET;
5913 tt = r2 = 0;
5914 if (c < 0) {
5915 r2 = gv(rc);
5916 tt = gjmp(0);
5918 gsym(u);
5919 nocode_wanted = ncw_prev;
5921 /* this is horrible, but we must also convert the first
5922 operand */
5923 if (c != 0) {
5924 *vtop = sv;
5925 gen_cast(&type);
5926 if (islv) {
5927 mk_pointer(&vtop->type);
5928 gaddrof();
5929 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5930 gaddrof();
5933 if (c < 0) {
5934 r1 = gv(rc);
5935 move_reg(r2, r1, type.t);
5936 vtop->r = r2;
5937 gsym(tt);
5940 if (islv)
5941 indir();
5946 static void expr_eq(void)
5948 int t;
5950 expr_cond();
5951 if (tok == '=' ||
5952 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5953 tok == TOK_A_XOR || tok == TOK_A_OR ||
5954 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5955 test_lvalue();
5956 t = tok;
5957 next();
5958 if (t == '=') {
5959 expr_eq();
5960 } else {
5961 vdup();
5962 expr_eq();
5963 gen_op(t & 0x7f);
5965 vstore();
5969 ST_FUNC void gexpr(void)
5971 while (1) {
5972 expr_eq();
5973 if (tok != ',')
5974 break;
5975 vpop();
5976 next();
5980 /* parse a constant expression and return the value in vtop. */
5981 static void expr_const1(void)
5983 const_wanted++;
5984 nocode_wanted++;
5985 expr_cond();
5986 nocode_wanted--;
5987 const_wanted--;
5990 /* parse an integer constant and return its value. */
5991 static inline int64_t expr_const64(void)
5993 int64_t c;
5994 expr_const1();
5995 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5996 expect("constant expression");
5997 c = vtop->c.i;
5998 vpop();
5999 return c;
6002 /* parse an integer constant and return its value.
6003 Complain if it doesn't fit in 32 bits (signed or unsigned). */
6004 ST_FUNC int expr_const(void)
6006 int c;
6007 int64_t wc = expr_const64();
6008 c = wc;
6009 if (c != wc && (unsigned)c != wc)
6010 tcc_error("constant exceeds 32 bit");
6011 return c;
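/* e.g. a 'case 0x100000000:' label goes through expr_const64() and is
   accepted, while expr_const() would reject such a value. */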
6014 /* ------------------------------------------------------------------------- */
6015 /* return from function */
6017 #ifndef TCC_TARGET_ARM64
6018 static void gfunc_return(CType *func_type)
6020 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6021 CType type, ret_type;
6022 int ret_align, ret_nregs, regsize;
6023 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6024 &ret_align, &regsize);
6025 if (0 == ret_nregs) {
6026 /* if returning a structure, it must be copied to the implicit
6027 first pointer arg location */
6028 type = *func_type;
6029 mk_pointer(&type);
6030 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6031 indir();
6032 vswap();
6033 /* copy structure value to pointer */
6034 vstore();
6035 } else {
6036 /* returning structure packed into registers */
6037 int r, size, addr, align;
6038 size = type_size(func_type,&align);
6039 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6040 (vtop->c.i & (ret_align-1)))
6041 && (align & (ret_align-1))) {
6042 loc = (loc - size) & -ret_align;
6043 addr = loc;
6044 type = *func_type;
6045 vset(&type, VT_LOCAL | VT_LVAL, addr);
6046 vswap();
6047 vstore();
6048 vpop();
6049 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6051 vtop->type = ret_type;
6052 if (is_float(ret_type.t))
6053 r = rc_fret(ret_type.t);
6054 else
6055 r = RC_IRET;
6057 if (ret_nregs == 1)
6058 gv(r);
6059 else {
6060 for (;;) {
6061 vdup();
6062 gv(r);
6063 vpop();
6064 if (--ret_nregs == 0)
6065 break;
6066 /* We assume that when a structure is returned in multiple
6067 registers, their classes are consecutive values of the
6068 sequence s(n) = 2^n */
6069 r <<= 1;
6070 vtop->c.i += regsize;
6074 } else if (is_float(func_type->t)) {
6075 gv(rc_fret(func_type->t));
6076 } else {
6077 gv(RC_IRET);
6079 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6081 #endif
6083 static void check_func_return(void)
6085 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6086 return;
6087 if (!strcmp (funcname, "main")
6088 && (func_vt.t & VT_BTYPE) == VT_INT) {
6089 /* main returns 0 by default */
6090 vpushi(0);
6091 gen_assign_cast(&func_vt);
6092 gfunc_return(&func_vt);
6093 } else {
6094 tcc_warning("function might return no value: '%s'", funcname);
6098 /* ------------------------------------------------------------------------- */
6099 /* switch/case */
6101 static int case_cmp(const void *pa, const void *pb)
6103 int64_t a = (*(struct case_t**) pa)->v1;
6104 int64_t b = (*(struct case_t**) pb)->v1;
6105 return a < b ? -1 : a > b;
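/* qsort() comparator: orders case ranges by their lower bound v1 and
   returns -1/0/1 like strcmp(). */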
6108 static void gtst_addr(int t, int a)
6110 gsym_addr(gvtst(0, t), a);
6113 static void gcase(struct case_t **base, int len, int *bsym)
6115 struct case_t *p;
6116 int e;
6117 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6118 while (len > 8) {
6119 /* binary search */
6120 p = base[len/2];
6121 vdup();
6122 if (ll)
6123 vpushll(p->v2);
6124 else
6125 vpushi(p->v2);
6126 gen_op(TOK_LE);
6127 e = gvtst(1, 0);
6128 vdup();
6129 if (ll)
6130 vpushll(p->v1);
6131 else
6132 vpushi(p->v1);
6133 gen_op(TOK_GE);
6134 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6135 /* x < v1 */
6136 gcase(base, len/2, bsym);
6137 /* x > v2 */
6138 gsym(e);
6139 e = len/2 + 1;
6140 base += e; len -= e;
6142 /* linear scan */
6143 while (len--) {
6144 p = *base++;
6145 vdup();
6146 if (ll)
6147 vpushll(p->v2);
6148 else
6149 vpushi(p->v2);
6150 if (p->v1 == p->v2) {
6151 gen_op(TOK_EQ);
6152 gtst_addr(0, p->sym);
6153 } else {
6154 gen_op(TOK_LE);
6155 e = gvtst(1, 0);
6156 vdup();
6157 if (ll)
6158 vpushll(p->v1);
6159 else
6160 vpushi(p->v1);
6161 gen_op(TOK_GE);
6162 gtst_addr(0, p->sym);
6163 gsym(e);
6166 *bsym = gjmp(*bsym);
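/* With more than 8 ranges a binary search over the sorted list is emitted;
   the remainder is a linear scan of equality/range tests. If nothing
   matches, control jumps to *bsym (patched to the default label, or past
   the switch, by the caller). */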
6169 /* ------------------------------------------------------------------------- */
6170 /* __attribute__((cleanup(fn))) */
6172 static void try_call_scope_cleanup(Sym *stop)
6174 Sym *cls = cur_scope->cl.s;
6176 for (; cls != stop; cls = cls->ncl) {
6177 Sym *fs = cls->next;
6178 Sym *vs = cls->prev_tok;
6180 vpushsym(&fs->type, fs);
6181 vset(&vs->type, vs->r, vs->c);
6182 vtop->sym = vs;
6183 mk_pointer(&vtop->type);
6184 gaddrof();
6185 gfunc_call(1);
6189 static void try_call_cleanup_goto(Sym *cleanupstate)
6191 Sym *oc, *cc;
6192 int ocd, ccd;
6194 if (!cur_scope->cl.s)
6195 return;
6197 /* search the nearest common ancestor (NCA) of both cleanup chains, given their parents and initial depths */
6198 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6199 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6201 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6203 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6206 try_call_scope_cleanup(cc);
6209 /* call 'func' for each __attribute__((cleanup(func))) */
6210 static void block_cleanup(struct scope *o)
6212 int jmp = 0;
6213 Sym *g, **pg;
6214 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6215 if (g->prev_tok->r & LABEL_FORWARD) {
6216 Sym *pcl = g->next;
6217 if (!jmp)
6218 jmp = gjmp(0);
6219 gsym(pcl->jnext);
6220 try_call_scope_cleanup(o->cl.s);
6221 pcl->jnext = gjmp(0);
6222 if (!o->cl.n)
6223 goto remove_pending;
6224 g->c = o->cl.n;
6225 pg = &g->prev;
6226 } else {
6227 remove_pending:
6228 *pg = g->prev;
6229 sym_free(g);
6232 gsym(jmp);
6233 try_call_scope_cleanup(o->cl.s);
6236 /* ------------------------------------------------------------------------- */
6237 /* VLA */
6239 static void vla_restore(int loc)
6241 if (loc)
6242 gen_vla_sp_restore(loc);
6245 static void vla_leave(struct scope *o)
6247 if (o->vla.num < cur_scope->vla.num)
6248 vla_restore(o->vla.loc);
6251 /* ------------------------------------------------------------------------- */
6252 /* local scopes */
6254 void new_scope(struct scope *o)
6256 /* copy and link previous scope */
6257 *o = *cur_scope;
6258 o->prev = cur_scope;
6259 cur_scope = o;
6261 /* record local declaration stack position */
6262 o->lstk = local_stack;
6263 o->llstk = local_label_stack;
6265 ++local_scope;
6268 void prev_scope(struct scope *o, int is_expr)
6270 vla_leave(o->prev);
6272 if (o->cl.s != o->prev->cl.s)
6273 block_cleanup(o->prev);
6275 /* pop locally defined labels */
6276 label_pop(&local_label_stack, o->llstk, is_expr);
6278 /* In the is_expr case (a statement expression is finished here),
6279 vtop might refer to symbols on the local_stack. Either via the
6280 type or via vtop->sym. We can't pop those nor any that in turn
6281 might be referred to. To make it easier we don't roll back
6282 any symbols in that case; some upper level call to block() will
6283 do that. We do have to remove such symbols from the lookup
6284 tables, though. sym_pop will do that. */
6286 /* pop locally defined symbols */
6287 sym_pop(&local_stack, o->lstk, is_expr);
6289 cur_scope = o->prev;
6290 --local_scope;
6293 /* leave a scope via break/continue(/goto) */
6294 void leave_scope(struct scope *o)
6296 if (!o)
6297 return;
6298 try_call_scope_cleanup(o->cl.s);
6299 vla_leave(o);
6302 /* ------------------------------------------------------------------------- */
6303 /* call block from 'for do while' loops */
6305 static void lblock(int *bsym, int *csym)
6307 struct scope *lo = loop_scope, *co = cur_scope;
6308 int *b = co->bsym, *c = co->csym;
6309 if (csym) {
6310 co->csym = csym;
6311 loop_scope = co;
6313 co->bsym = bsym;
6314 block(0);
6315 co->bsym = b;
6316 if (csym) {
6317 co->csym = c;
6318 loop_scope = lo;
6322 static void block(int is_expr)
6324 int a, b, c, d, e, t;
6325 Sym *s;
6327 if (is_expr) {
6328 /* default return value is (void) */
6329 vpushi(0);
6330 vtop->type.t = VT_VOID;
6333 again:
6334 t = tok, next();
6336 if (t == TOK_IF) {
6337 skip('(');
6338 gexpr();
6339 skip(')');
6340 a = gvtst(1, 0);
6341 block(0);
6342 if (tok == TOK_ELSE) {
6343 d = gjmp(0);
6344 gsym(a);
6345 next();
6346 block(0);
6347 gsym(d); /* patch else jmp */
6348 } else {
6349 gsym(a);
6352 } else if (t == TOK_WHILE) {
6353 d = gind();
6354 skip('(');
6355 gexpr();
6356 skip(')');
6357 a = gvtst(1, 0);
6358 b = 0;
6359 lblock(&a, &b);
6360 gjmp_addr(d);
6361 gsym_addr(b, d);
6362 gsym(a);
6364 } else if (t == '{') {
6365 struct scope o;
6366 new_scope(&o);
6368 /* handle local labels declarations */
6369 while (tok == TOK_LABEL) {
6370 do {
6371 next();
6372 if (tok < TOK_UIDENT)
6373 expect("label identifier");
6374 label_push(&local_label_stack, tok, LABEL_DECLARED);
6375 next();
6376 } while (tok == ',');
6377 skip(';');
6380 while (tok != '}') {
6381 decl(VT_LOCAL);
6382 if (tok != '}') {
6383 if (is_expr)
6384 vpop();
6385 block(is_expr);
6389 prev_scope(&o, is_expr);
6391 if (0 == local_scope && !nocode_wanted)
6392 check_func_return();
6393 next();
6395 } else if (t == TOK_RETURN) {
6396 a = tok != ';';
6397 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6398 if (a)
6399 gexpr(), gen_assign_cast(&func_vt);
6400 leave_scope(root_scope);
6401 if (a && b)
6402 gfunc_return(&func_vt);
6403 else if (a)
6404 vtop--;
6405 else if (b)
6406 tcc_warning("'return' with no value.");
6407 skip(';');
6408 /* jump unless last stmt in top-level block */
6409 if (tok != '}' || local_scope != 1)
6410 rsym = gjmp(rsym);
6411 CODE_OFF();
6413 } else if (t == TOK_BREAK) {
6414 /* compute jump */
6415 if (!cur_scope->bsym)
6416 tcc_error("cannot break");
6417 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6418 leave_scope(loop_scope);
6419 else
6420 leave_scope(cur_switch->scope);
6421 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6422 skip(';');
6424 } else if (t == TOK_CONTINUE) {
6425 /* compute jump */
6426 if (!cur_scope->csym)
6427 tcc_error("cannot continue");
6428 leave_scope(loop_scope);
6429 *cur_scope->csym = gjmp(*cur_scope->csym);
6430 skip(';');
6432 } else if (t == TOK_FOR) {
6433 struct scope o;
6434 new_scope(&o);
6436 skip('(');
6437 if (tok != ';') {
6438 /* c99 for-loop init decl? */
6439 if (!decl0(VT_LOCAL, 1, NULL)) {
6440 /* no, regular for-loop init expr */
6441 gexpr();
6442 vpop();
6445 skip(';');
6446 a = b = 0;
6447 c = d = gind();
6448 if (tok != ';') {
6449 gexpr();
6450 a = gvtst(1, 0);
6452 skip(';');
6453 if (tok != ')') {
6454 e = gjmp(0);
6455 d = gind();
6456 gexpr();
6457 vpop();
6458 gjmp_addr(c);
6459 gsym(e);
6461 skip(')');
6462 lblock(&a, &b);
6463 gjmp_addr(d);
6464 gsym_addr(b, d);
6465 gsym(a);
6466 prev_scope(&o, 0);
6468 } else if (t == TOK_DO) {
6469 a = b = 0;
6470 d = gind();
6471 lblock(&a, &b);
6472 gsym(b);
6473 skip(TOK_WHILE);
6474 skip('(');
6475 gexpr();
6476 skip(')');
6477 skip(';');
6478 c = gvtst(0, 0);
6479 gsym_addr(c, d);
6480 gsym(a);
6482 } else if (t == TOK_SWITCH) {
6483 struct switch_t *saved, sw;
6484 SValue switchval;
6486 sw.p = NULL;
6487 sw.n = 0;
6488 sw.def_sym = 0;
6489 sw.bsym = &a;
6490 sw.scope = cur_scope;
6492 saved = cur_switch;
6493 cur_switch = &sw;
6495 skip('(');
6496 gexpr();
6497 skip(')');
6498 switchval = *vtop--;
6500 a = 0;
6501 b = gjmp(0); /* jump to first case */
6502 lblock(&a, NULL);
6503 a = gjmp(a); /* add implicit break */
6504 /* case lookup */
6505 gsym(b);
6507 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6508 for (b = 1; b < sw.n; b++)
6509 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6510 tcc_error("duplicate case value");
6512 /* Our switch table sorting is signed, so the compared
6513 value needs to be as well when it's 64bit. */
6514 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6515 switchval.type.t &= ~VT_UNSIGNED;
6516 vpushv(&switchval);
6517 gv(RC_INT);
6518 d = 0, gcase(sw.p, sw.n, &d);
6519 vpop();
6520 if (sw.def_sym)
6521 gsym_addr(d, sw.def_sym);
6522 else
6523 gsym(d);
6524 /* break label */
6525 gsym(a);
6527 dynarray_reset(&sw.p, &sw.n);
6528 cur_switch = saved;
6530 } else if (t == TOK_CASE) {
6531 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6532 if (!cur_switch)
6533 expect("switch");
6534 cr->v1 = cr->v2 = expr_const64();
6535 if (gnu_ext && tok == TOK_DOTS) {
6536 next();
6537 cr->v2 = expr_const64();
6538 if (cr->v2 < cr->v1)
6539 tcc_warning("empty case range");
6541 cr->sym = gind();
6542 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6543 skip(':');
6544 is_expr = 0;
6545 goto block_after_label;
6547 } else if (t == TOK_DEFAULT) {
6548 if (!cur_switch)
6549 expect("switch");
6550 if (cur_switch->def_sym)
6551 tcc_error("too many 'default'");
6552 cur_switch->def_sym = gind();
6553 skip(':');
6554 is_expr = 0;
6555 goto block_after_label;
6557 } else if (t == TOK_GOTO) {
6558 vla_restore(root_scope->vla.loc);
6559 if (tok == '*' && gnu_ext) {
6560 /* computed goto */
6561 next();
6562 gexpr();
6563 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6564 expect("pointer");
6565 ggoto();
6567 } else if (tok >= TOK_UIDENT) {
6568 s = label_find(tok);
6569 /* put forward definition if needed */
6570 if (!s)
6571 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6572 else if (s->r == LABEL_DECLARED)
6573 s->r = LABEL_FORWARD;
6575 if (s->r & LABEL_FORWARD) {
6576 /* start new goto chain for cleanups, linked via label->next */
6577 if (cur_scope->cl.s && !nocode_wanted) {
6578 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6579 pending_gotos->prev_tok = s;
6580 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6581 pending_gotos->next = s;
6583 s->jnext = gjmp(s->jnext);
6584 } else {
6585 try_call_cleanup_goto(s->cleanupstate);
6586 gjmp_addr(s->jnext);
6588 next();
6590 } else {
6591 expect("label identifier");
6593 skip(';');
6595 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6596 asm_instr();
6598 } else {
6599 if (tok == ':' && t >= TOK_UIDENT) {
6600 /* label case */
6601 next();
6602 s = label_find(t);
6603 if (s) {
6604 if (s->r == LABEL_DEFINED)
6605 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6606 s->r = LABEL_DEFINED;
6607 if (s->next) {
6608 Sym *pcl; /* pending cleanup goto */
6609 for (pcl = s->next; pcl; pcl = pcl->prev)
6610 gsym(pcl->jnext);
6611 sym_pop(&s->next, NULL, 0);
6612 } else
6613 gsym(s->jnext);
6614 } else {
6615 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6617 s->jnext = gind();
6618 s->cleanupstate = cur_scope->cl.s;
6620 block_after_label:
6621 vla_restore(cur_scope->vla.loc);
6622 /* we accept this, but it is a mistake */
6623 if (tok == '}') {
6624 tcc_warning("deprecated use of label at end of compound statement");
6625 } else {
6626 goto again;
6629 } else {
6630 /* expression case */
6631 if (t != ';') {
6632 unget_tok(t);
6633 if (is_expr) {
6634 vpop();
6635 gexpr();
6636 } else {
6637 gexpr();
6638 vpop();
6640 skip(';');
6646 /* This skips over a stream of tokens containing balanced {} and ()
6647 pairs, stopping at an outer ',' ';' or '}' (or the matching '}' if we
6648 started with a '{'). If STR is non-NULL, the skipped tokens are
6649 allocated and stored in *STR. This doesn't check whether () and {}
6650 are nested correctly, i.e. "({)}" is accepted. */
6651 static void skip_or_save_block(TokenString **str)
6653 int braces = tok == '{';
6654 int level = 0;
6655 if (str)
6656 *str = tok_str_alloc();
6658 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6659 int t;
6660 if (tok == TOK_EOF) {
6661 if (str || level > 0)
6662 tcc_error("unexpected end of file");
6663 else
6664 break;
6666 if (str)
6667 tok_str_add_tok(*str);
6668 t = tok;
6669 next();
6670 if (t == '{' || t == '(') {
6671 level++;
6672 } else if (t == '}' || t == ')') {
6673 level--;
6674 if (level == 0 && braces && t == '}')
6675 break;
6678 if (str) {
6679 tok_str_add(*str, -1);
6680 tok_str_add(*str, 0);
6684 #define EXPR_CONST 1
6685 #define EXPR_ANY 2
6687 static void parse_init_elem(int expr_type)
6689 int saved_global_expr;
6690 switch(expr_type) {
6691 case EXPR_CONST:
6692 /* compound literals must be allocated globally in this case */
6693 saved_global_expr = global_expr;
6694 global_expr = 1;
6695 expr_const1();
6696 global_expr = saved_global_expr;
6697 /* NOTE: symbols are accepted, as well as lvalues for anonymous symbols
6698 (compound literals). */
6699 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6700 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6701 || vtop->sym->v < SYM_FIRST_ANOM))
6702 #ifdef TCC_TARGET_PE
6703 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6704 #endif
6706 tcc_error("initializer element is not constant");
6707 break;
6708 case EXPR_ANY:
6709 expr_eq();
6710 break;
6714 /* put zeros for variable based init */
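/* E.g. for a local 'int a[8] = { 1 };' the seven trailing elements are
   cleared through the memset call built below, roughly
   memset(fp + c, 0, 7 * sizeof(int)); the same initializer at file scope
   needs no code because static storage is already zero-initialized. */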
6715 static void init_putz(Section *sec, unsigned long c, int size)
6717 if (sec) {
6718 /* nothing to do because globals are already set to zero */
6719 } else {
6720 vpush_global_sym(&func_old_type, TOK_memset);
6721 vseti(VT_LOCAL, c);
6722 #ifdef TCC_TARGET_ARM
6723 vpushs(size);
6724 vpushi(0);
6725 #else
6726 vpushi(0);
6727 vpushs(size);
6728 #endif
6729 gfunc_call(3);
6733 #define DIF_FIRST 1
6734 #define DIF_SIZE_ONLY 2
6735 #define DIF_HAVE_ELEM 4
6737 /* t is the array or struct type. c is the array or struct
6738 address. cur_field is the pointer to the current
6739 field; for arrays its 'c' member contains the current start
6740 index. 'flags' is as in decl_initializer.
6741 'al' contains the already initialized length of the
6742 current container (starting at c). This returns the new length of that. */
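/* Illustrative inputs handled here; the second array line uses the GNU
   range extension, and the copy loop at the end of this function
   replicates the single parsed element over the whole [3 ... 5] range,
   while holes such as v[0] and v[2] are zero-filled by init_putz():

       struct P { int x, y; };
       struct P p = { .y = 2, .x = 1 };
       int v[6] = { [1] = 3, [3 ... 5] = 7 };
*/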
6743 static int decl_designator(CType *type, Section *sec, unsigned long c,
6744 Sym **cur_field, int flags, int al)
6746 Sym *s, *f;
6747 int index, index_last, align, l, nb_elems, elem_size;
6748 unsigned long corig = c;
6750 elem_size = 0;
6751 nb_elems = 1;
6753 if (flags & DIF_HAVE_ELEM)
6754 goto no_designator;
6756 if (gnu_ext && tok >= TOK_UIDENT) {
6757 l = tok, next();
6758 if (tok == ':')
6759 goto struct_field;
6760 unget_tok(l);
6763 /* NOTE: we only support ranges for the last designator */
6764 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6765 if (tok == '[') {
6766 if (!(type->t & VT_ARRAY))
6767 expect("array type");
6768 next();
6769 index = index_last = expr_const();
6770 if (tok == TOK_DOTS && gnu_ext) {
6771 next();
6772 index_last = expr_const();
6774 skip(']');
6775 s = type->ref;
6776 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6777 index_last < index)
6778 tcc_error("invalid index");
6779 if (cur_field)
6780 (*cur_field)->c = index_last;
6781 type = pointed_type(type);
6782 elem_size = type_size(type, &align);
6783 c += index * elem_size;
6784 nb_elems = index_last - index + 1;
6785 } else {
6786 int cumofs;
6787 next();
6788 l = tok;
6789 struct_field:
6790 next();
6791 if ((type->t & VT_BTYPE) != VT_STRUCT)
6792 expect("struct/union type");
6793 cumofs = 0;
6794 f = find_field(type, l, &cumofs);
6795 if (!f)
6796 expect("field");
6797 if (cur_field)
6798 *cur_field = f;
6799 type = &f->type;
6800 c += cumofs + f->c;
6802 cur_field = NULL;
6804 if (!cur_field) {
6805 if (tok == '=') {
6806 next();
6807 } else if (!gnu_ext) {
6808 expect("=");
6810 } else {
6811 no_designator:
6812 if (type->t & VT_ARRAY) {
6813 index = (*cur_field)->c;
6814 if (type->ref->c >= 0 && index >= type->ref->c)
6815 tcc_error("index too large");
6816 type = pointed_type(type);
6817 c += index * type_size(type, &align);
6818 } else {
6819 f = *cur_field;
6820 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6821 *cur_field = f = f->next;
6822 if (!f)
6823 tcc_error("too many field init");
6824 type = &f->type;
6825 c += f->c;
6828 /* must put zero in holes (note that doing it that way
6829 ensures that it even works with designators) */
6830 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6831 init_putz(sec, corig + al, c - corig - al);
6832 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6834 /* XXX: make it more general */
6835 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6836 unsigned long c_end;
6837 uint8_t *src, *dst;
6838 int i;
6840 if (!sec) {
6841 vset(type, VT_LOCAL|VT_LVAL, c);
6842 for (i = 1; i < nb_elems; i++) {
6843 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6844 vswap();
6845 vstore();
6847 vpop();
6848 } else if (!NODATA_WANTED) {
6849 c_end = c + nb_elems * elem_size;
6850 if (c_end > sec->data_allocated)
6851 section_realloc(sec, c_end);
6852 src = sec->data + c;
6853 dst = src;
6854 for(i = 1; i < nb_elems; i++) {
6855 dst += elem_size;
6856 memcpy(dst, src, elem_size);
6860 c += nb_elems * type_size(type, &align);
6861 if (c - corig > al)
6862 al = c - corig;
6863 return al;
6866 /* store a value or an expression directly in global data or in a local array */
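/* For instance, with a (hypothetical) file-scope input

       struct B { unsigned a : 3, b : 13; };
       static struct B gb = { 5, 300 };

   the bitfield branch below or-s the shifted constant bits directly into
   the bytes reserved in the data section, whereas inside a function the
   same initializer falls through to the vset()/vstore() path and becomes
   ordinary stores into the stack slot. */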
6867 static void init_putv(CType *type, Section *sec, unsigned long c)
6869 int bt;
6870 void *ptr;
6871 CType dtype;
6873 dtype = *type;
6874 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6876 if (sec) {
6877 int size, align;
6878 /* XXX: not portable */
6879 /* XXX: generate error if incorrect relocation */
6880 gen_assign_cast(&dtype);
6881 bt = type->t & VT_BTYPE;
6883 if ((vtop->r & VT_SYM)
6884 && bt != VT_PTR
6885 && bt != VT_FUNC
6886 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6887 || (type->t & VT_BITFIELD))
6888 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6890 tcc_error("initializer element is not computable at load time");
6892 if (NODATA_WANTED) {
6893 vtop--;
6894 return;
6897 size = type_size(type, &align);
6898 section_reserve(sec, c + size);
6899 ptr = sec->data + c;
6901 /* XXX: make code faster ? */
6902 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6903 vtop->sym->v >= SYM_FIRST_ANOM &&
6904 /* XXX This rejects compound literals like
6905 '(void *){ptr}'. The problem is that '&sym' is
6906 represented the same way, which would be ruled out
6907 by the SYM_FIRST_ANOM check above, but also '"string"'
6908 in 'char *p = "string"' is represented the same
6909 with the type being VT_PTR and the symbol being an
6910 anonymous one. That is, there's no difference in vtop
6911 between '(void *){x}' and '&(void *){x}'. Ignore
6912 pointer typed entities here. Hopefully no real code
6913 will ever use compound literals with scalar type. */
6914 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6915 /* These come from compound literals, memcpy stuff over. */
6916 Section *ssec;
6917 ElfSym *esym;
6918 ElfW_Rel *rel;
6919 esym = elfsym(vtop->sym);
6920 ssec = tcc_state->sections[esym->st_shndx];
6921 memmove (ptr, ssec->data + esym->st_value, size);
6922 if (ssec->reloc) {
6923 /* We need to copy over all memory contents, and that
6924 includes relocations. Use the fact that relocs are
6925 created in order, so look from the end of relocs
6926 until we hit one before the copied region. */
6927 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6928 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6929 while (num_relocs--) {
6930 rel--;
6931 if (rel->r_offset >= esym->st_value + size)
6932 continue;
6933 if (rel->r_offset < esym->st_value)
6934 break;
6935 /* Note: if the same fields are initialized multiple
6936 times (possible with designators) then we possibly
6937 add multiple relocations for the same offset here.
6938 That would lead to wrong code; the last reloc needs
6939 to win. We clean this up later after the whole
6940 initializer is parsed. */
6941 put_elf_reloca(symtab_section, sec,
6942 c + rel->r_offset - esym->st_value,
6943 ELFW(R_TYPE)(rel->r_info),
6944 ELFW(R_SYM)(rel->r_info),
6945 #if PTR_SIZE == 8
6946 rel->r_addend
6947 #else
6948 0
6949 #endif
6950 );
6951 }
6952 }
6953 } else {
6954 if (type->t & VT_BITFIELD) {
6955 int bit_pos, bit_size, bits, n;
6956 unsigned char *p, v, m;
6957 bit_pos = BIT_POS(vtop->type.t);
6958 bit_size = BIT_SIZE(vtop->type.t);
6959 p = (unsigned char*)ptr + (bit_pos >> 3);
6960 bit_pos &= 7, bits = 0;
6961 while (bit_size) {
6962 n = 8 - bit_pos;
6963 if (n > bit_size)
6964 n = bit_size;
6965 v = vtop->c.i >> bits << bit_pos;
6966 m = ((1 << n) - 1) << bit_pos;
6967 *p = (*p & ~m) | (v & m);
6968 bits += n, bit_size -= n, bit_pos = 0, ++p;
6970 } else
6971 switch(bt) {
6972 /* XXX: when cross-compiling we assume that each type has the
6973 same representation on host and target, which is likely to
6974 be wrong in the case of long double */
6975 case VT_BOOL:
6976 vtop->c.i = vtop->c.i != 0;
6977 case VT_BYTE:
6978 *(char *)ptr |= vtop->c.i;
6979 break;
6980 case VT_SHORT:
6981 *(short *)ptr |= vtop->c.i;
6982 break;
6983 case VT_FLOAT:
6984 *(float*)ptr = vtop->c.f;
6985 break;
6986 case VT_DOUBLE:
6987 *(double *)ptr = vtop->c.d;
6988 break;
6989 case VT_LDOUBLE:
6990 #if defined TCC_IS_NATIVE_387
6991 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6992 memcpy(ptr, &vtop->c.ld, 10);
6993 #ifdef __TINYC__
6994 else if (sizeof (long double) == sizeof (double))
6995 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6996 #endif
6997 else if (vtop->c.ld == 0.0)
6998 ;
6999 else
7000 #endif
7001 if (sizeof(long double) == LDOUBLE_SIZE)
7002 *(long double*)ptr = vtop->c.ld;
7003 else if (sizeof(double) == LDOUBLE_SIZE)
7004 *(double *)ptr = (double)vtop->c.ld;
7005 else
7006 tcc_error("can't cross compile long double constants");
7007 break;
7008 #if PTR_SIZE != 8
7009 case VT_LLONG:
7010 *(long long *)ptr |= vtop->c.i;
7011 break;
7012 #else
7013 case VT_LLONG:
7014 #endif
7015 case VT_PTR:
7017 addr_t val = vtop->c.i;
7018 #if PTR_SIZE == 8
7019 if (vtop->r & VT_SYM)
7020 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7021 else
7022 *(addr_t *)ptr |= val;
7023 #else
7024 if (vtop->r & VT_SYM)
7025 greloc(sec, vtop->sym, c, R_DATA_PTR);
7026 *(addr_t *)ptr |= val;
7027 #endif
7028 break;
7030 default:
7032 int val = vtop->c.i;
7033 #if PTR_SIZE == 8
7034 if (vtop->r & VT_SYM)
7035 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7036 else
7037 *(int *)ptr |= val;
7038 #else
7039 if (vtop->r & VT_SYM)
7040 greloc(sec, vtop->sym, c, R_DATA_PTR);
7041 *(int *)ptr |= val;
7042 #endif
7043 break;
7047 vtop--;
7048 } else {
7049 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7050 vswap();
7051 vstore();
7052 vpop();
7056 /* 't' contains the type and storage info. 'c' is the offset of the
7057 object in section 'sec'. If 'sec' is NULL, it means stack based
7058 allocation. 'flags & DIF_FIRST' is true if the array '{' must be read (multi-
7059 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7060 size-only evaluation is wanted (only for arrays). */
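/* A few illustrative inputs and the branches they take below:

       char s1[4] = "abc";              string branch, trailing 0 fits
       char s2[3] = "abc";              no room, the trailing 0 is dropped
       char s3[]  = { "ab" };           braces around the literal accepted
       int  m[2][2] = { 1, 2, 3, 4 };   do_init_list with implicit braces
*/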
7061 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7062 int flags)
7064 int len, n, no_oblock, nb, i;
7065 int size1, align1;
7066 Sym *s, *f;
7067 Sym indexsym;
7068 CType *t1;
7070 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7071 /* In case of strings we have special handling for arrays, so
7072 don't consume them as an initializer value (which would commit them
7073 to some anonymous symbol). */
7074 tok != TOK_LSTR && tok != TOK_STR &&
7075 !(flags & DIF_SIZE_ONLY)) {
7076 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7077 flags |= DIF_HAVE_ELEM;
7080 if ((flags & DIF_HAVE_ELEM) &&
7081 !(type->t & VT_ARRAY) &&
7082 /* Use i_c_parameter_t to strip toplevel qualifiers.
7083 The source type might have VT_CONSTANT set, which is
7084 of course assignable to non-const elements. */
7085 is_compatible_unqualified_types(type, &vtop->type)) {
7086 init_putv(type, sec, c);
7087 } else if (type->t & VT_ARRAY) {
7088 s = type->ref;
7089 n = s->c;
7090 t1 = pointed_type(type);
7091 size1 = type_size(t1, &align1);
7093 no_oblock = 1;
7094 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7095 tok == '{') {
7096 if (tok != '{')
7097 tcc_error("character array initializer must be a literal,"
7098 " optionally enclosed in braces");
7099 skip('{');
7100 no_oblock = 0;
7103 /* only parse strings here if the type is correct (otherwise: handle
7104 them as ((w)char *) expressions) */
7105 if ((tok == TOK_LSTR &&
7106 #ifdef TCC_TARGET_PE
7107 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7108 #else
7109 (t1->t & VT_BTYPE) == VT_INT
7110 #endif
7111 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7112 len = 0;
7113 while (tok == TOK_STR || tok == TOK_LSTR) {
7114 int cstr_len, ch;
7116 /* compute maximum number of chars wanted */
7117 if (tok == TOK_STR)
7118 cstr_len = tokc.str.size;
7119 else
7120 cstr_len = tokc.str.size / sizeof(nwchar_t);
7121 cstr_len--;
7122 nb = cstr_len;
7123 if (n >= 0 && nb > (n - len))
7124 nb = n - len;
7125 if (!(flags & DIF_SIZE_ONLY)) {
7126 if (cstr_len > nb)
7127 tcc_warning("initializer-string for array is too long");
7128 /* in order to go faster for the common case (char
7129 string in a global variable), we handle it
7130 specifically */
7131 if (sec && tok == TOK_STR && size1 == 1) {
7132 if (!NODATA_WANTED)
7133 memcpy(sec->data + c + len, tokc.str.data, nb);
7134 } else {
7135 for(i=0;i<nb;i++) {
7136 if (tok == TOK_STR)
7137 ch = ((unsigned char *)tokc.str.data)[i];
7138 else
7139 ch = ((nwchar_t *)tokc.str.data)[i];
7140 vpushi(ch);
7141 init_putv(t1, sec, c + (len + i) * size1);
7145 len += nb;
7146 next();
7148 /* only add trailing zero if enough storage (no
7149 warning in this case since it is standard) */
7150 if (n < 0 || len < n) {
7151 if (!(flags & DIF_SIZE_ONLY)) {
7152 vpushi(0);
7153 init_putv(t1, sec, c + (len * size1));
7155 len++;
7157 len *= size1;
7158 } else {
7159 indexsym.c = 0;
7160 f = &indexsym;
7162 do_init_list:
7163 len = 0;
7164 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7165 len = decl_designator(type, sec, c, &f, flags, len);
7166 flags &= ~DIF_HAVE_ELEM;
7167 if (type->t & VT_ARRAY) {
7168 ++indexsym.c;
7169 /* special test for multi dimensional arrays (may not
7170 be strictly correct if designators are used at the
7171 same time) */
7172 if (no_oblock && len >= n*size1)
7173 break;
7174 } else {
7175 if (s->type.t == VT_UNION)
7176 f = NULL;
7177 else
7178 f = f->next;
7179 if (no_oblock && f == NULL)
7180 break;
7183 if (tok == '}')
7184 break;
7185 skip(',');
7188 /* put zeros at the end */
7189 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7190 init_putz(sec, c + len, n*size1 - len);
7191 if (!no_oblock)
7192 skip('}');
7193 /* patch type size if needed, which happens only for array types */
7194 if (n < 0)
7195 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7196 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7197 size1 = 1;
7198 no_oblock = 1;
7199 if ((flags & DIF_FIRST) || tok == '{') {
7200 skip('{');
7201 no_oblock = 0;
7203 s = type->ref;
7204 f = s->next;
7205 n = s->c;
7206 goto do_init_list;
7207 } else if (tok == '{') {
7208 if (flags & DIF_HAVE_ELEM)
7209 skip(';');
7210 next();
7211 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7212 skip('}');
7213 } else if ((flags & DIF_SIZE_ONLY)) {
7214 /* If we supported only ISO C we wouldn't have to accept calling
7215 this on anything other than an array if DIF_SIZE_ONLY (and even then
7216 only on the outermost level, so no recursion would be needed),
7217 because initializing a flex array member isn't supported.
7218 But GNU C supports it, so we need to recurse even into
7219 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7220 /* just skip expression */
7221 skip_or_save_block(NULL);
7222 } else {
7223 if (!(flags & DIF_HAVE_ELEM)) {
7224 /* This should happen only when we haven't parsed
7225 the init element above for fear of committing a
7226 string constant to memory too early. */
7227 if (tok != TOK_STR && tok != TOK_LSTR)
7228 expect("string constant");
7229 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7231 init_putv(type, sec, c);
7235 /* parse an initializer for type 't' if 'has_init' is non zero, and
7236 allocate space in local or global data space ('r' is either
7237 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7238 variable 'v' of scope 'scope' is declared before initializers
7239 are parsed. If 'v' is zero, then a reference to the new object
7240 is put in the value stack. If 'has_init' is 2, a special parsing
7241 is done to handle string constants. */
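/* E.g. a (hypothetical) input such as

       struct V { int n; int data[]; };
       static struct V v = { 3, { 1, 2, 3 } };

   has unknown size up front: the DIF_SIZE_ONLY pass below fixes the
   element count of the flexible member, the real size is computed, and
   after the second pass the count is patched back to -1 so a later
   declaration using the same struct type gets its own size. */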
7242 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7243 int has_init, int v, int scope)
7245 int size, align, addr;
7246 TokenString *init_str = NULL;
7248 Section *sec;
7249 Sym *flexible_array;
7250 Sym *sym = NULL;
7251 int saved_nocode_wanted = nocode_wanted;
7252 #ifdef CONFIG_TCC_BCHECK
7253 int bcheck;
7254 #endif
7256 /* Always allocate static or global variables */
7257 if (v && (r & VT_VALMASK) == VT_CONST)
7258 nocode_wanted |= 0x80000000;
7260 #ifdef CONFIG_TCC_BCHECK
7261 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7262 #endif
7264 flexible_array = NULL;
7265 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7266 Sym *field = type->ref->next;
7267 if (field) {
7268 while (field->next)
7269 field = field->next;
7270 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7271 flexible_array = field;
7275 size = type_size(type, &align);
7276 /* If unknown size, we must evaluate it before
7277 evaluating initializers because
7278 initializers can generate global data too
7279 (e.g. string pointers or ISO C99 compound
7280 literals). It also simplifies the
7281 handling of local initializers */
7282 if (size < 0 || (flexible_array && has_init)) {
7283 if (!has_init)
7284 tcc_error("unknown type size");
7285 /* get all init string */
7286 if (has_init == 2) {
7287 init_str = tok_str_alloc();
7288 /* only get strings */
7289 while (tok == TOK_STR || tok == TOK_LSTR) {
7290 tok_str_add_tok(init_str);
7291 next();
7293 tok_str_add(init_str, -1);
7294 tok_str_add(init_str, 0);
7295 } else {
7296 skip_or_save_block(&init_str);
7298 unget_tok(0);
7300 /* compute size */
7301 begin_macro(init_str, 1);
7302 next();
7303 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7304 /* prepare second initializer parsing */
7305 macro_ptr = init_str->str;
7306 next();
7308 /* if still unknown size, error */
7309 size = type_size(type, &align);
7310 if (size < 0)
7311 tcc_error("unknown type size");
7313 /* If there's a flexible array member and it was used in the
7314 initializer, adjust the size. */
7315 if (flexible_array &&
7316 flexible_array->type.ref->c > 0)
7317 size += flexible_array->type.ref->c
7318 * pointed_size(&flexible_array->type);
7319 /* take into account specified alignment if bigger */
7320 if (ad->a.aligned) {
7321 int speca = 1 << (ad->a.aligned - 1);
7322 if (speca > align)
7323 align = speca;
7324 } else if (ad->a.packed) {
7325 align = 1;
7328 if (!v && NODATA_WANTED)
7329 size = 0, align = 1;
7331 if ((r & VT_VALMASK) == VT_LOCAL) {
7332 sec = NULL;
7333 #ifdef CONFIG_TCC_BCHECK
7334 if (bcheck && (type->t & VT_ARRAY)) {
7335 loc--;
7337 #endif
7338 loc = (loc - size) & -align;
7339 addr = loc;
7340 #ifdef CONFIG_TCC_BCHECK
7341 /* handles bounds */
7342 /* XXX: currently, since we do only one pass, we cannot track
7343 '&' operators, so we add only arrays */
7344 if (bcheck && (type->t & VT_ARRAY)) {
7345 addr_t *bounds_ptr;
7346 /* add padding between regions */
7347 loc--;
7348 /* then add local bound info */
7349 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7350 bounds_ptr[0] = addr;
7351 bounds_ptr[1] = size;
7353 #endif
7354 if (v) {
7355 /* local variable */
7356 #ifdef CONFIG_TCC_ASM
7357 if (ad->asm_label) {
7358 int reg = asm_parse_regvar(ad->asm_label);
7359 if (reg >= 0)
7360 r = (r & ~VT_VALMASK) | reg;
7362 #endif
7363 sym = sym_push(v, type, r, addr);
7364 if (ad->cleanup_func) {
7365 Sym *cls = sym_push2(&all_cleanups,
7366 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7367 cls->prev_tok = sym;
7368 cls->next = ad->cleanup_func;
7369 cls->ncl = cur_scope->cl.s;
7370 cur_scope->cl.s = cls;
7373 sym->a = ad->a;
7374 } else {
7375 /* push local reference */
7376 vset(type, r, addr);
7378 } else {
7379 if (v && scope == VT_CONST) {
7380 /* see if the symbol was already defined */
7381 sym = sym_find(v);
7382 if (sym) {
7383 patch_storage(sym, ad, type);
7384 /* we accept several definitions of the same global variable. */
7385 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7386 goto no_alloc;
7390 /* allocate symbol in corresponding section */
7391 sec = ad->section;
7392 if (!sec) {
7393 if (has_init)
7394 sec = data_section;
7395 else if (tcc_state->nocommon)
7396 sec = bss_section;
7399 if (sec) {
7400 addr = section_add(sec, size, align);
7401 #ifdef CONFIG_TCC_BCHECK
7402 /* add padding if bound check */
7403 if (bcheck)
7404 section_add(sec, 1, 1);
7405 #endif
7406 } else {
7407 addr = align; /* SHN_COMMON is special, symbol value is align */
7408 sec = common_section;
7411 if (v) {
7412 if (!sym) {
7413 sym = sym_push(v, type, r | VT_SYM, 0);
7414 patch_storage(sym, ad, NULL);
7416 /* update symbol definition */
7417 put_extern_sym(sym, sec, addr, size);
7418 } else {
7419 /* push global reference */
7420 vpush_ref(type, sec, addr, size);
7421 sym = vtop->sym;
7422 vtop->r |= r;
7425 #ifdef CONFIG_TCC_BCHECK
7426 /* handle bounds now because the symbol must be defined
7427 before the relocation */
7428 if (bcheck) {
7429 addr_t *bounds_ptr;
7431 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7432 /* then add global bound info */
7433 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7434 bounds_ptr[0] = 0; /* relocated */
7435 bounds_ptr[1] = size;
7437 #endif
7440 if (type->t & VT_VLA) {
7441 int a;
7443 if (NODATA_WANTED)
7444 goto no_alloc;
7446 /* save current stack pointer */
7447 if (root_scope->vla.loc == 0) {
7448 struct scope *v = cur_scope;
7449 gen_vla_sp_save(loc -= PTR_SIZE);
7450 do v->vla.loc = loc; while ((v = v->prev));
7453 vla_runtime_type_size(type, &a);
7454 gen_vla_alloc(type, a);
7455 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7456 /* on _WIN64, because of the function args scratch area, the
7457 result of alloca differs from RSP and is returned in RAX. */
7458 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7459 #endif
7460 gen_vla_sp_save(addr);
7461 cur_scope->vla.loc = addr;
7462 cur_scope->vla.num++;
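/* Sketch of what this VLA path handles, e.g.

       void f(int n) { int buf[n]; ... }

   The first VLA in a function saves the stack pointer once for the whole
   scope chain, gen_vla_alloc() then grows the stack by the runtime size,
   and the resulting address is recorded so scope exit (or a goto) can
   restore the stack via vla_restore(). */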
7464 } else if (has_init) {
7465 size_t oldreloc_offset = 0;
7466 if (sec && sec->reloc)
7467 oldreloc_offset = sec->reloc->data_offset;
7468 decl_initializer(type, sec, addr, DIF_FIRST);
7469 if (sec && sec->reloc)
7470 squeeze_multi_relocs(sec, oldreloc_offset);
7471 /* patch flexible array member size back to -1, */
7472 /* for possible subsequent similar declarations */
7473 if (flexible_array)
7474 flexible_array->type.ref->c = -1;
7477 no_alloc:
7478 /* restore parse state if needed */
7479 if (init_str) {
7480 end_macro();
7481 next();
7484 nocode_wanted = saved_nocode_wanted;
7487 /* parse a function defined by symbol 'sym' and generate its code in
7488 'cur_text_section' */
7489 static void gen_function(Sym *sym)
7491 /* Initialize VLA state */
7492 struct scope f = { 0 };
7493 cur_scope = root_scope = &f;
7495 nocode_wanted = 0;
7496 ind = cur_text_section->data_offset;
7497 if (sym->a.aligned) {
7498 size_t newoff = section_add(cur_text_section, 0,
7499 1 << (sym->a.aligned - 1));
7500 gen_fill_nops(newoff - ind);
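/* For example, a function carrying __attribute__((aligned(16))) stores
   aligned == 5 (log2 + 1), so section_add() rounds the function start up
   to 16 bytes and the gap just created is filled with nops. */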
7502 /* NOTE: we patch the symbol size later */
7503 put_extern_sym(sym, cur_text_section, ind, 0);
7505 funcname = get_tok_str(sym->v, NULL);
7506 func_ind = ind;
7508 /* put debug symbol */
7509 tcc_debug_funcstart(tcc_state, sym);
7510 /* push a dummy symbol to enable local sym storage */
7511 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7512 local_scope = 1; /* for function parameters */
7513 gfunc_prolog(&sym->type);
7514 local_scope = 0;
7515 rsym = 0;
7516 clear_temp_local_var_list();
7517 block(0);
7518 gsym(rsym);
7519 nocode_wanted = 0;
7520 gfunc_epilog();
7521 cur_text_section->data_offset = ind;
7522 /* reset local stack */
7523 sym_pop(&local_stack, NULL, 0);
7524 local_scope = 0;
7525 label_pop(&global_label_stack, NULL, 0);
7526 sym_pop(&all_cleanups, NULL, 0);
7527 /* patch symbol size */
7528 elfsym(sym)->st_size = ind - func_ind;
7529 /* end of function */
7530 tcc_debug_funcend(tcc_state, ind - func_ind);
7531 /* It's better to crash than to generate wrong code */
7532 cur_text_section = NULL;
7533 funcname = ""; /* for safety */
7534 func_vt.t = VT_VOID; /* for safety */
7535 func_var = 0; /* for safety */
7536 ind = 0; /* for safety */
7537 nocode_wanted = 0x80000000;
7538 check_vstack();
7541 static void gen_inline_functions(TCCState *s)
7543 Sym *sym;
7544 int inline_generated, i;
7545 struct InlineFunc *fn;
7547 tcc_open_bf(s, ":inline:", 0);
7548 /* iterate while inline functions are referenced */
7549 do {
7550 inline_generated = 0;
7551 for (i = 0; i < s->nb_inline_fns; ++i) {
7552 fn = s->inline_fns[i];
7553 sym = fn->sym;
7554 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7555 /* the function was used or forced (and is then not internal):
7556 generate its code and convert it to a normal function */
7557 fn->sym = NULL;
7558 if (file)
7559 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7560 begin_macro(fn->func_str, 1);
7561 next();
7562 cur_text_section = text_section;
7563 gen_function(sym);
7564 end_macro();
7566 inline_generated = 1;
7569 } while (inline_generated);
7570 tcc_close();
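/* Illustration of the loop above: given

       static inline int twice(int x) { return x + x; }
       int use(int v) { return twice(v); }

   the body of twice() is only saved as a token string when first parsed;
   because the call in use() gave it an ELF symbol (sym->c != 0) it is
   compiled here, while an unreferenced static inline is never emitted. */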
7573 ST_FUNC void free_inline_functions(TCCState *s)
7575 int i;
7576 /* free tokens of unused inline functions */
7577 for (i = 0; i < s->nb_inline_fns; ++i) {
7578 struct InlineFunc *fn = s->inline_fns[i];
7579 if (fn->sym)
7580 tok_str_free(fn->func_str);
7582 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7585 /* 'l' is VT_LOCAL or VT_CONST to define the default storage type, or VT_CMP
7586 if parsing an old-style parameter decl list (FUNC_SYM is set in that case) */
7587 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7589 int v, has_init, r;
7590 CType type, btype;
7591 Sym *sym;
7592 AttributeDef ad, adbase;
7594 while (1) {
7595 if (tok == TOK_STATIC_ASSERT) {
7596 int c;
7598 next();
7599 skip('(');
7600 c = expr_const();
7601 skip(',');
7602 if (c == 0)
7603 tcc_error("%s", get_tok_str(tok, &tokc));
7604 next();
7605 skip(')');
7606 skip(';');
7607 continue;
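/* E.g. a line such as

       _Static_assert(sizeof(int) == 4, "int must be 4 bytes");

   is evaluated right here at parse time; if the constant is zero the
   string after the comma (the current token) becomes the error text. */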
7609 if (!parse_btype(&btype, &adbase)) {
7610 if (is_for_loop_init)
7611 return 0;
7612 /* skip redundant ';' if not in old parameter decl scope */
7613 if (tok == ';' && l != VT_CMP) {
7614 next();
7615 continue;
7617 if (l != VT_CONST)
7618 break;
7619 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7620 /* global asm block */
7621 asm_global_instr();
7622 continue;
7624 if (tok >= TOK_UIDENT) {
7625 /* special test for old K&R protos without explicit int
7626 type. Only accepted when defining global data */
7627 btype.t = VT_INT;
7628 } else {
7629 if (tok != TOK_EOF)
7630 expect("declaration");
7631 break;
7634 if (tok == ';') {
7635 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7636 int v = btype.ref->v;
7637 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7638 tcc_warning("unnamed struct/union that defines no instances");
7639 next();
7640 continue;
7642 if (IS_ENUM(btype.t)) {
7643 next();
7644 continue;
7647 while (1) { /* iterate thru each declaration */
7648 type = btype;
7649 /* If the base type itself was an array type of unspecified
7650 size (like in 'typedef int arr[]; arr x = {1};') then
7651 we will overwrite the unknown size with the real one for
7652 this decl. We need to unshare the ref symbol holding
7653 that size. */
7654 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7655 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7657 ad = adbase;
7658 type_decl(&type, &ad, &v, TYPE_DIRECT);
7659 #if 0
7661 char buf[500];
7662 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7663 printf("type = '%s'\n", buf);
7665 #endif
7666 if ((type.t & VT_BTYPE) == VT_FUNC) {
7667 /* if old style function prototype, we accept a
7668 declaration list */
7669 sym = type.ref;
7670 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7671 decl0(VT_CMP, 0, sym);
7672 /* always compile 'extern inline' */
7673 if (type.t & VT_EXTERN)
7674 type.t &= ~VT_INLINE;
7677 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7678 ad.asm_label = asm_label_instr();
7679 /* parse one last attribute list, after asm label */
7680 parse_attribute(&ad);
7681 #if 0
7682 /* gcc does not allow __asm__("label") with function definition,
7683 but why not ... */
7684 if (tok == '{')
7685 expect(";");
7686 #endif
7689 #ifdef TCC_TARGET_PE
7690 if (ad.a.dllimport || ad.a.dllexport) {
7691 if (type.t & VT_STATIC)
7692 tcc_error("cannot have dll linkage with static");
7693 if (type.t & VT_TYPEDEF) {
7694 tcc_warning("'%s' attribute ignored for typedef",
7695 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7696 (ad.a.dllexport = 0, "dllexport"));
7697 } else if (ad.a.dllimport) {
7698 if ((type.t & VT_BTYPE) == VT_FUNC)
7699 ad.a.dllimport = 0;
7700 else
7701 type.t |= VT_EXTERN;
7704 #endif
7705 if (tok == '{') {
7706 if (l != VT_CONST)
7707 tcc_error("cannot use local functions");
7708 if ((type.t & VT_BTYPE) != VT_FUNC)
7709 expect("function definition");
7711 /* reject abstract declarators in function definition;
7712 make old-style params without a decl have int type */
7713 sym = type.ref;
7714 while ((sym = sym->next) != NULL) {
7715 if (!(sym->v & ~SYM_FIELD))
7716 expect("identifier");
7717 if (sym->type.t == VT_VOID)
7718 sym->type = int_type;
7721 /* put function symbol */
7722 type.t &= ~VT_EXTERN;
7723 sym = external_sym(v, &type, 0, &ad);
7724 /* static inline functions are just recorded as a kind
7725 of macro. Their code will be emitted at the end of
7726 the compilation unit only if they are used */
7727 if (sym->type.t & VT_INLINE) {
7728 struct InlineFunc *fn;
7729 const char *filename;
7731 filename = file ? file->filename : "";
7732 fn = tcc_malloc(sizeof *fn + strlen(filename));
7733 strcpy(fn->filename, filename);
7734 fn->sym = sym;
7735 skip_or_save_block(&fn->func_str);
7736 dynarray_add(&tcc_state->inline_fns,
7737 &tcc_state->nb_inline_fns, fn);
7738 } else {
7739 /* compute text section */
7740 cur_text_section = ad.section;
7741 if (!cur_text_section)
7742 cur_text_section = text_section;
7743 gen_function(sym);
7745 break;
7746 } else {
7747 if (l == VT_CMP) {
7748 /* find parameter in function parameter list */
7749 for (sym = func_sym->next; sym; sym = sym->next)
7750 if ((sym->v & ~SYM_FIELD) == v)
7751 goto found;
7752 tcc_error("declaration for parameter '%s' but no such parameter",
7753 get_tok_str(v, NULL));
7754 found:
7755 if (type.t & VT_STORAGE) /* 'register' is okay */
7756 tcc_error("storage class specified for '%s'",
7757 get_tok_str(v, NULL));
7758 if (sym->type.t != VT_VOID)
7759 tcc_error("redefinition of parameter '%s'",
7760 get_tok_str(v, NULL));
7761 convert_parameter_type(&type);
7762 sym->type = type;
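/* This is the old-style (K&R) declaration list, e.g.

       int add(a, b)
           int a, b;
       { return a + b; }

   decl0() is re-entered with l == VT_CMP for the 'int a, b;' line; each
   named parameter still has the placeholder VT_VOID type and is given its
   declared type here. */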
7763 } else if (type.t & VT_TYPEDEF) {
7764 /* save typedefed type */
7765 /* XXX: test storage specifiers ? */
7766 sym = sym_find(v);
7767 if (sym && sym->sym_scope == local_scope) {
7768 if (!is_compatible_types(&sym->type, &type)
7769 || !(sym->type.t & VT_TYPEDEF))
7770 tcc_error("incompatible redefinition of '%s'",
7771 get_tok_str(v, NULL));
7772 sym->type = type;
7773 } else {
7774 sym = sym_push(v, &type, 0, 0);
7776 sym->a = ad.a;
7777 sym->f = ad.f;
7778 } else if ((type.t & VT_BTYPE) == VT_VOID
7779 && !(type.t & VT_EXTERN)) {
7780 tcc_error("declaration of void object");
7781 } else {
7782 r = 0;
7783 if ((type.t & VT_BTYPE) == VT_FUNC) {
7784 /* external function definition */
7785 /* specific case for func_call attribute */
7786 type.ref->f = ad.f;
7787 } else if (!(type.t & VT_ARRAY)) {
7788 /* not lvalue if array */
7789 r |= lvalue_type(type.t);
7791 has_init = (tok == '=');
7792 if (has_init && (type.t & VT_VLA))
7793 tcc_error("variable length array cannot be initialized");
7794 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7795 || (type.t & VT_BTYPE) == VT_FUNC
7796 /* as with GCC, uninitialized global arrays with no size
7797 are considered extern: */
7798 || ((type.t & VT_ARRAY) && !has_init
7799 && l == VT_CONST && type.ref->c < 0)
7801 /* external variable or function */
7802 type.t |= VT_EXTERN;
7803 sym = external_sym(v, &type, r, &ad);
7804 if (ad.alias_target) {
7805 ElfSym *esym;
7806 Sym *alias_target;
7807 alias_target = sym_find(ad.alias_target);
7808 esym = elfsym(alias_target);
7809 if (!esym)
7810 tcc_error("unsupported forward __alias__ attribute");
7811 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7813 } else {
7814 if (type.t & VT_STATIC)
7815 r |= VT_CONST;
7816 else
7817 r |= l;
7818 if (has_init)
7819 next();
7820 else if (l == VT_CONST)
7821 /* uninitialized global variables may be overridden */
7822 type.t |= VT_EXTERN;
7823 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7826 if (tok != ',') {
7827 if (is_for_loop_init)
7828 return 1;
7829 skip(';');
7830 break;
7832 next();
7836 return 0;
7839 static void decl(int l)
7841 decl0(l, 0, NULL);
7844 /* ------------------------------------------------------------------------- */
7845 #undef gjmp_addr
7846 #undef gjmp
7847 /* ------------------------------------------------------------------------- */