/*
 * tccgen.c (from tinycc.git)
 * Last commit: "Fix indentation and run_test type"
 * blob a6181b0c2a47b8efe26c773ddcde0d290aea3055
 */
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind; }
63 /* Set 'nocode_wanted' after unconditional jumps */
64 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
65 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
69 #define gjmp gjmp_acs
70 /* <---- */
72 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA struct temp_local_variable {
95 int location; //offset on stack. Svalue.c.i
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC int ieee_finite(double d)
149 int p[4];
150 memcpy(p, &d, sizeof(double));
151 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */

#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
240 /* put function symbol */
241 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
243 char buf[512];
245 if (!s1->do_debug)
246 return;
248 /* stabs info */
249 /* XXX: we put here a dummy type */
250 snprintf(buf, sizeof(buf), "%s:%c1",
251 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
252 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
253 cur_text_section, sym->c);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE, 0, file->line_num, 0);
257 last_ind = 0;
258 last_line_num = 0;
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC int tccgen_compile(TCCState *s1)
272 cur_text_section = NULL;
273 funcname = "";
274 anon_sym = SYM_FIRST_ANOM;
275 section_sym = 0;
276 const_wanted = 0;
277 nocode_wanted = 0x80000000;
278 local_scope = 0;
280 /* define some often used types */
281 int_type.t = VT_INT;
282 char_pointer_type.t = VT_BYTE;
283 mk_pointer(&char_pointer_type);
284 #if PTR_SIZE == 4
285 size_type.t = VT_INT | VT_UNSIGNED;
286 ptrdiff_type.t = VT_INT;
287 #elif LONG_SIZE == 4
288 size_type.t = VT_LLONG | VT_UNSIGNED;
289 ptrdiff_type.t = VT_LLONG;
290 #else
291 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
292 ptrdiff_type.t = VT_LONG | VT_LLONG;
293 #endif
294 func_old_type.t = VT_FUNC;
295 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
296 func_old_type.ref->f.func_call = FUNC_CDECL;
297 func_old_type.ref->f.func_type = FUNC_OLD;
299 tcc_debug_start(s1);
301 #ifdef TCC_TARGET_ARM
302 arm_init(s1);
303 #endif
305 #ifdef INC_DEBUG
306 printf("%s: **** new file\n", file->filename);
307 #endif
309 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
310 next();
311 decl(VT_CONST);
312 gen_inline_functions(s1);
313 check_vstack();
314 /* end of translation unit info */
315 tcc_debug_end(s1);
316 return 0;
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC void update_storage(Sym *sym)
330 ElfSym *esym;
331 int sym_bind, old_sym_bind;
333 esym = elfsym(sym);
334 if (!esym)
335 return;
337 if (sym->a.visibility)
338 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
339 | sym->a.visibility;
341 if (sym->type.t & (VT_STATIC | VT_INLINE))
342 sym_bind = STB_LOCAL;
343 else if (sym->a.weak)
344 sym_bind = STB_WEAK;
345 else
346 sym_bind = STB_GLOBAL;
347 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
348 if (sym_bind != old_sym_bind) {
349 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
352 #ifdef TCC_TARGET_PE
353 if (sym->a.dllimport)
354 esym->st_other |= ST_PE_IMPORT;
355 if (sym->a.dllexport)
356 esym->st_other |= ST_PE_EXPORT;
357 #endif
359 #if 0
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym->v, NULL),
362 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
363 sym->a.visibility,
364 sym->a.dllexport,
365 sym->a.dllimport
367 #endif
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
375 addr_t value, unsigned long size,
376 int can_add_underscore)
378 int sym_type, sym_bind, info, other, t;
379 ElfSym *esym;
380 const char *name;
381 char buf1[256];
382 #ifdef CONFIG_TCC_BCHECK
383 char buf[32];
384 #endif
386 if (!sym->c) {
387 name = get_tok_str(sym->v, NULL);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state->do_bounds_check) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
393 switch(sym->v) {
394 #ifdef TCC_TARGET_PE
395 /* XXX: we rely only on malloc hooks */
396 case TOK_malloc:
397 case TOK_free:
398 case TOK_realloc:
399 case TOK_memalign:
400 case TOK_calloc:
401 #endif
402 case TOK_memcpy:
403 case TOK_memmove:
404 case TOK_memset:
405 case TOK_strlen:
406 case TOK_strcpy:
407 case TOK_alloca:
408 strcpy(buf, "__bound_");
409 strcat(buf, name);
410 name = buf;
411 break;
414 #endif
415 t = sym->type.t;
416 if ((t & VT_BTYPE) == VT_FUNC) {
417 sym_type = STT_FUNC;
418 } else if ((t & VT_BTYPE) == VT_VOID) {
419 sym_type = STT_NOTYPE;
420 } else {
421 sym_type = STT_OBJECT;
423 if (t & (VT_STATIC | VT_INLINE))
424 sym_bind = STB_LOCAL;
425 else
426 sym_bind = STB_GLOBAL;
427 other = 0;
428 #ifdef TCC_TARGET_PE
429 if (sym_type == STT_FUNC && sym->type.ref) {
430 Sym *ref = sym->type.ref;
431 if (ref->a.nodecorate) {
432 can_add_underscore = 0;
434 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
435 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
436 name = buf1;
437 other |= ST_PE_STDCALL;
438 can_add_underscore = 0;
441 #endif
442 if (tcc_state->leading_underscore && can_add_underscore) {
443 buf1[0] = '_';
444 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
445 name = buf1;
447 if (sym->asm_label)
448 name = get_tok_str(sym->asm_label, NULL);
449 info = ELFW(ST_INFO)(sym_bind, sym_type);
450 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
451 } else {
452 esym = elfsym(sym);
453 esym->st_value = value;
454 esym->st_size = size;
455 esym->st_shndx = sh_num;
457 update_storage(sym);
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit targets: relocations carry no explicit addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym *__sym_malloc(void)
497 Sym *sym_pool, *sym, *last_sym;
498 int i;
500 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
501 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
503 last_sym = sym_free_first;
504 sym = sym_pool;
505 for(i = 0; i < SYM_POOL_NB; i++) {
506 sym->next = last_sym;
507 last_sym = sym;
508 sym++;
510 sym_free_first = last_sym;
511 return last_sym;
514 static inline Sym *sym_malloc(void)
516 Sym *sym;
517 #ifndef SYM_DEBUG
518 sym = sym_free_first;
519 if (!sym)
520 sym = __sym_malloc();
521 sym_free_first = sym->next;
522 return sym;
523 #else
524 sym = tcc_malloc(sizeof(Sym));
525 return sym;
526 #endif
529 ST_INLN void sym_free(Sym *sym)
531 #ifndef SYM_DEBUG
532 sym->next = sym_free_first;
533 sym_free_first = sym;
534 #else
535 tcc_free(sym);
536 #endif
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
598 Sym *s, **ps;
599 TokenSym *ts;
601 if (local_stack)
602 ps = &local_stack;
603 else
604 ps = &global_stack;
605 s = sym_push2(ps, v, type->t, c);
606 s->type.ref = type->ref;
607 s->r = r;
608 /* don't record fields or anonymous symbols */
609 /* XXX: simplify */
610 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
611 /* record symbol in token array */
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 s->prev_tok = *ps;
618 *ps = s;
619 s->sym_scope = local_scope;
620 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v & ~SYM_STRUCT, NULL));
624 return s;
627 /* push a global identifier */
628 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
630 Sym *s, **ps;
631 s = sym_push2(&global_stack, v, t, c);
632 s->r = VT_CONST | VT_SYM;
633 /* don't record anonymous symbol */
634 if (v < SYM_FIRST_ANOM) {
635 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps != NULL && (*ps)->sym_scope)
639 ps = &(*ps)->prev_tok;
640 s->prev_tok = *ps;
641 *ps = s;
643 return s;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
650 Sym *s, *ss, **ps;
651 TokenSym *ts;
652 int v;
654 s = *ptop;
655 while(s != b) {
656 ss = s->prev;
657 v = s->v;
658 /* remove symbol in token array */
659 /* XXX: simplify */
660 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
661 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
662 if (v & SYM_STRUCT)
663 ps = &ts->sym_struct;
664 else
665 ps = &ts->sym_identifier;
666 *ps = s->prev_tok;
668 if (!keep)
669 sym_free(s);
670 s = ss;
672 if (!keep)
673 *ptop = b;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot let cpu flags if other instruction are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as their value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop->r == VT_CMP && !nocode_wanted)
692 gv(RC_INT);
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
718 /* pop stack value */
719 ST_FUNC void vpop(void)
721 int v;
722 v = vtop->r & VT_VALMASK;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
725 if (v == TREG_ST0) {
726 o(0xd8dd); /* fstp %st(0) */
727 } else
728 #endif
729 if (v == VT_CMP) {
730 /* need to put correct jump if && or || without test */
731 gsym(vtop->jtrue);
732 gsym(vtop->jfalse);
734 vtop--;
737 /* push constant of type "type" with useless value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC void vrotb(int n)
810 int i;
811 SValue tmp;
813 vcheck_cmp();
814 tmp = vtop[-n + 1];
815 for(i=-n+1;i!=0;i++)
816 vtop[i] = vtop[i+1];
817 vtop[0] = tmp;
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC void vrote(SValue *e, int n)
825 int i;
826 SValue tmp;
828 vcheck_cmp();
829 tmp = *e;
830 for(i = 0;i < n - 1; i++)
831 e[-i] = e[-i - 1];
832 e[-n + 1] = tmp;
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC void vrott(int n)
840 vrote(vtop, n);
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC void vset_VT_CMP(int op)
849 vtop->r = VT_CMP;
850 vtop->cmp_op = op;
851 vtop->jfalse = 0;
852 vtop->jtrue = 0;
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op = vtop->cmp_op;
859 if (vtop->jtrue || vtop->jfalse) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv = op & (op < 2); /* small optimization */
862 vseti(VT_JMP+inv, gvtst(inv, 0));
863 } else {
864 /* otherwise convert flags (rsp. 0/1) to register */
865 vtop->c.i = op;
866 if (op < 2) /* doesn't seem to happen */
867 vtop->r = VT_CONST;
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv, int t)
874 int *p;
875 if (vtop->r != VT_CMP) {
876 vpushi(0);
877 gen_op(TOK_NE);
878 if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
880 else if (vtop->r == VT_CONST)
881 vset_VT_CMP(vtop->c.i != 0);
882 else
883 tcc_error("ICE");
885 p = inv ? &vtop->jfalse : &vtop->jtrue;
886 *p = gjmp_append(*p, t);
889 /* Generate value test
891 * Generate a test for any value (jump, comparison and integers) */
892 static int gvtst(int inv, int t)
894 int op, u, x;
896 gvtst_set(inv, t);
898 t = vtop->jtrue, u = vtop->jfalse;
899 if (inv)
900 x = u, u = t, t = x;
901 op = vtop->cmp_op;
903 /* jump to the wanted target */
904 if (op > 1)
905 t = gjmp_cond(op ^ inv, t);
906 else if (op != inv)
907 t = gjmp(t);
908 /* resolve complementary jumps to here */
909 gsym(u);
911 vtop--;
912 return t;
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType *type, Sym *sym)
919 CValue cval;
920 cval.i = 0;
921 vsetc(type, VT_CONST | VT_SYM, &cval);
922 vtop->sym = sym;
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
928 int v;
929 Sym *sym;
931 v = anon_sym++;
932 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
933 sym->type.t |= VT_STATIC;
934 put_extern_sym(sym, sec, offset, size);
935 return sym;
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
941 vpushsym(type, get_sym_ref(type, sec, offset, size));
944 /* define a new external reference to a symbol 'v' of type 'u' */
945 ST_FUNC Sym *external_global_sym(int v, CType *type)
947 Sym *s;
949 s = sym_find(v);
950 if (!s) {
951 /* push forward reference */
952 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
953 s->type.ref = type->ref;
954 } else if (IS_ASM_SYM(s)) {
955 s->type.t = type->t | (s->type.t & VT_EXTERN);
956 s->type.ref = type->ref;
957 update_storage(s);
959 return s;
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
965 if (sa1->aligned && !sa->aligned)
966 sa->aligned = sa1->aligned;
967 sa->packed |= sa1->packed;
968 sa->weak |= sa1->weak;
969 if (sa1->visibility != STV_DEFAULT) {
970 int vis = sa->visibility;
971 if (vis == STV_DEFAULT
972 || vis > sa1->visibility)
973 vis = sa1->visibility;
974 sa->visibility = vis;
976 sa->dllexport |= sa1->dllexport;
977 sa->nodecorate |= sa1->nodecorate;
978 sa->dllimport |= sa1->dllimport;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
984 if (fa1->func_call && !fa->func_call)
985 fa->func_call = fa1->func_call;
986 if (fa1->func_type && !fa->func_type)
987 fa->func_type = fa1->func_type;
988 if (fa1->func_args && !fa->func_args)
989 fa->func_args = fa1->func_args;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
995 merge_symattr(&ad->a, &ad1->a);
996 merge_funcattr(&ad->f, &ad1->f);
998 if (ad1->section)
999 ad->section = ad1->section;
1000 if (ad1->alias_target)
1001 ad->alias_target = ad1->alias_target;
1002 if (ad1->asm_label)
1003 ad->asm_label = ad1->asm_label;
1004 if (ad1->attr_mode)
1005 ad->attr_mode = ad1->attr_mode;
1008 /* Merge some type attributes. */
1009 static void patch_type(Sym *sym, CType *type)
1011 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1012 if (!(sym->type.t & VT_EXTERN))
1013 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1014 sym->type.t &= ~VT_EXTERN;
1017 if (IS_ASM_SYM(sym)) {
1018 /* stay static if both are static */
1019 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1020 sym->type.ref = type->ref;
1023 if (!is_compatible_types(&sym->type, type)) {
1024 tcc_error("incompatible types for redefinition of '%s'",
1025 get_tok_str(sym->v, NULL));
1027 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1028 int static_proto = sym->type.t & VT_STATIC;
1029 /* warn if static follows non-static function declaration */
1030 if ((type->t & VT_STATIC) && !static_proto
1031 /* XXX this test for inline shouldn't be here. Until we
1032 implement gnu-inline mode again it silences a warning for
1033 mingw caused by our workarounds. */
1034 && !((type->t | sym->type.t) & VT_INLINE))
1035 tcc_warning("static storage ignored for redefinition of '%s'",
1036 get_tok_str(sym->v, NULL));
1038 /* set 'inline' if both agree or if one has static */
1039 if ((type->t | sym->type.t) & VT_INLINE) {
1040 if (!((type->t ^ sym->type.t) & VT_INLINE)
1041 || ((type->t | sym->type.t) & VT_STATIC))
1042 static_proto |= VT_INLINE;
1045 if (0 == (type->t & VT_EXTERN)) {
1046 /* put complete type, use static from prototype */
1047 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1048 sym->type.ref = type->ref;
1049 } else {
1050 sym->type.t &= ~VT_INLINE | static_proto;
1053 if (sym->type.ref->f.func_type == FUNC_OLD
1054 && type->ref->f.func_type != FUNC_OLD) {
1055 sym->type.ref = type->ref;
1058 } else {
1059 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1060 /* set array size if it was omitted in extern declaration */
1061 sym->type.ref->c = type->ref->c;
1063 if ((type->t ^ sym->type.t) & VT_STATIC)
1064 tcc_warning("storage mismatch for redefinition of '%s'",
1065 get_tok_str(sym->v, NULL));
1069 /* Merge some storage attributes. */
1070 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1072 if (type)
1073 patch_type(sym, type);
1075 #ifdef TCC_TARGET_PE
1076 if (sym->a.dllimport != ad->a.dllimport)
1077 tcc_error("incompatible dll linkage for redefinition of '%s'",
1078 get_tok_str(sym->v, NULL));
1079 #endif
1080 merge_symattr(&sym->a, &ad->a);
1081 if (ad->asm_label)
1082 sym->asm_label = ad->asm_label;
1083 update_storage(sym);
1086 /* copy sym to other stack */
1087 static Sym *sym_copy(Sym *s0, Sym **ps)
1089 Sym *s;
1090 s = sym_malloc(), *s = *s0;
1091 s->prev = *ps, *ps = s;
1092 if (s->v < SYM_FIRST_ANOM) {
1093 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1094 s->prev_tok = *ps, *ps = s;
1096 return s;
1099 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1100 static void sym_copy_ref(Sym *s, Sym **ps)
1102 int bt = s->type.t & VT_BTYPE;
1103 if (bt == VT_FUNC || bt == VT_PTR) {
1104 Sym **sp = &s->type.ref;
1105 for (s = *sp, *sp = NULL; s; s = s->next) {
1106 Sym *s2 = sym_copy(s, ps);
1107 sp = &(*sp = s2)->next;
1108 sym_copy_ref(s2, ps);
1113 /* define a new external reference to a symbol 'v' */
1114 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1116 Sym *s;
1118 /* look for global symbol */
1119 s = sym_find(v);
1120 while (s && s->sym_scope)
1121 s = s->prev_tok;
1123 if (!s) {
1124 /* push forward reference */
1125 s = global_identifier_push(v, type->t, 0);
1126 s->r |= r;
1127 s->a = ad->a;
1128 s->asm_label = ad->asm_label;
1129 s->type.ref = type->ref;
1130 /* copy type to the global stack */
1131 if (local_stack)
1132 sym_copy_ref(s, &global_stack);
1133 } else {
1134 patch_storage(s, ad, type);
1136 /* push variables on local_stack if any */
1137 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1138 s = sym_copy(s, &local_stack);
1139 return s;
1142 /* push a reference to global symbol v */
1143 ST_FUNC void vpush_global_sym(CType *type, int v)
1145 vpushsym(type, external_global_sym(v, type));
1148 /* save registers up to (vtop - n) stack entry */
1149 ST_FUNC void save_regs(int n)
1151 SValue *p, *p1;
1152 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1153 save_reg(p->r);
1156 /* save r to the memory stack, and mark it as being free */
1157 ST_FUNC void save_reg(int r)
1159 save_reg_upstack(r, 0);
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry.
   Every value-stack entry that lives (fully or partially, via r2) in
   register r is spilled once to a temporary stack slot, and all such
   entries are rewritten to refer to that slot instead of the register. */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, saved, size, align;
    SValue *p, *p1, sv;
    CType *type;

    /* nothing to do for non-register value locations */
    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || (p->r2 & VT_VALMASK) == r) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                /* for lvalues (register holds an address) and plain
                   integer values, spill as a pointer-sized word */
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
#if PTR_SIZE == 8
                    type = &char_pointer_type;
#else
                    type = &int_type;
#endif
                size = type_size(type, &align);
                /* grab (or create) a reusable temporary slot in the frame */
                l=get_temp_local_var(size,align);
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case: also spill the high word held
                   in the second register */
                if ((p->r2 & VT_VALMASK) < VT_CONST) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int reg;
    SValue *sv;

    for (reg = 0; reg < NB_REGS; reg++) {
        int refs = 0;
        if (!(reg_classes[reg] & rc2))
            continue;
        /* count value-stack entries currently living in this register */
        for (sv = vstack; sv <= vtop; sv++) {
            if ((sv->r & VT_VALMASK) == reg ||
                (sv->r2 & VT_VALMASK) == reg)
                refs++;
        }
        if (refs <= 1)
            return reg;
    }
    /* nothing lightly-used in rc2: fall back to the generic allocator */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register.
   Returns the register index; -1 is unreachable in practice because
   spilling always frees a register of the requested class. */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when no code is generated, any register of the class works */
            if (nocode_wanted)
                return r;
            /* register is free only if no value-stack entry uses it */
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1292 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1293 static int get_temp_local_var(int size,int align){
1294 int i;
1295 struct temp_local_variable *temp_var;
1296 int found_var;
1297 SValue *p;
1298 int r;
1299 char free;
1300 char found;
1301 found=0;
1302 for(i=0;i<nb_temp_local_vars;i++){
1303 temp_var=&arr_temp_local_vars[i];
1304 if(temp_var->size<size||align!=temp_var->align){
1305 continue;
1307 /*check if temp_var is free*/
1308 free=1;
1309 for(p=vstack;p<=vtop;p++) {
1310 r=p->r&VT_VALMASK;
1311 if(r==VT_LOCAL||r==VT_LLOCAL){
1312 if(p->c.i==temp_var->location){
1313 free=0;
1314 break;
1318 if(free){
1319 found_var=temp_var->location;
1320 found=1;
1321 break;
1324 if(!found){
1325 loc = (loc - size) & -align;
1326 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1327 temp_var=&arr_temp_local_vars[i];
1328 temp_var->location=loc;
1329 temp_var->size=size;
1330 temp_var->align=align;
1331 nb_temp_local_vars++;
1333 found_var=loc;
1335 return found_var;
/* forget all recycled temporary stack slots; their frame space is not
   reclaimed, they just stop being candidates for reuse */
static void clear_temp_local_var_list(){
	nb_temp_local_vars=0;
}
1342 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1343 if needed */
1344 static void move_reg(int r, int s, int t)
1346 SValue sv;
1348 if (r != s) {
1349 save_reg(r);
1350 sv.type.t = t;
1351 sv.type.ref = NULL;
1352 sv.r = s;
1353 sv.c.i = 0;
1354 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue): drop the dereference
   flag so the value is the address itself */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code: wrap the address in vtop with a bounds
   check before it is dereferenced */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* remember the lvalue access flags, they are restored below */
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            /* add offset 0 through the checked-pointer path to obtain a
               bounded pointer */
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
/* advance the byte pointer used by the packed-bitfield helpers by 'o'
   bytes and leave vtop as an unsigned-byte lvalue at the new address */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushi(o);
    gen_op('+');
    /* value type: plain unsigned byte */
    vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
        | (VT_BYTE|VT_UNSIGNED);
    /* access type: unsigned byte lvalue */
    vtop->r = (vtop->r & ~VT_LVAL_TYPE)
        | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
}
/* single-byte load mode for packed or otherwise unaligned bitfields:
   read the field byte by byte, shifting and OR-ing each chunk into an
   accumulator, then sign-extend if the field type is signed.
   Stack-effect comments (e.g. "B X") show the top of the value stack,
   rightmost = vtop. */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    /* o: byte offset of the first byte; bit_pos: offset within it */
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        /* n = number of field bits available in the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        /* after the first byte, step one byte at a time */
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    /* signed field: shift left then arithmetic-shift right to extend */
    if (!(type->t & VT_UNSIGNED)) {
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields:
   write the value byte by byte, merging each chunk with the unaffected
   bits of the target byte.  Stack-effect comments show vtop rightmost. */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;

    /* c: value to store is a plain constant (can be re-pushed cheaply) */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    /* o: byte offset of the first byte; bit_pos: offset within it */
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        /* n = number of field bits that land in the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* partial byte: keep the target's bits outside the field */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            /* avoid sign-propagation surprises when the mask's top bit
               is set: complement within 7 bits in that case */
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1478 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1480 int t;
1481 if (0 == sv->type.ref)
1482 return 0;
1483 t = sv->type.ref->auxtype;
1484 if (t != -1 && t != VT_STRUCT) {
1485 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1486 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1488 return t;
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures).
   Returns the register holding the (low part of the) value.  Bitfields
   are first extracted (by shifts, or byte-wise for packed layouts);
   float constants are materialized in the data section; two-register
   values (long long on 32-bit targets, QLONG/QFLOAT on 64-bit) also
   allocate a second register of class rc2 into vtop->r2. */
ST_FUNC int gv(int rc)
{
    int r, bit_pos, bit_size, size, align, rc2;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        /* _Bool bitfields always behave as unsigned */
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* packed/unaligned layout: extract byte by byte */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        /* the extracted value is now a plain scalar: load it normally */
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            unsigned long offset;
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(data_section, size, align);
            vpush_ref(&vtop->type, data_section, offset, size);
            vswap();
            init_putv(&vtop->type, data_section, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif

        r = vtop->r & VT_VALMASK;
        /* class of the second register for two-register values */
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
#ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif
#endif
        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
         || (vtop->r & VT_LVAL)
         || !(reg_classes[r] & rc)
#ifdef TCC_TARGET_RISCV64
         || ((vtop->type.t & VT_BTYPE) == VT_QLONG && (vtop->r2 >= NB_REGS || !(reg_classes[vtop->r2] & rc2)))
         || ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE && (vtop->r2 >= NB_REGS || !(reg_classes[vtop->r2] & rc2)))
#elif PTR_SIZE == 8
         || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
         || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
         || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            )
        {
            r = get_reg(rc);
#ifdef TCC_TARGET_RISCV64
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = VT_LLONG;
#elif PTR_SIZE == 8
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if PTR_SIZE == 4
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
#if 0
                    save_regs(1);
#else
                    /* lvalue_save: save only if used further down the stack */
                    save_reg_upstack(vtop->r, 1);
#endif
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                /* comparison result: materialize the flags first */
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2.
   Loading the second value may spill the first, so after both loads the
   first is checked and reloaded if it lost its register. */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#ifndef TCC_TARGET_ARM64
/* wrapper around RC_FRET to return a register class by type: long
   double needs a special class on some targets (x87 ST0 on x86-64,
   integer registers on riscv64) */
static int rc_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return RC_ST0;
    }
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return RC_IRET;
#endif
    return RC_FRET;
}
#endif
/* wrapper around REG_FRET to return a register by type: mirrors
   rc_fret() but yields the concrete register instead of a class */
static int reg_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return TREG_ST0;
    }
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return REG_IRET;
#endif
    return REG_FRET;
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints: after the call vtop[-1] holds the
   low word and vtop[0] the high word, both typed VT_INT (keeping the
   original signedness flags) */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift to get the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* addressable lvalue: second word is 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* otherwise load into a register pair and split r/r2 */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints: vtop[-1] low word, vtop[0] high
   word; result is a single stack entry of type 't' using r/r2 */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register.  On 32-bit targets a long long is split into words, each
   word duplicated recursively, then both copies are rebuilt. */
static void gv_dup(void)
{
    int rc, t, r, r1;
    SValue sv;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* extract the bitfield first; t picks up the plain type */
            gv(RC_INT);
            t = vtop->type.t;
        }
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
    } else
#endif
    {
        /* duplicate value */
        rc = RC_INT;
        sv.type.t = VT_INT;
        if (is_float(t)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((t & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            }
#elif defined TCC_TARGET_RISCV64
            if ((t & VT_BTYPE) == VT_LDOUBLE)
                rc = RC_INT;
#endif
            sv.type.t = t;
        }
        r = gv(rc);
        r1 = get_reg(rc);
        sv.r = r;
        sv.c.i = 0;
        load(r1, &sv); /* move r to r1 */
        vdup();
        /* duplicates value */
        if (r != r1)
            vtop->r = r1;
    }
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations.
   On 32-bit targets 64-bit values live in register pairs; operations
   are synthesized from 32-bit ops, or routed to libgcc-style helper
   functions (__divdi3, __ashldi3, ...) when no simple expansion exists.
   Stack-effect comments show vstack contents, rightmost = vtop. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_LRET;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod-style helpers return the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_global_sym(&func_old_type, func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: unsigned widening multiply of the low
               words plus the two cross products folded into the high word */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops apply independently to each word */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic fill: replicate the sign bit */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* signed 64-bit division performed on the unsigned representations:
   divide |a| by |b|, then negate the quotient when the operand signs
   differ.  Working on magnitudes keeps the arithmetic in uint64_t, so
   cases like INT64_MIN / -1 cannot trap in the compiler itself. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t abs_a = (a >> 63) ? (uint64_t)0 - a : a;
    uint64_t abs_b = (b >> 63) ? (uint64_t)0 - b : b;
    uint64_t q = abs_a / abs_b;

    if ((a ^ b) >> 63)
        q = (uint64_t)0 - q;
    return q;
}
/* signed 64-bit "a < b" evaluated on unsigned bit patterns: flipping the
   sign bit maps the signed range monotonically onto the unsigned range,
   so a plain unsigned comparison yields the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    uint64_t ua = a ^ sign_bit;
    uint64_t ub = b ^ sign_bit;

    return ua < ub ? 1 : 0;
}
/* handle integer constant optimizations and various machine
   independent opt.
   Operates on vtop[-1] (v1) and vtop (v2): if both are constants the
   operation is folded at compile time; if only one is, algebraic
   identities (x+0, x*1, x&0, power-of-two mul/div, symbol+const, ...)
   are applied; otherwise the target generator is invoked. */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    /* normalize 32-bit operands: truncate and sign-extend as needed */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands constant: fold the whole operation */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift right on the unsigned representation */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded result to the operand width */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                          op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                /* l2 is a power of two: compute log2 into n */
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such.  */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
}
/* generate a floating point operation with constant propagation.
   Folds vtop[-1] op vtop when both are finite constants; otherwise
   defers to the target's gen_opf(). */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        /* widen both operands to long double for the folding */
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }

        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        if (!ieee_finite(f1) || !ieee_finite(f2))
            goto general_case;

        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold.  */
                if (!const_wanted)
                    goto general_case;
            }
            f1 /= f2;
            break;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        /* XXX: overflow test ? */
        /* store the result back in the operand's original precision */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
        vtop--;
    } else {
    general_case:
        gen_opf(op);
    }
}
/* return the size in bytes of the type pointed to by 'type'
   (the alignment result of type_size() is discarded) */
static int pointed_size(CType *type)
{
    int align;
    return type_size(pointed_type(type), &align);
}
/* push code computing, at run time, the size of the (VLA) type pointed
   to by 'type' */
static void vla_runtime_pointed_size(CType *type)
{
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
}
2308 static inline int is_null_pointer(SValue *p)
2310 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2311 return 0;
2312 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2313 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2314 ((p->type.t & VT_BTYPE) == VT_PTR &&
2315 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2316 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2317 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2320 static inline int is_integer_btype(int bt)
2322 return (bt == VT_BYTE || bt == VT_SHORT ||
2323 bt == VT_INT || bt == VT_LLONG);
/* check types for comparison or subtraction of pointers.
   Emits a warning for pointer/integer mixes and for incompatible
   pointer types; emits an error (via tcc_error, which does not return)
   for operands that are not pointers at all, or for '-' on
   incompatible pointer types. */
static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
{
    CType *type1, *type2, tmp_type1, tmp_type2;
    int bt1, bt2;

    /* null pointers are accepted for all comparisons as gcc */
    if (is_null_pointer(p1) || is_null_pointer(p2))
        return;
    type1 = &p1->type;
    type2 = &p2->type;
    bt1 = type1->t & VT_BTYPE;
    bt2 = type2->t & VT_BTYPE;
    /* accept comparison between pointer and integer with a warning */
    if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
        if (op != TOK_LOR && op != TOK_LAND )
            tcc_warning("comparison between pointer and integer");
        return;
    }

    /* both must be pointers or implicit function pointers */
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
    } else if (bt1 != VT_FUNC)
        goto invalid_operands;

    if (bt2 == VT_PTR) {
        type2 = pointed_type(type2);
    } else if (bt2 != VT_FUNC) {
    invalid_operands:
        tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
    }
    /* 'void *' compares with any pointer type */
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID)
        return;
    /* compare pointed-to types ignoring signedness and qualifiers */
    tmp_type1 = *type1;
    tmp_type2 = *type2;
    tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        /* gcc-like error if '-' is used */
        if (op == '-')
            goto invalid_operands;
        else
            tcc_warning("comparison of distinct pointer types lacks a cast");
    }
}
2374 /* generic gen_op: handles types problems */
2375 ST_FUNC void gen_op(int op)
2377 int u, t1, t2, bt1, bt2, t;
2378 CType type1;
2380 redo:
2381 t1 = vtop[-1].type.t;
2382 t2 = vtop[0].type.t;
2383 bt1 = t1 & VT_BTYPE;
2384 bt2 = t2 & VT_BTYPE;
2386 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2387 tcc_error("operation on a struct");
2388 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2389 if (bt2 == VT_FUNC) {
2390 mk_pointer(&vtop->type);
2391 gaddrof();
2393 if (bt1 == VT_FUNC) {
2394 vswap();
2395 mk_pointer(&vtop->type);
2396 gaddrof();
2397 vswap();
2399 goto redo;
2400 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2401 /* at least one operand is a pointer */
2402 /* relational op: must be both pointers */
2403 if (op >= TOK_ULT && op <= TOK_LOR) {
2404 check_comparison_pointer_types(vtop - 1, vtop, op);
2405 /* pointers are handled are unsigned */
2406 #if PTR_SIZE == 8
2407 t = VT_LLONG | VT_UNSIGNED;
2408 #else
2409 t = VT_INT | VT_UNSIGNED;
2410 #endif
2411 goto std_op;
2413 /* if both pointers, then it must be the '-' op */
2414 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2415 if (op != '-')
2416 tcc_error("cannot use pointers here");
2417 check_comparison_pointer_types(vtop - 1, vtop, op);
2418 /* XXX: check that types are compatible */
2419 if (vtop[-1].type.t & VT_VLA) {
2420 vla_runtime_pointed_size(&vtop[-1].type);
2421 } else {
2422 vpushi(pointed_size(&vtop[-1].type));
2424 vrott(3);
2425 gen_opic(op);
2426 vtop->type.t = ptrdiff_type.t;
2427 vswap();
2428 gen_op(TOK_PDIV);
2429 } else {
2430 /* exactly one pointer : must be '+' or '-'. */
2431 if (op != '-' && op != '+')
2432 tcc_error("cannot use pointers here");
2433 /* Put pointer as first operand */
2434 if (bt2 == VT_PTR) {
2435 vswap();
2436 t = t1, t1 = t2, t2 = t;
2438 #if PTR_SIZE == 4
2439 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2440 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2441 gen_cast_s(VT_INT);
2442 #endif
2443 type1 = vtop[-1].type;
2444 type1.t &= ~VT_ARRAY;
2445 if (vtop[-1].type.t & VT_VLA)
2446 vla_runtime_pointed_size(&vtop[-1].type);
2447 else {
2448 u = pointed_size(&vtop[-1].type);
2449 if (u < 0)
2450 tcc_error("unknown array element size");
2451 #if PTR_SIZE == 8
2452 vpushll(u);
2453 #else
2454 /* XXX: cast to int ? (long long case) */
2455 vpushi(u);
2456 #endif
2458 gen_op('*');
2459 #if 0
2460 /* #ifdef CONFIG_TCC_BCHECK
2461 The main reason to removing this code:
2462 #include <stdio.h>
2463 int main ()
2465 int v[10];
2466 int i = 10;
2467 int j = 9;
2468 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2469 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2471 When this code is on. then the output looks like
2472 v+i-j = 0xfffffffe
2473 v+(i-j) = 0xbff84000
2475 /* if evaluating constant expression, no code should be
2476 generated, so no bound check */
2477 if (tcc_state->do_bounds_check && !const_wanted) {
2478 /* if bounded pointers, we generate a special code to
2479 test bounds */
2480 if (op == '-') {
2481 vpushi(0);
2482 vswap();
2483 gen_op('-');
2485 gen_bounded_ptr_add();
2486 } else
2487 #endif
2489 gen_opic(op);
2491 /* put again type if gen_opic() swaped operands */
2492 vtop->type = type1;
2494 } else if (is_float(bt1) || is_float(bt2)) {
2495 /* compute bigger type and do implicit casts */
2496 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2497 t = VT_LDOUBLE;
2498 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2499 t = VT_DOUBLE;
2500 } else {
2501 t = VT_FLOAT;
2503 /* floats can only be used for a few operations */
2504 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2505 (op < TOK_ULT || op > TOK_GT))
2506 tcc_error("invalid operands for binary operation");
2507 goto std_op;
2508 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2509 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2510 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2511 t |= VT_UNSIGNED;
2512 t |= (VT_LONG & t1);
2513 goto std_op;
2514 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2515 /* cast to biggest op */
2516 t = VT_LLONG | VT_LONG;
2517 if (bt1 == VT_LLONG)
2518 t &= t1;
2519 if (bt2 == VT_LLONG)
2520 t &= t2;
2521 /* convert to unsigned if it does not fit in a long long */
2522 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2523 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2524 t |= VT_UNSIGNED;
2525 goto std_op;
2526 } else {
2527 /* integer operations */
2528 t = VT_INT | (VT_LONG & (t1 | t2));
2529 /* convert to unsigned if it does not fit in an integer */
2530 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2531 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2532 t |= VT_UNSIGNED;
2533 std_op:
2534 /* XXX: currently, some unsigned operations are explicit, so
2535 we modify them here */
2536 if (t & VT_UNSIGNED) {
2537 if (op == TOK_SAR)
2538 op = TOK_SHR;
2539 else if (op == '/')
2540 op = TOK_UDIV;
2541 else if (op == '%')
2542 op = TOK_UMOD;
2543 else if (op == TOK_LT)
2544 op = TOK_ULT;
2545 else if (op == TOK_GT)
2546 op = TOK_UGT;
2547 else if (op == TOK_LE)
2548 op = TOK_ULE;
2549 else if (op == TOK_GE)
2550 op = TOK_UGE;
2552 vswap();
2553 type1.t = t;
2554 type1.ref = NULL;
2555 gen_cast(&type1);
2556 vswap();
2557 /* special case for shifts and long long: we keep the shift as
2558 an integer */
2559 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2560 type1.t = VT_INT;
2561 gen_cast(&type1);
2562 if (is_float(t))
2563 gen_opif(op);
2564 else
2565 gen_opic(op);
2566 if (op >= TOK_ULT && op <= TOK_GT) {
2567 /* relational op: the result is an int */
2568 vtop->type.t = VT_INT;
2569 } else {
2570 vtop->type.t = t;
2573 // Make sure that we have converted to an rvalue:
2574 if (vtop->r & VT_LVAL)
2575 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2578 #ifndef TCC_TARGET_ARM
2579 /* generic itof for unsigned long long case */
2580 static void gen_cvt_itof1(int t)
2582 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2583 gen_cvt_itof(t);
2584 #else
2585 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2586 (VT_LLONG | VT_UNSIGNED)) {
2588 if (t == VT_FLOAT)
2589 vpush_global_sym(&func_old_type, TOK___floatundisf);
2590 #if LDOUBLE_SIZE != 8
2591 else if (t == VT_LDOUBLE)
2592 vpush_global_sym(&func_old_type, TOK___floatundixf);
2593 #endif
2594 else
2595 vpush_global_sym(&func_old_type, TOK___floatundidf);
2596 vrott(2);
2597 gfunc_call(1);
2598 vpushi(0);
2599 vtop->r = reg_fret(t);
2600 } else {
2601 gen_cvt_itof(t);
2603 #endif
2605 #endif
2607 /* generic ftoi for unsigned long long case */
2608 static void gen_cvt_ftoi1(int t)
2610 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2611 gen_cvt_ftoi(t);
2612 #else
2613 int st;
2615 if (t == (VT_LLONG | VT_UNSIGNED)) {
2616 /* not handled natively */
2617 st = vtop->type.t & VT_BTYPE;
2618 if (st == VT_FLOAT)
2619 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2620 #if LDOUBLE_SIZE != 8
2621 else if (st == VT_LDOUBLE)
2622 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2623 #endif
2624 else
2625 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2626 vrott(2);
2627 gfunc_call(1);
2628 vpushi(0);
2629 vtop->r = REG_IRET;
2630 #if PTR_SIZE == 4
2631 vtop->r2 = REG_LRET;
2632 #endif
2633 } else {
2634 gen_cvt_ftoi(t);
2636 #endif
2639 /* force char or short cast */
2640 static void force_charshort_cast(int t)
2642 int bits, dbt;
2644 /* cannot cast static initializers */
2645 if (STATIC_DATA_WANTED)
2646 return;
2648 dbt = t & VT_BTYPE;
2649 /* XXX: add optimization if lvalue : just change type and offset */
2650 if (dbt == VT_BYTE)
2651 bits = 8;
2652 else
2653 bits = 16;
2654 if (t & VT_UNSIGNED) {
2655 vpushi((1 << bits) - 1);
2656 gen_op('&');
2657 } else {
2658 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2659 bits = 64 - bits;
2660 else
2661 bits = 32 - bits;
2662 vpushi(bits);
2663 gen_op(TOK_SHL);
2664 /* result must be signed or the SAR is converted to an SHL
2665 This was not the case when "t" was a signed short
2666 and the last value on the stack was an unsigned int */
2667 vtop->type.t &= ~VT_UNSIGNED;
2668 vpushi(bits);
2669 gen_op(TOK_SAR);
2673 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2674 static void gen_cast_s(int t)
2676 CType type;
2677 type.t = t;
2678 type.ref = NULL;
2679 gen_cast(&type);
2682 static void gen_cast(CType *type)
2684 int sbt, dbt, sf, df, c, p;
2686 /* special delayed cast for char/short */
2687 /* XXX: in some cases (multiple cascaded casts), it may still
2688 be incorrect */
2689 if (vtop->r & VT_MUSTCAST) {
2690 vtop->r &= ~VT_MUSTCAST;
2691 force_charshort_cast(vtop->type.t);
2694 /* bitfields first get cast to ints */
2695 if (vtop->type.t & VT_BITFIELD) {
2696 gv(RC_INT);
2699 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2700 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2702 if (sbt != dbt) {
2703 sf = is_float(sbt);
2704 df = is_float(dbt);
2705 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2706 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2707 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2708 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
2709 #endif
2710 if (c) {
2711 /* constant case: we can do it now */
2712 /* XXX: in ISOC, cannot do it if error in convert */
2713 if (sbt == VT_FLOAT)
2714 vtop->c.ld = vtop->c.f;
2715 else if (sbt == VT_DOUBLE)
2716 vtop->c.ld = vtop->c.d;
2718 if (df) {
2719 if ((sbt & VT_BTYPE) == VT_LLONG) {
2720 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2721 vtop->c.ld = vtop->c.i;
2722 else
2723 vtop->c.ld = -(long double)-vtop->c.i;
2724 } else if(!sf) {
2725 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2726 vtop->c.ld = (uint32_t)vtop->c.i;
2727 else
2728 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2731 if (dbt == VT_FLOAT)
2732 vtop->c.f = (float)vtop->c.ld;
2733 else if (dbt == VT_DOUBLE)
2734 vtop->c.d = (double)vtop->c.ld;
2735 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2736 vtop->c.i = vtop->c.ld;
2737 } else if (sf && dbt == VT_BOOL) {
2738 vtop->c.i = (vtop->c.ld != 0);
2739 } else {
2740 if(sf)
2741 vtop->c.i = vtop->c.ld;
2742 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2744 else if (sbt & VT_UNSIGNED)
2745 vtop->c.i = (uint32_t)vtop->c.i;
2746 #if PTR_SIZE == 8
2747 else if (sbt == VT_PTR)
2749 #endif
2750 else if (sbt != VT_LLONG)
2751 vtop->c.i = ((uint32_t)vtop->c.i |
2752 -(vtop->c.i & 0x80000000));
2754 if (dbt == (VT_LLONG|VT_UNSIGNED))
2756 else if (dbt == VT_BOOL)
2757 vtop->c.i = (vtop->c.i != 0);
2758 #if PTR_SIZE == 8
2759 else if (dbt == VT_PTR)
2761 #endif
2762 else if (dbt != VT_LLONG) {
2763 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2764 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2765 0xffffffff);
2766 vtop->c.i &= m;
2767 if (!(dbt & VT_UNSIGNED))
2768 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2771 } else if (p && dbt == VT_BOOL) {
2772 vtop->r = VT_CONST;
2773 vtop->c.i = 1;
2774 } else {
2775 /* non constant case: generate code */
2776 if (sf && df) {
2777 /* convert from fp to fp */
2778 gen_cvt_ftof(dbt);
2779 } else if (df) {
2780 /* convert int to fp */
2781 gen_cvt_itof1(dbt);
2782 } else if (sf) {
2783 /* convert fp to int */
2784 if (dbt == VT_BOOL) {
2785 vpushi(0);
2786 gen_op(TOK_NE);
2787 } else {
2788 /* we handle char/short/etc... with generic code */
2789 if (dbt != (VT_INT | VT_UNSIGNED) &&
2790 dbt != (VT_LLONG | VT_UNSIGNED) &&
2791 dbt != VT_LLONG)
2792 dbt = VT_INT;
2793 gen_cvt_ftoi1(dbt);
2794 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2795 /* additional cast for char/short... */
2796 vtop->type.t = dbt;
2797 gen_cast(type);
2800 #if PTR_SIZE == 4
2801 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2802 if ((sbt & VT_BTYPE) != VT_LLONG) {
2803 /* scalar to long long */
2804 /* machine independent conversion */
2805 gv(RC_INT);
2806 /* generate high word */
2807 if (sbt == (VT_INT | VT_UNSIGNED)) {
2808 vpushi(0);
2809 gv(RC_INT);
2810 } else {
2811 if (sbt == VT_PTR) {
2812 /* cast from pointer to int before we apply
2813 shift operation, which pointers don't support*/
2814 gen_cast_s(VT_INT);
2816 gv_dup();
2817 vpushi(31);
2818 gen_op(TOK_SAR);
2820 /* patch second register */
2821 vtop[-1].r2 = vtop->r;
2822 vpop();
2824 #else
2825 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2826 (dbt & VT_BTYPE) == VT_PTR ||
2827 (dbt & VT_BTYPE) == VT_FUNC) {
2828 if ((sbt & VT_BTYPE) != VT_LLONG &&
2829 (sbt & VT_BTYPE) != VT_PTR &&
2830 (sbt & VT_BTYPE) != VT_FUNC) {
2831 /* need to convert from 32bit to 64bit */
2832 gv(RC_INT);
2833 if (sbt != (VT_INT | VT_UNSIGNED)) {
2834 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2835 gen_cvt_sxtw();
2836 #elif defined(TCC_TARGET_X86_64)
2837 int r = gv(RC_INT);
2838 /* x86_64 specific: movslq */
2839 o(0x6348);
2840 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2841 #else
2842 #error
2843 #endif
2844 } else if (sbt & VT_UNSIGNED) {
2845 #if defined(TCC_TARGET_RISCV64)
2846 /* RISC-V keeps 32bit vals in registers sign-extended.
2847 So here we need a zero-extension. */
2848 vtop->type.t = VT_LLONG;
2849 vpushi(32);
2850 gen_op(TOK_SHL);
2851 vpushi(32);
2852 gen_op(TOK_SHR);
2853 #endif
2856 #endif
2857 } else if (dbt == VT_BOOL) {
2858 /* scalar to bool */
2859 vpushi(0);
2860 gen_op(TOK_NE);
2861 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2862 (dbt & VT_BTYPE) == VT_SHORT) {
2863 if (sbt == VT_PTR) {
2864 vtop->type.t = VT_INT;
2865 tcc_warning("nonportable conversion from pointer to char/short");
2867 force_charshort_cast(dbt);
2868 } else if ((dbt & VT_BTYPE) == VT_INT) {
2869 /* scalar to int */
2870 if ((sbt & VT_BTYPE) == VT_LLONG) {
2871 #if PTR_SIZE == 4
2872 /* from long long: just take low order word */
2873 lexpand();
2874 vpop();
2875 #else
2876 if (dbt & VT_UNSIGNED) {
2877 /* XXX some architectures (e.g. risc-v) would like it
2878 better for this merely being a 32-to-64 sign or zero-
2879 extension. */
2880 vpushi(0xffffffff);
2881 vtop->type.t |= VT_UNSIGNED;
2882 gen_op('&');
2883 } else {
2885 #endif
2887 /* if lvalue and single word type, nothing to do because
2888 the lvalue already contains the real type size (see
2889 VT_LVAL_xxx constants) */
2892 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2893 /* if we are casting between pointer types,
2894 we must update the VT_LVAL_xxx size */
2895 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2896 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2898 vtop->type = *type;
2899 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2902 /* return type size as known at compile time. Put alignment at 'a' */
2903 ST_FUNC int type_size(CType *type, int *a)
2905 Sym *s;
2906 int bt;
2908 bt = type->t & VT_BTYPE;
2909 if (bt == VT_STRUCT) {
2910 /* struct/union */
2911 s = type->ref;
2912 *a = s->r;
2913 return s->c;
2914 } else if (bt == VT_PTR) {
2915 if (type->t & VT_ARRAY) {
2916 int ts;
2918 s = type->ref;
2919 ts = type_size(&s->type, a);
2921 if (ts < 0 && s->c < 0)
2922 ts = -ts;
2924 return ts * s->c;
2925 } else {
2926 *a = PTR_SIZE;
2927 return PTR_SIZE;
2929 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2930 return -1; /* incomplete enum */
2931 } else if (bt == VT_LDOUBLE) {
2932 *a = LDOUBLE_ALIGN;
2933 return LDOUBLE_SIZE;
2934 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2935 #ifdef TCC_TARGET_I386
2936 #ifdef TCC_TARGET_PE
2937 *a = 8;
2938 #else
2939 *a = 4;
2940 #endif
2941 #elif defined(TCC_TARGET_ARM)
2942 #ifdef TCC_ARM_EABI
2943 *a = 8;
2944 #else
2945 *a = 4;
2946 #endif
2947 #else
2948 *a = 8;
2949 #endif
2950 return 8;
2951 } else if (bt == VT_INT || bt == VT_FLOAT) {
2952 *a = 4;
2953 return 4;
2954 } else if (bt == VT_SHORT) {
2955 *a = 2;
2956 return 2;
2957 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2958 *a = 8;
2959 return 16;
2960 } else {
2961 /* char, void, function, _Bool */
2962 *a = 1;
2963 return 1;
2967 /* push type size as known at runtime time on top of value stack. Put
2968 alignment at 'a' */
2969 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2971 if (type->t & VT_VLA) {
2972 type_size(&type->ref->type, a);
2973 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2974 } else {
2975 vpushi(type_size(type, a));
2979 /* return the pointed type of t */
2980 static inline CType *pointed_type(CType *type)
2982 return &type->ref->type;
2985 /* modify type so that its it is a pointer to type. */
2986 ST_FUNC void mk_pointer(CType *type)
2988 Sym *s;
2989 s = sym_push(SYM_FIELD, type, 0, -1);
2990 type->t = VT_PTR | (type->t & VT_STORAGE);
2991 type->ref = s;
2994 /* compare function types. OLD functions match any new functions */
2995 static int is_compatible_func(CType *type1, CType *type2)
2997 Sym *s1, *s2;
2999 s1 = type1->ref;
3000 s2 = type2->ref;
3001 if (s1->f.func_call != s2->f.func_call)
3002 return 0;
3003 if (s1->f.func_type != s2->f.func_type
3004 && s1->f.func_type != FUNC_OLD
3005 && s2->f.func_type != FUNC_OLD)
3006 return 0;
3007 /* we should check the function return type for FUNC_OLD too
3008 but that causes problems with the internally used support
3009 functions such as TOK_memmove */
3010 if (s1->f.func_type == FUNC_OLD && !s1->next)
3011 return 1;
3012 if (s2->f.func_type == FUNC_OLD && !s2->next)
3013 return 1;
3014 for (;;) {
3015 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3016 return 0;
3017 s1 = s1->next;
3018 s2 = s2->next;
3019 if (!s1)
3020 return !s2;
3021 if (!s2)
3022 return 0;
3026 /* return true if type1 and type2 are the same. If unqualified is
3027 true, qualifiers on the types are ignored.
3029 static int compare_types(CType *type1, CType *type2, int unqualified)
3031 int bt1, t1, t2;
3033 t1 = type1->t & VT_TYPE;
3034 t2 = type2->t & VT_TYPE;
3035 if (unqualified) {
3036 /* strip qualifiers before comparing */
3037 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3038 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3041 /* Default Vs explicit signedness only matters for char */
3042 if ((t1 & VT_BTYPE) != VT_BYTE) {
3043 t1 &= ~VT_DEFSIGN;
3044 t2 &= ~VT_DEFSIGN;
3046 /* XXX: bitfields ? */
3047 if (t1 != t2)
3048 return 0;
3050 if ((t1 & VT_ARRAY)
3051 && !(type1->ref->c < 0
3052 || type2->ref->c < 0
3053 || type1->ref->c == type2->ref->c))
3054 return 0;
3056 /* test more complicated cases */
3057 bt1 = t1 & VT_BTYPE;
3058 if (bt1 == VT_PTR) {
3059 type1 = pointed_type(type1);
3060 type2 = pointed_type(type2);
3061 return is_compatible_types(type1, type2);
3062 } else if (bt1 == VT_STRUCT) {
3063 return (type1->ref == type2->ref);
3064 } else if (bt1 == VT_FUNC) {
3065 return is_compatible_func(type1, type2);
3066 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3067 return type1->ref == type2->ref;
3068 } else {
3069 return 1;
3073 /* return true if type1 and type2 are exactly the same (including
3074 qualifiers).
3076 static int is_compatible_types(CType *type1, CType *type2)
3078 return compare_types(type1,type2,0);
3081 /* return true if type1 and type2 are the same (ignoring qualifiers).
3083 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3085 return compare_types(type1,type2,1);
3088 /* print a type. If 'varstr' is not NULL, then the variable is also
3089 printed in the type */
3090 /* XXX: union */
3091 /* XXX: add array and function pointers */
3092 static void type_to_str(char *buf, int buf_size,
3093 CType *type, const char *varstr)
3095 int bt, v, t;
3096 Sym *s, *sa;
3097 char buf1[256];
3098 const char *tstr;
3100 t = type->t;
3101 bt = t & VT_BTYPE;
3102 buf[0] = '\0';
3104 if (t & VT_EXTERN)
3105 pstrcat(buf, buf_size, "extern ");
3106 if (t & VT_STATIC)
3107 pstrcat(buf, buf_size, "static ");
3108 if (t & VT_TYPEDEF)
3109 pstrcat(buf, buf_size, "typedef ");
3110 if (t & VT_INLINE)
3111 pstrcat(buf, buf_size, "inline ");
3112 if (t & VT_VOLATILE)
3113 pstrcat(buf, buf_size, "volatile ");
3114 if (t & VT_CONSTANT)
3115 pstrcat(buf, buf_size, "const ");
3117 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3118 || ((t & VT_UNSIGNED)
3119 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3120 && !IS_ENUM(t)
3122 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3124 buf_size -= strlen(buf);
3125 buf += strlen(buf);
3127 switch(bt) {
3128 case VT_VOID:
3129 tstr = "void";
3130 goto add_tstr;
3131 case VT_BOOL:
3132 tstr = "_Bool";
3133 goto add_tstr;
3134 case VT_BYTE:
3135 tstr = "char";
3136 goto add_tstr;
3137 case VT_SHORT:
3138 tstr = "short";
3139 goto add_tstr;
3140 case VT_INT:
3141 tstr = "int";
3142 goto maybe_long;
3143 case VT_LLONG:
3144 tstr = "long long";
3145 maybe_long:
3146 if (t & VT_LONG)
3147 tstr = "long";
3148 if (!IS_ENUM(t))
3149 goto add_tstr;
3150 tstr = "enum ";
3151 goto tstruct;
3152 case VT_FLOAT:
3153 tstr = "float";
3154 goto add_tstr;
3155 case VT_DOUBLE:
3156 tstr = "double";
3157 goto add_tstr;
3158 case VT_LDOUBLE:
3159 tstr = "long double";
3160 add_tstr:
3161 pstrcat(buf, buf_size, tstr);
3162 break;
3163 case VT_STRUCT:
3164 tstr = "struct ";
3165 if (IS_UNION(t))
3166 tstr = "union ";
3167 tstruct:
3168 pstrcat(buf, buf_size, tstr);
3169 v = type->ref->v & ~SYM_STRUCT;
3170 if (v >= SYM_FIRST_ANOM)
3171 pstrcat(buf, buf_size, "<anonymous>");
3172 else
3173 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3174 break;
3175 case VT_FUNC:
3176 s = type->ref;
3177 buf1[0]=0;
3178 if (varstr && '*' == *varstr) {
3179 pstrcat(buf1, sizeof(buf1), "(");
3180 pstrcat(buf1, sizeof(buf1), varstr);
3181 pstrcat(buf1, sizeof(buf1), ")");
3183 pstrcat(buf1, buf_size, "(");
3184 sa = s->next;
3185 while (sa != NULL) {
3186 char buf2[256];
3187 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3188 pstrcat(buf1, sizeof(buf1), buf2);
3189 sa = sa->next;
3190 if (sa)
3191 pstrcat(buf1, sizeof(buf1), ", ");
3193 if (s->f.func_type == FUNC_ELLIPSIS)
3194 pstrcat(buf1, sizeof(buf1), ", ...");
3195 pstrcat(buf1, sizeof(buf1), ")");
3196 type_to_str(buf, buf_size, &s->type, buf1);
3197 goto no_var;
3198 case VT_PTR:
3199 s = type->ref;
3200 if (t & VT_ARRAY) {
3201 if (varstr && '*' == *varstr)
3202 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3203 else
3204 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3205 type_to_str(buf, buf_size, &s->type, buf1);
3206 goto no_var;
3208 pstrcpy(buf1, sizeof(buf1), "*");
3209 if (t & VT_CONSTANT)
3210 pstrcat(buf1, buf_size, "const ");
3211 if (t & VT_VOLATILE)
3212 pstrcat(buf1, buf_size, "volatile ");
3213 if (varstr)
3214 pstrcat(buf1, sizeof(buf1), varstr);
3215 type_to_str(buf, buf_size, &s->type, buf1);
3216 goto no_var;
3218 if (varstr) {
3219 pstrcat(buf, buf_size, " ");
3220 pstrcat(buf, buf_size, varstr);
3222 no_var: ;
3225 /* verify type compatibility to store vtop in 'dt' type, and generate
3226 casts if needed. */
3227 static void gen_assign_cast(CType *dt)
3229 CType *st, *type1, *type2;
3230 char buf1[256], buf2[256];
3231 int dbt, sbt, qualwarn, lvl;
3233 st = &vtop->type; /* source type */
3234 dbt = dt->t & VT_BTYPE;
3235 sbt = st->t & VT_BTYPE;
3236 if (sbt == VT_VOID || dbt == VT_VOID) {
3237 if (sbt == VT_VOID && dbt == VT_VOID)
3238 ; /* It is Ok if both are void */
3239 else
3240 tcc_error("cannot cast from/to void");
3242 if (dt->t & VT_CONSTANT)
3243 tcc_warning("assignment of read-only location");
3244 switch(dbt) {
3245 case VT_PTR:
3246 /* special cases for pointers */
3247 /* '0' can also be a pointer */
3248 if (is_null_pointer(vtop))
3249 break;
3250 /* accept implicit pointer to integer cast with warning */
3251 if (is_integer_btype(sbt)) {
3252 tcc_warning("assignment makes pointer from integer without a cast");
3253 break;
3255 type1 = pointed_type(dt);
3256 if (sbt == VT_PTR)
3257 type2 = pointed_type(st);
3258 else if (sbt == VT_FUNC)
3259 type2 = st; /* a function is implicitly a function pointer */
3260 else
3261 goto error;
3262 if (is_compatible_types(type1, type2))
3263 break;
3264 for (qualwarn = lvl = 0;; ++lvl) {
3265 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3266 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3267 qualwarn = 1;
3268 dbt = type1->t & (VT_BTYPE|VT_LONG);
3269 sbt = type2->t & (VT_BTYPE|VT_LONG);
3270 if (dbt != VT_PTR || sbt != VT_PTR)
3271 break;
3272 type1 = pointed_type(type1);
3273 type2 = pointed_type(type2);
3275 if (!is_compatible_unqualified_types(type1, type2)) {
3276 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3277 /* void * can match anything */
3278 } else if (dbt == sbt
3279 && is_integer_btype(sbt & VT_BTYPE)
3280 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3281 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3282 /* Like GCC don't warn by default for merely changes
3283 in pointer target signedness. Do warn for different
3284 base types, though, in particular for unsigned enums
3285 and signed int targets. */
3286 } else {
3287 tcc_warning("assignment from incompatible pointer type");
3288 break;
3291 if (qualwarn)
3292 tcc_warning("assignment discards qualifiers from pointer target type");
3293 break;
3294 case VT_BYTE:
3295 case VT_SHORT:
3296 case VT_INT:
3297 case VT_LLONG:
3298 if (sbt == VT_PTR || sbt == VT_FUNC) {
3299 tcc_warning("assignment makes integer from pointer without a cast");
3300 } else if (sbt == VT_STRUCT) {
3301 goto case_VT_STRUCT;
3303 /* XXX: more tests */
3304 break;
3305 case VT_STRUCT:
3306 case_VT_STRUCT:
3307 if (!is_compatible_unqualified_types(dt, st)) {
3308 error:
3309 type_to_str(buf1, sizeof(buf1), st, NULL);
3310 type_to_str(buf2, sizeof(buf2), dt, NULL);
3311 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3313 break;
3315 gen_cast(dt);
3318 /* store vtop in lvalue pushed on stack */
3319 ST_FUNC void vstore(void)
3321 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3323 ft = vtop[-1].type.t;
3324 sbt = vtop->type.t & VT_BTYPE;
3325 dbt = ft & VT_BTYPE;
3326 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3327 (sbt == VT_INT && dbt == VT_SHORT))
3328 && !(vtop->type.t & VT_BITFIELD)) {
3329 /* optimize char/short casts */
3330 delayed_cast = VT_MUSTCAST;
3331 vtop->type.t = ft & VT_TYPE;
3332 /* XXX: factorize */
3333 if (ft & VT_CONSTANT)
3334 tcc_warning("assignment of read-only location");
3335 } else {
3336 delayed_cast = 0;
3337 if (!(ft & VT_BITFIELD))
3338 gen_assign_cast(&vtop[-1].type);
3341 if (sbt == VT_STRUCT) {
3342 /* if structure, only generate pointer */
3343 /* structure assignment : generate memcpy */
3344 /* XXX: optimize if small size */
3345 size = type_size(&vtop->type, &align);
3347 /* destination */
3348 vswap();
3349 vtop->type.t = VT_PTR;
3350 gaddrof();
3352 /* address of memcpy() */
3353 #ifdef TCC_ARM_EABI
3354 if(!(align & 7))
3355 vpush_global_sym(&func_old_type, TOK_memcpy8);
3356 else if(!(align & 3))
3357 vpush_global_sym(&func_old_type, TOK_memcpy4);
3358 else
3359 #endif
3360 /* Use memmove, rather than memcpy, as dest and src may be same: */
3361 vpush_global_sym(&func_old_type, TOK_memmove);
3363 vswap();
3364 /* source */
3365 vpushv(vtop - 2);
3366 vtop->type.t = VT_PTR;
3367 gaddrof();
3368 /* type size */
3369 vpushi(size);
3370 gfunc_call(3);
3372 /* leave source on stack */
3373 } else if (ft & VT_BITFIELD) {
3374 /* bitfield store handling */
3376 /* save lvalue as expression result (example: s.b = s.a = n;) */
3377 vdup(), vtop[-1] = vtop[-2];
3379 bit_pos = BIT_POS(ft);
3380 bit_size = BIT_SIZE(ft);
3381 /* remove bit field info to avoid loops */
3382 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3384 if ((ft & VT_BTYPE) == VT_BOOL) {
3385 gen_cast(&vtop[-1].type);
3386 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3389 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3390 if (r == VT_STRUCT) {
3391 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3392 store_packed_bf(bit_pos, bit_size);
3393 } else {
3394 unsigned long long mask = (1ULL << bit_size) - 1;
3395 if ((ft & VT_BTYPE) != VT_BOOL) {
3396 /* mask source */
3397 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3398 vpushll(mask);
3399 else
3400 vpushi((unsigned)mask);
3401 gen_op('&');
3403 /* shift source */
3404 vpushi(bit_pos);
3405 gen_op(TOK_SHL);
3406 vswap();
3407 /* duplicate destination */
3408 vdup();
3409 vrott(3);
3410 /* load destination, mask and or with source */
3411 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3412 vpushll(~(mask << bit_pos));
3413 else
3414 vpushi(~((unsigned)mask << bit_pos));
3415 gen_op('&');
3416 gen_op('|');
3417 /* store result */
3418 vstore();
3419 /* ... and discard */
3420 vpop();
3422 } else if (dbt == VT_VOID) {
3423 --vtop;
3424 } else {
3425 #ifdef CONFIG_TCC_BCHECK
3426 /* bound check case */
3427 if (vtop[-1].r & VT_MUSTBOUND) {
3428 vswap();
3429 gbound();
3430 vswap();
3432 #endif
3433 rc = RC_INT;
3434 if (is_float(ft)) {
3435 rc = RC_FLOAT;
3436 #ifdef TCC_TARGET_X86_64
3437 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3438 rc = RC_ST0;
3439 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3440 rc = RC_FRET;
3442 #elif defined TCC_TARGET_RISCV64
3443 if (dbt == VT_LDOUBLE)
3444 rc = RC_INT;
3445 #endif
3447 r = gv(rc); /* generate value */
3448 /* if lvalue was saved on stack, must read it */
3449 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3450 SValue sv;
3451 t = get_reg(RC_INT);
3452 #if PTR_SIZE == 8
3453 sv.type.t = VT_PTR;
3454 #else
3455 sv.type.t = VT_INT;
3456 #endif
3457 sv.r = VT_LOCAL | VT_LVAL;
3458 sv.c.i = vtop[-1].c.i;
3459 load(t, &sv);
3460 vtop[-1].r = t | VT_LVAL;
3462 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3463 #ifdef TCC_TARGET_RISCV64
3464 if (dbt == VT_QLONG || dbt == VT_LDOUBLE) {
3465 int addr_type = VT_LLONG, load_size = 8, load_type = VT_LLONG;
3466 #elif PTR_SIZE == 8
3467 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3468 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3469 #else
3470 if ((ft & VT_BTYPE) == VT_LLONG) {
3471 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3472 #endif
3473 vtop[-1].type.t = load_type;
3474 store(r, vtop - 1);
3475 vswap();
3476 /* convert to int to increment easily */
3477 vtop->type.t = addr_type;
3478 gaddrof();
3479 vpushi(load_size);
3480 gen_op('+');
3481 vtop->r |= VT_LVAL;
3482 vswap();
3483 vtop[-1].type.t = load_type;
3484 /* XXX: it works because r2 is spilled last ! */
3485 store(vtop->r2, vtop - 1);
3486 } else {
3487 store(r, vtop - 1);
3490 vswap();
3491 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3492 vtop->r |= delayed_cast;
3496 /* post defines POST/PRE add. c is the token ++ or -- */
3497 ST_FUNC void inc(int post, int c)
3499 test_lvalue();
3500 vdup(); /* save lvalue */
3501 if (post) {
3502 gv_dup(); /* duplicate value */
3503 vrotb(3);
3504 vrotb(3);
3506 /* add constant */
3507 vpushi(c - TOK_MID);
3508 gen_op('+');
3509 vstore(); /* store value */
3510 if (post)
3511 vpop(); /* if post op, return saved value */
3514 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3516 /* read the string */
3517 if (tok != TOK_STR)
3518 expect(msg);
3519 cstr_new(astr);
3520 while (tok == TOK_STR) {
3521 /* XXX: add \0 handling too ? */
3522 cstr_cat(astr, tokc.str.data, -1);
3523 next();
3525 cstr_ccat(astr, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0. */
static int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* binary-search the highest set bit, 8 bits at a time */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
3546 /* Parse __attribute__((...)) GNUC extension. */
3547 static void parse_attribute(AttributeDef *ad)
3549 int t, n;
3550 CString astr;
3552 redo:
3553 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3554 return;
3555 next();
3556 skip('(');
3557 skip('(');
3558 while (tok != ')') {
3559 if (tok < TOK_IDENT)
3560 expect("attribute name");
3561 t = tok;
3562 next();
3563 switch(t) {
3564 case TOK_CLEANUP1:
3565 case TOK_CLEANUP2:
3567 Sym *s;
3569 skip('(');
3570 s = sym_find(tok);
3571 if (!s) {
3572 tcc_warning("implicit declaration of function '%s'",
3573 get_tok_str(tok, &tokc));
3574 s = external_global_sym(tok, &func_old_type);
3576 ad->cleanup_func = s;
3577 next();
3578 skip(')');
3579 break;
3581 case TOK_CONSTRUCTOR1:
3582 case TOK_CONSTRUCTOR2:
3583 ad->a.constructor = 1;
3584 break;
3585 case TOK_DESTRUCTOR1:
3586 case TOK_DESTRUCTOR2:
3587 ad->a.destructor = 1;
3588 break;
3589 case TOK_SECTION1:
3590 case TOK_SECTION2:
3591 skip('(');
3592 parse_mult_str(&astr, "section name");
3593 ad->section = find_section(tcc_state, (char *)astr.data);
3594 skip(')');
3595 cstr_free(&astr);
3596 break;
3597 case TOK_ALIAS1:
3598 case TOK_ALIAS2:
3599 skip('(');
3600 parse_mult_str(&astr, "alias(\"target\")");
3601 ad->alias_target = /* save string as token, for later */
3602 tok_alloc((char*)astr.data, astr.size-1)->tok;
3603 skip(')');
3604 cstr_free(&astr);
3605 break;
3606 case TOK_VISIBILITY1:
3607 case TOK_VISIBILITY2:
3608 skip('(');
3609 parse_mult_str(&astr,
3610 "visibility(\"default|hidden|internal|protected\")");
3611 if (!strcmp (astr.data, "default"))
3612 ad->a.visibility = STV_DEFAULT;
3613 else if (!strcmp (astr.data, "hidden"))
3614 ad->a.visibility = STV_HIDDEN;
3615 else if (!strcmp (astr.data, "internal"))
3616 ad->a.visibility = STV_INTERNAL;
3617 else if (!strcmp (astr.data, "protected"))
3618 ad->a.visibility = STV_PROTECTED;
3619 else
3620 expect("visibility(\"default|hidden|internal|protected\")");
3621 skip(')');
3622 cstr_free(&astr);
3623 break;
3624 case TOK_ALIGNED1:
3625 case TOK_ALIGNED2:
3626 if (tok == '(') {
3627 next();
3628 n = expr_const();
3629 if (n <= 0 || (n & (n - 1)) != 0)
3630 tcc_error("alignment must be a positive power of two");
3631 skip(')');
3632 } else {
3633 n = MAX_ALIGN;
3635 ad->a.aligned = exact_log2p1(n);
3636 if (n != 1 << (ad->a.aligned - 1))
3637 tcc_error("alignment of %d is larger than implemented", n);
3638 break;
3639 case TOK_PACKED1:
3640 case TOK_PACKED2:
3641 ad->a.packed = 1;
3642 break;
3643 case TOK_WEAK1:
3644 case TOK_WEAK2:
3645 ad->a.weak = 1;
3646 break;
3647 case TOK_UNUSED1:
3648 case TOK_UNUSED2:
3649 /* currently, no need to handle it because tcc does not
3650 track unused objects */
3651 break;
3652 case TOK_NORETURN1:
3653 case TOK_NORETURN2:
3654 ad->f.func_noreturn = 1;
3655 break;
3656 case TOK_CDECL1:
3657 case TOK_CDECL2:
3658 case TOK_CDECL3:
3659 ad->f.func_call = FUNC_CDECL;
3660 break;
3661 case TOK_STDCALL1:
3662 case TOK_STDCALL2:
3663 case TOK_STDCALL3:
3664 ad->f.func_call = FUNC_STDCALL;
3665 break;
3666 #ifdef TCC_TARGET_I386
3667 case TOK_REGPARM1:
3668 case TOK_REGPARM2:
3669 skip('(');
3670 n = expr_const();
3671 if (n > 3)
3672 n = 3;
3673 else if (n < 0)
3674 n = 0;
3675 if (n > 0)
3676 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3677 skip(')');
3678 break;
3679 case TOK_FASTCALL1:
3680 case TOK_FASTCALL2:
3681 case TOK_FASTCALL3:
3682 ad->f.func_call = FUNC_FASTCALLW;
3683 break;
3684 #endif
3685 case TOK_MODE:
3686 skip('(');
3687 switch(tok) {
3688 case TOK_MODE_DI:
3689 ad->attr_mode = VT_LLONG + 1;
3690 break;
3691 case TOK_MODE_QI:
3692 ad->attr_mode = VT_BYTE + 1;
3693 break;
3694 case TOK_MODE_HI:
3695 ad->attr_mode = VT_SHORT + 1;
3696 break;
3697 case TOK_MODE_SI:
3698 case TOK_MODE_word:
3699 ad->attr_mode = VT_INT + 1;
3700 break;
3701 default:
3702 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3703 break;
3705 next();
3706 skip(')');
3707 break;
3708 case TOK_DLLEXPORT:
3709 ad->a.dllexport = 1;
3710 break;
3711 case TOK_NODECORATE:
3712 ad->a.nodecorate = 1;
3713 break;
3714 case TOK_DLLIMPORT:
3715 ad->a.dllimport = 1;
3716 break;
3717 default:
3718 if (tcc_state->warn_unsupported)
3719 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3720 /* skip parameters */
3721 if (tok == '(') {
3722 int parenthesis = 0;
3723 do {
3724 if (tok == '(')
3725 parenthesis++;
3726 else if (tok == ')')
3727 parenthesis--;
3728 next();
3729 } while (parenthesis && tok != -1);
3731 break;
3733 if (tok != ',')
3734 break;
3735 next();
3737 skip(')');
3738 skip(')');
3739 goto redo;
3742 static Sym * find_field (CType *type, int v, int *cumofs)
3744 Sym *s = type->ref;
3745 v |= SYM_FIELD;
3746 while ((s = s->next) != NULL) {
3747 if ((s->v & SYM_FIELD) &&
3748 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3749 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3750 Sym *ret = find_field (&s->type, v, cumofs);
3751 if (ret) {
3752 *cumofs += s->c;
3753 return ret;
3756 if (s->v == v)
3757 break;
3759 return s;
3762 static void struct_layout(CType *type, AttributeDef *ad)
3764 int size, align, maxalign, offset, c, bit_pos, bit_size;
3765 int packed, a, bt, prevbt, prev_bit_size;
3766 int pcc = !tcc_state->ms_bitfields;
3767 int pragma_pack = *tcc_state->pack_stack_ptr;
3768 Sym *f;
3770 maxalign = 1;
3771 offset = 0;
3772 c = 0;
3773 bit_pos = 0;
3774 prevbt = VT_STRUCT; /* make it never match */
3775 prev_bit_size = 0;
3777 //#define BF_DEBUG
3779 for (f = type->ref->next; f; f = f->next) {
3780 if (f->type.t & VT_BITFIELD)
3781 bit_size = BIT_SIZE(f->type.t);
3782 else
3783 bit_size = -1;
3784 size = type_size(&f->type, &align);
3785 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3786 packed = 0;
3788 if (pcc && bit_size == 0) {
3789 /* in pcc mode, packing does not affect zero-width bitfields */
3791 } else {
3792 /* in pcc mode, attribute packed overrides if set. */
3793 if (pcc && (f->a.packed || ad->a.packed))
3794 align = packed = 1;
3796 /* pragma pack overrides align if lesser and packs bitfields always */
3797 if (pragma_pack) {
3798 packed = 1;
3799 if (pragma_pack < align)
3800 align = pragma_pack;
3801 /* in pcc mode pragma pack also overrides individual align */
3802 if (pcc && pragma_pack < a)
3803 a = 0;
3806 /* some individual align was specified */
3807 if (a)
3808 align = a;
3810 if (type->ref->type.t == VT_UNION) {
3811 if (pcc && bit_size >= 0)
3812 size = (bit_size + 7) >> 3;
3813 offset = 0;
3814 if (size > c)
3815 c = size;
3817 } else if (bit_size < 0) {
3818 if (pcc)
3819 c += (bit_pos + 7) >> 3;
3820 c = (c + align - 1) & -align;
3821 offset = c;
3822 if (size > 0)
3823 c += size;
3824 bit_pos = 0;
3825 prevbt = VT_STRUCT;
3826 prev_bit_size = 0;
3828 } else {
3829 /* A bit-field. Layout is more complicated. There are two
3830 options: PCC (GCC) compatible and MS compatible */
3831 if (pcc) {
3832 /* In PCC layout a bit-field is placed adjacent to the
3833 preceding bit-fields, except if:
3834 - it has zero-width
3835 - an individual alignment was given
3836 - it would overflow its base type container and
3837 there is no packing */
3838 if (bit_size == 0) {
3839 new_field:
3840 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3841 bit_pos = 0;
3842 } else if (f->a.aligned) {
3843 goto new_field;
3844 } else if (!packed) {
3845 int a8 = align * 8;
3846 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3847 if (ofs > size / align)
3848 goto new_field;
3851 /* in pcc mode, long long bitfields have type int if they fit */
3852 if (size == 8 && bit_size <= 32)
3853 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3855 while (bit_pos >= align * 8)
3856 c += align, bit_pos -= align * 8;
3857 offset = c;
3859 /* In PCC layout named bit-fields influence the alignment
3860 of the containing struct using the base types alignment,
3861 except for packed fields (which here have correct align). */
3862 if (f->v & SYM_FIRST_ANOM
3863 // && bit_size // ??? gcc on ARM/rpi does that
3865 align = 1;
3867 } else {
3868 bt = f->type.t & VT_BTYPE;
3869 if ((bit_pos + bit_size > size * 8)
3870 || (bit_size > 0) == (bt != prevbt)
3872 c = (c + align - 1) & -align;
3873 offset = c;
3874 bit_pos = 0;
3875 /* In MS bitfield mode a bit-field run always uses
3876 at least as many bits as the underlying type.
3877 To start a new run it's also required that this
3878 or the last bit-field had non-zero width. */
3879 if (bit_size || prev_bit_size)
3880 c += size;
3882 /* In MS layout the records alignment is normally
3883 influenced by the field, except for a zero-width
3884 field at the start of a run (but by further zero-width
3885 fields it is again). */
3886 if (bit_size == 0 && prevbt != bt)
3887 align = 1;
3888 prevbt = bt;
3889 prev_bit_size = bit_size;
3892 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3893 | (bit_pos << VT_STRUCT_SHIFT);
3894 bit_pos += bit_size;
3896 if (align > maxalign)
3897 maxalign = align;
3899 #ifdef BF_DEBUG
3900 printf("set field %s offset %-2d size %-2d align %-2d",
3901 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3902 if (f->type.t & VT_BITFIELD) {
3903 printf(" pos %-2d bits %-2d",
3904 BIT_POS(f->type.t),
3905 BIT_SIZE(f->type.t)
3908 printf("\n");
3909 #endif
3911 f->c = offset;
3912 f->r = 0;
3915 if (pcc)
3916 c += (bit_pos + 7) >> 3;
3918 /* store size and alignment */
3919 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3920 if (a < maxalign)
3921 a = maxalign;
3922 type->ref->r = a;
3923 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3924 /* can happen if individual align for some member was given. In
3925 this case MSVC ignores maxalign when aligning the size */
3926 a = pragma_pack;
3927 if (a < bt)
3928 a = bt;
3930 c = (c + a - 1) & -a;
3931 type->ref->c = c;
3933 #ifdef BF_DEBUG
3934 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3935 #endif
3937 /* check whether we can access bitfields by their type */
3938 for (f = type->ref->next; f; f = f->next) {
3939 int s, px, cx, c0;
3940 CType t;
3942 if (0 == (f->type.t & VT_BITFIELD))
3943 continue;
3944 f->type.ref = f;
3945 f->auxtype = -1;
3946 bit_size = BIT_SIZE(f->type.t);
3947 if (bit_size == 0)
3948 continue;
3949 bit_pos = BIT_POS(f->type.t);
3950 size = type_size(&f->type, &align);
3951 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3952 continue;
3954 /* try to access the field using a different type */
3955 c0 = -1, s = align = 1;
3956 for (;;) {
3957 px = f->c * 8 + bit_pos;
3958 cx = (px >> 3) & -align;
3959 px = px - (cx << 3);
3960 if (c0 == cx)
3961 break;
3962 s = (px + bit_size + 7) >> 3;
3963 if (s > 4) {
3964 t.t = VT_LLONG;
3965 } else if (s > 2) {
3966 t.t = VT_INT;
3967 } else if (s > 1) {
3968 t.t = VT_SHORT;
3969 } else {
3970 t.t = VT_BYTE;
3972 s = type_size(&t, &align);
3973 c0 = cx;
3976 if (px + bit_size <= s * 8 && cx + s <= c) {
3977 /* update offset and bit position */
3978 f->c = cx;
3979 bit_pos = px;
3980 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3981 | (bit_pos << VT_STRUCT_SHIFT);
3982 if (s != size)
3983 f->auxtype = t.t;
3984 #ifdef BF_DEBUG
3985 printf("FIX field %s offset %-2d size %-2d align %-2d "
3986 "pos %-2d bits %-2d\n",
3987 get_tok_str(f->v & ~SYM_FIELD, NULL),
3988 cx, s, align, px, bit_size);
3989 #endif
3990 } else {
3991 /* fall back to load/store single-byte wise */
3992 f->auxtype = VT_STRUCT;
3993 #ifdef BF_DEBUG
3994 printf("FIX field %s : load byte-wise\n",
3995 get_tok_str(f->v & ~SYM_FIELD, NULL));
3996 #endif
4001 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4002 static void struct_decl(CType *type, int u)
4004 int v, c, size, align, flexible;
4005 int bit_size, bsize, bt;
4006 Sym *s, *ss, **ps;
4007 AttributeDef ad, ad1;
4008 CType type1, btype;
4010 memset(&ad, 0, sizeof ad);
4011 next();
4012 parse_attribute(&ad);
4013 if (tok != '{') {
4014 v = tok;
4015 next();
4016 /* struct already defined ? return it */
4017 if (v < TOK_IDENT)
4018 expect("struct/union/enum name");
4019 s = struct_find(v);
4020 if (s && (s->sym_scope == local_scope || tok != '{')) {
4021 if (u == s->type.t)
4022 goto do_decl;
4023 if (u == VT_ENUM && IS_ENUM(s->type.t))
4024 goto do_decl;
4025 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4027 } else {
4028 v = anon_sym++;
4030 /* Record the original enum/struct/union token. */
4031 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4032 type1.ref = NULL;
4033 /* we put an undefined size for struct/union */
4034 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4035 s->r = 0; /* default alignment is zero as gcc */
4036 do_decl:
4037 type->t = s->type.t;
4038 type->ref = s;
4040 if (tok == '{') {
4041 next();
4042 if (s->c != -1)
4043 tcc_error("struct/union/enum already defined");
4044 s->c = -2;
4045 /* cannot be empty */
4046 /* non empty enums are not allowed */
4047 ps = &s->next;
4048 if (u == VT_ENUM) {
4049 long long ll = 0, pl = 0, nl = 0;
4050 CType t;
4051 t.ref = s;
4052 /* enum symbols have static storage */
4053 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4054 for(;;) {
4055 v = tok;
4056 if (v < TOK_UIDENT)
4057 expect("identifier");
4058 ss = sym_find(v);
4059 if (ss && !local_stack)
4060 tcc_error("redefinition of enumerator '%s'",
4061 get_tok_str(v, NULL));
4062 next();
4063 if (tok == '=') {
4064 next();
4065 ll = expr_const64();
4067 ss = sym_push(v, &t, VT_CONST, 0);
4068 ss->enum_val = ll;
4069 *ps = ss, ps = &ss->next;
4070 if (ll < nl)
4071 nl = ll;
4072 if (ll > pl)
4073 pl = ll;
4074 if (tok != ',')
4075 break;
4076 next();
4077 ll++;
4078 /* NOTE: we accept a trailing comma */
4079 if (tok == '}')
4080 break;
4082 skip('}');
4083 /* set integral type of the enum */
4084 t.t = VT_INT;
4085 if (nl >= 0) {
4086 if (pl != (unsigned)pl)
4087 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4088 t.t |= VT_UNSIGNED;
4089 } else if (pl != (int)pl || nl != (int)nl)
4090 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4091 s->type.t = type->t = t.t | VT_ENUM;
4092 s->c = 0;
4093 /* set type for enum members */
4094 for (ss = s->next; ss; ss = ss->next) {
4095 ll = ss->enum_val;
4096 if (ll == (int)ll) /* default is int if it fits */
4097 continue;
4098 if (t.t & VT_UNSIGNED) {
4099 ss->type.t |= VT_UNSIGNED;
4100 if (ll == (unsigned)ll)
4101 continue;
4103 ss->type.t = (ss->type.t & ~VT_BTYPE)
4104 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4106 } else {
4107 c = 0;
4108 flexible = 0;
4109 while (tok != '}') {
4110 if (!parse_btype(&btype, &ad1)) {
4111 skip(';');
4112 continue;
4114 while (1) {
4115 if (flexible)
4116 tcc_error("flexible array member '%s' not at the end of struct",
4117 get_tok_str(v, NULL));
4118 bit_size = -1;
4119 v = 0;
4120 type1 = btype;
4121 if (tok != ':') {
4122 if (tok != ';')
4123 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4124 if (v == 0) {
4125 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4126 expect("identifier");
4127 else {
4128 int v = btype.ref->v;
4129 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4130 if (tcc_state->ms_extensions == 0)
4131 expect("identifier");
4135 if (type_size(&type1, &align) < 0) {
4136 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4137 flexible = 1;
4138 else
4139 tcc_error("field '%s' has incomplete type",
4140 get_tok_str(v, NULL));
4142 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4143 (type1.t & VT_BTYPE) == VT_VOID ||
4144 (type1.t & VT_STORAGE))
4145 tcc_error("invalid type for '%s'",
4146 get_tok_str(v, NULL));
4148 if (tok == ':') {
4149 next();
4150 bit_size = expr_const();
4151 /* XXX: handle v = 0 case for messages */
4152 if (bit_size < 0)
4153 tcc_error("negative width in bit-field '%s'",
4154 get_tok_str(v, NULL));
4155 if (v && bit_size == 0)
4156 tcc_error("zero width for bit-field '%s'",
4157 get_tok_str(v, NULL));
4158 parse_attribute(&ad1);
4160 size = type_size(&type1, &align);
4161 if (bit_size >= 0) {
4162 bt = type1.t & VT_BTYPE;
4163 if (bt != VT_INT &&
4164 bt != VT_BYTE &&
4165 bt != VT_SHORT &&
4166 bt != VT_BOOL &&
4167 bt != VT_LLONG)
4168 tcc_error("bitfields must have scalar type");
4169 bsize = size * 8;
4170 if (bit_size > bsize) {
4171 tcc_error("width of '%s' exceeds its type",
4172 get_tok_str(v, NULL));
4173 } else if (bit_size == bsize
4174 && !ad.a.packed && !ad1.a.packed) {
4175 /* no need for bit fields */
4177 } else if (bit_size == 64) {
4178 tcc_error("field width 64 not implemented");
4179 } else {
4180 type1.t = (type1.t & ~VT_STRUCT_MASK)
4181 | VT_BITFIELD
4182 | (bit_size << (VT_STRUCT_SHIFT + 6));
4185 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4186 /* Remember we've seen a real field to check
4187 for placement of flexible array member. */
4188 c = 1;
4190 /* If member is a struct or bit-field, enforce
4191 placing into the struct (as anonymous). */
4192 if (v == 0 &&
4193 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4194 bit_size >= 0)) {
4195 v = anon_sym++;
4197 if (v) {
4198 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4199 ss->a = ad1.a;
4200 *ps = ss;
4201 ps = &ss->next;
4203 if (tok == ';' || tok == TOK_EOF)
4204 break;
4205 skip(',');
4207 skip(';');
4209 skip('}');
4210 parse_attribute(&ad);
4211 struct_layout(type, &ad);
4216 static void sym_to_attr(AttributeDef *ad, Sym *s)
4218 merge_symattr(&ad->a, &s->a);
4219 merge_funcattr(&ad->f, &s->f);
4222 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4223 are added to the element type, copied because it could be a typedef. */
4224 static void parse_btype_qualify(CType *type, int qualifiers)
4226 while (type->t & VT_ARRAY) {
4227 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4228 type = &type->ref->type;
4230 type->t |= qualifiers;
4233 /* return 0 if no type declaration. otherwise, return the basic type
4234 and skip it.
4236 static int parse_btype(CType *type, AttributeDef *ad)
4238 int t, u, bt, st, type_found, typespec_found, g, n;
4239 Sym *s;
4240 CType type1;
4242 memset(ad, 0, sizeof(AttributeDef));
4243 type_found = 0;
4244 typespec_found = 0;
4245 t = VT_INT;
4246 bt = st = -1;
4247 type->ref = NULL;
4249 while(1) {
4250 switch(tok) {
4251 case TOK_EXTENSION:
4252 /* currently, we really ignore extension */
4253 next();
4254 continue;
4256 /* basic types */
4257 case TOK_CHAR:
4258 u = VT_BYTE;
4259 basic_type:
4260 next();
4261 basic_type1:
4262 if (u == VT_SHORT || u == VT_LONG) {
4263 if (st != -1 || (bt != -1 && bt != VT_INT))
4264 tmbt: tcc_error("too many basic types");
4265 st = u;
4266 } else {
4267 if (bt != -1 || (st != -1 && u != VT_INT))
4268 goto tmbt;
4269 bt = u;
4271 if (u != VT_INT)
4272 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4273 typespec_found = 1;
4274 break;
4275 case TOK_VOID:
4276 u = VT_VOID;
4277 goto basic_type;
4278 case TOK_SHORT:
4279 u = VT_SHORT;
4280 goto basic_type;
4281 case TOK_INT:
4282 u = VT_INT;
4283 goto basic_type;
4284 case TOK_ALIGNAS:
4285 { int n;
4286 AttributeDef ad1;
4287 next();
4288 skip('(');
4289 memset(&ad1, 0, sizeof(AttributeDef));
4290 if (parse_btype(&type1, &ad1)) {
4291 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4292 if (ad1.a.aligned)
4293 n = 1 << (ad1.a.aligned - 1);
4294 else
4295 type_size(&type1, &n);
4296 } else {
4297 n = expr_const();
4298 if (n <= 0 || (n & (n - 1)) != 0)
4299 tcc_error("alignment must be a positive power of two");
4301 skip(')');
4302 ad->a.aligned = exact_log2p1(n);
4304 continue;
4305 case TOK_LONG:
4306 if ((t & VT_BTYPE) == VT_DOUBLE) {
4307 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4308 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4309 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4310 } else {
4311 u = VT_LONG;
4312 goto basic_type;
4314 next();
4315 break;
4316 #ifdef TCC_TARGET_ARM64
4317 case TOK_UINT128:
4318 /* GCC's __uint128_t appears in some Linux header files. Make it a
4319 synonym for long double to get the size and alignment right. */
4320 u = VT_LDOUBLE;
4321 goto basic_type;
4322 #endif
4323 case TOK_BOOL:
4324 u = VT_BOOL;
4325 goto basic_type;
4326 case TOK_FLOAT:
4327 u = VT_FLOAT;
4328 goto basic_type;
4329 case TOK_DOUBLE:
4330 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4331 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4332 } else {
4333 u = VT_DOUBLE;
4334 goto basic_type;
4336 next();
4337 break;
4338 case TOK_ENUM:
4339 struct_decl(&type1, VT_ENUM);
4340 basic_type2:
4341 u = type1.t;
4342 type->ref = type1.ref;
4343 goto basic_type1;
4344 case TOK_STRUCT:
4345 struct_decl(&type1, VT_STRUCT);
4346 goto basic_type2;
4347 case TOK_UNION:
4348 struct_decl(&type1, VT_UNION);
4349 goto basic_type2;
4351 /* type modifiers */
4352 case TOK_CONST1:
4353 case TOK_CONST2:
4354 case TOK_CONST3:
4355 type->t = t;
4356 parse_btype_qualify(type, VT_CONSTANT);
4357 t = type->t;
4358 next();
4359 break;
4360 case TOK_VOLATILE1:
4361 case TOK_VOLATILE2:
4362 case TOK_VOLATILE3:
4363 type->t = t;
4364 parse_btype_qualify(type, VT_VOLATILE);
4365 t = type->t;
4366 next();
4367 break;
4368 case TOK_SIGNED1:
4369 case TOK_SIGNED2:
4370 case TOK_SIGNED3:
4371 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4372 tcc_error("signed and unsigned modifier");
4373 t |= VT_DEFSIGN;
4374 next();
4375 typespec_found = 1;
4376 break;
4377 case TOK_REGISTER:
4378 case TOK_AUTO:
4379 case TOK_RESTRICT1:
4380 case TOK_RESTRICT2:
4381 case TOK_RESTRICT3:
4382 next();
4383 break;
4384 case TOK_UNSIGNED:
4385 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4386 tcc_error("signed and unsigned modifier");
4387 t |= VT_DEFSIGN | VT_UNSIGNED;
4388 next();
4389 typespec_found = 1;
4390 break;
4392 /* storage */
4393 case TOK_EXTERN:
4394 g = VT_EXTERN;
4395 goto storage;
4396 case TOK_STATIC:
4397 g = VT_STATIC;
4398 goto storage;
4399 case TOK_TYPEDEF:
4400 g = VT_TYPEDEF;
4401 goto storage;
4402 storage:
4403 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4404 tcc_error("multiple storage classes");
4405 t |= g;
4406 next();
4407 break;
4408 case TOK_INLINE1:
4409 case TOK_INLINE2:
4410 case TOK_INLINE3:
4411 t |= VT_INLINE;
4412 next();
4413 break;
4414 case TOK_NORETURN3:
4415 next();
4416 ad->f.func_noreturn = 1;
4417 break;
4418 /* GNUC attribute */
4419 case TOK_ATTRIBUTE1:
4420 case TOK_ATTRIBUTE2:
4421 parse_attribute(ad);
4422 if (ad->attr_mode) {
4423 u = ad->attr_mode -1;
4424 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4426 continue;
4427 /* GNUC typeof */
4428 case TOK_TYPEOF1:
4429 case TOK_TYPEOF2:
4430 case TOK_TYPEOF3:
4431 next();
4432 parse_expr_type(&type1);
4433 /* remove all storage modifiers except typedef */
4434 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4435 if (type1.ref)
4436 sym_to_attr(ad, type1.ref);
4437 goto basic_type2;
4438 default:
4439 if (typespec_found)
4440 goto the_end;
4441 s = sym_find(tok);
4442 if (!s || !(s->type.t & VT_TYPEDEF))
4443 goto the_end;
4445 n = tok, next();
4446 if (tok == ':' && !in_generic) {
4447 /* ignore if it's a label */
4448 unget_tok(n);
4449 goto the_end;
4452 t &= ~(VT_BTYPE|VT_LONG);
4453 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4454 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4455 type->ref = s->type.ref;
4456 if (t)
4457 parse_btype_qualify(type, t);
4458 t = type->t;
4459 /* get attributes from typedef */
4460 sym_to_attr(ad, s);
4461 typespec_found = 1;
4462 st = bt = -2;
4463 break;
4465 type_found = 1;
4467 the_end:
4468 if (tcc_state->char_is_unsigned) {
4469 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4470 t |= VT_UNSIGNED;
4472 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4473 bt = t & (VT_BTYPE|VT_LONG);
4474 if (bt == VT_LONG)
4475 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4476 #ifdef TCC_TARGET_PE
4477 if (bt == VT_LDOUBLE)
4478 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4479 #endif
4480 type->t = t;
4481 return type_found;
4484 /* convert a function parameter type (array to pointer and function to
4485 function pointer) */
4486 static inline void convert_parameter_type(CType *pt)
4488 /* remove const and volatile qualifiers (XXX: const could be used
4489 to indicate a const function parameter */
4490 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4491 /* array must be transformed to pointer according to ANSI C */
4492 pt->t &= ~VT_ARRAY;
4493 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4494 mk_pointer(pt);
4498 ST_FUNC void parse_asm_str(CString *astr)
4500 skip('(');
4501 parse_mult_str(astr, "string constant");
4504 /* Parse an asm label and return the token */
4505 static int asm_label_instr(void)
4507 int v;
4508 CString astr;
4510 next();
4511 parse_asm_str(&astr);
4512 skip(')');
4513 #ifdef ASM_DEBUG
4514 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4515 #endif
4516 v = tok_alloc(astr.data, astr.size - 1)->tok;
4517 cstr_free(&astr);
4518 return v;
4521 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4523 int n, l, t1, arg_size, align, unused_align;
4524 Sym **plast, *s, *first;
4525 AttributeDef ad1;
4526 CType pt;
4528 if (tok == '(') {
4529 /* function type, or recursive declarator (return if so) */
4530 next();
4531 if (td && !(td & TYPE_ABSTRACT))
4532 return 0;
4533 if (tok == ')')
4534 l = 0;
4535 else if (parse_btype(&pt, &ad1))
4536 l = FUNC_NEW;
4537 else if (td) {
4538 merge_attr (ad, &ad1);
4539 return 0;
4540 } else
4541 l = FUNC_OLD;
4542 first = NULL;
4543 plast = &first;
4544 arg_size = 0;
4545 if (l) {
4546 for(;;) {
4547 /* read param name and compute offset */
4548 if (l != FUNC_OLD) {
4549 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4550 break;
4551 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4552 if ((pt.t & VT_BTYPE) == VT_VOID)
4553 tcc_error("parameter declared as void");
4554 } else {
4555 n = tok;
4556 if (n < TOK_UIDENT)
4557 expect("identifier");
4558 pt.t = VT_VOID; /* invalid type */
4559 pt.ref = NULL;
4560 next();
4562 convert_parameter_type(&pt);
4563 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4564 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4565 *plast = s;
4566 plast = &s->next;
4567 if (tok == ')')
4568 break;
4569 skip(',');
4570 if (l == FUNC_NEW && tok == TOK_DOTS) {
4571 l = FUNC_ELLIPSIS;
4572 next();
4573 break;
4575 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4576 tcc_error("invalid type");
4578 } else
4579 /* if no parameters, then old type prototype */
4580 l = FUNC_OLD;
4581 skip(')');
4582 /* NOTE: const is ignored in returned type as it has a special
4583 meaning in gcc / C++ */
4584 type->t &= ~VT_CONSTANT;
4585 /* some ancient pre-K&R C allows a function to return an array
4586 and the array brackets to be put after the arguments, such
4587 that "int c()[]" means something like "int[] c()" */
4588 if (tok == '[') {
4589 next();
4590 skip(']'); /* only handle simple "[]" */
4591 mk_pointer(type);
4593 /* we push a anonymous symbol which will contain the function prototype */
4594 ad->f.func_args = arg_size;
4595 ad->f.func_type = l;
4596 s = sym_push(SYM_FIELD, type, 0, 0);
4597 s->a = ad->a;
4598 s->f = ad->f;
4599 s->next = first;
4600 type->t = VT_FUNC;
4601 type->ref = s;
4602 } else if (tok == '[') {
4603 int saved_nocode_wanted = nocode_wanted;
4604 /* array definition */
4605 next();
4606 while (1) {
4607 /* XXX The optional type-quals and static should only be accepted
4608 in parameter decls. The '*' as well, and then even only
4609 in prototypes (not function defs). */
4610 switch (tok) {
4611 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4612 case TOK_CONST1:
4613 case TOK_VOLATILE1:
4614 case TOK_STATIC:
4615 case '*':
4616 next();
4617 continue;
4618 default:
4619 break;
4621 break;
4623 n = -1;
4624 t1 = 0;
4625 if (tok != ']') {
4626 if (!local_stack || (storage & VT_STATIC))
4627 vpushi(expr_const());
4628 else {
4629 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4630 length must always be evaluated, even under nocode_wanted,
4631 so that its size slot is initialized (e.g. under sizeof
4632 or typeof). */
4633 nocode_wanted = 0;
4634 gexpr();
4636 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4637 n = vtop->c.i;
4638 if (n < 0)
4639 tcc_error("invalid array size");
4640 } else {
4641 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4642 tcc_error("size of variable length array should be an integer");
4643 n = 0;
4644 t1 = VT_VLA;
4647 skip(']');
4648 /* parse next post type */
4649 post_type(type, ad, storage, 0);
4651 if ((type->t & VT_BTYPE) == VT_FUNC)
4652 tcc_error("declaration of an array of functions");
4653 if ((type->t & VT_BTYPE) == VT_VOID
4654 || type_size(type, &unused_align) < 0)
4655 tcc_error("declaration of an array of incomplete type elements");
4657 t1 |= type->t & VT_VLA;
4659 if (t1 & VT_VLA) {
4660 if (n < 0)
4661 tcc_error("need explicit inner array size in VLAs");
4662 loc -= type_size(&int_type, &align);
4663 loc &= -align;
4664 n = loc;
4666 vla_runtime_type_size(type, &align);
4667 gen_op('*');
4668 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4669 vswap();
4670 vstore();
4672 if (n != -1)
4673 vpop();
4674 nocode_wanted = saved_nocode_wanted;
4676 /* we push an anonymous symbol which will contain the array
4677 element type */
4678 s = sym_push(SYM_FIELD, type, 0, n);
4679 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4680 type->ref = s;
4682 return 1;
4685 /* Parse a type declarator (except basic type), and return the type
4686 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4687 expected. 'type' should contain the basic type. 'ad' is the
4688 attribute definition of the basic type. It can be modified by
4689 type_decl(). If this (possibly abstract) declarator is a pointer chain
4690 it returns the innermost pointed to type (equals *type, but is a different
4691 pointer), otherwise returns type itself, that's used for recursive calls. */
4692 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4694 CType *post, *ret;
4695 int qualifiers, storage;
4697 /* recursive type, remove storage bits first, apply them later again */
4698 storage = type->t & VT_STORAGE;
4699 type->t &= ~VT_STORAGE;
4700 post = ret = type;
4702 while (tok == '*') {
4703 qualifiers = 0;
4704 redo:
4705 next();
4706 switch(tok) {
4707 case TOK_CONST1:
4708 case TOK_CONST2:
4709 case TOK_CONST3:
4710 qualifiers |= VT_CONSTANT;
4711 goto redo;
4712 case TOK_VOLATILE1:
4713 case TOK_VOLATILE2:
4714 case TOK_VOLATILE3:
4715 qualifiers |= VT_VOLATILE;
4716 goto redo;
4717 case TOK_RESTRICT1:
4718 case TOK_RESTRICT2:
4719 case TOK_RESTRICT3:
4720 goto redo;
4721 /* XXX: clarify attribute handling */
4722 case TOK_ATTRIBUTE1:
4723 case TOK_ATTRIBUTE2:
4724 parse_attribute(ad);
4725 break;
4727 mk_pointer(type);
4728 type->t |= qualifiers;
4729 if (ret == type)
4730 /* innermost pointed to type is the one for the first derivation */
4731 ret = pointed_type(type);
4734 if (tok == '(') {
4735 /* This is possibly a parameter type list for abstract declarators
4736 ('int ()'), use post_type for testing this. */
4737 if (!post_type(type, ad, 0, td)) {
4738 /* It's not, so it's a nested declarator, and the post operations
4739 apply to the innermost pointed to type (if any). */
4740 /* XXX: this is not correct to modify 'ad' at this point, but
4741 the syntax is not clear */
4742 parse_attribute(ad);
4743 post = type_decl(type, ad, v, td);
4744 skip(')');
4745 } else
4746 goto abstract;
4747 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4748 /* type identifier */
4749 *v = tok;
4750 next();
4751 } else {
4752 abstract:
4753 if (!(td & TYPE_ABSTRACT))
4754 expect("identifier");
4755 *v = 0;
4757 post_type(post, ad, storage, 0);
4758 parse_attribute(ad);
4759 type->t |= storage;
4760 return ret;
4763 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4764 ST_FUNC int lvalue_type(int t)
4766 int bt, r;
4767 r = VT_LVAL;
4768 bt = t & VT_BTYPE;
4769 if (bt == VT_BYTE || bt == VT_BOOL)
4770 r |= VT_LVAL_BYTE;
4771 else if (bt == VT_SHORT)
4772 r |= VT_LVAL_SHORT;
4773 else
4774 return r;
4775 if (t & VT_UNSIGNED)
4776 r |= VT_LVAL_UNSIGNED;
4777 return r;
4780 /* indirection with full error checking and bound check */
4781 ST_FUNC void indir(void)
4783 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4784 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4785 return;
4786 expect("pointer");
4788 if (vtop->r & VT_LVAL)
4789 gv(RC_INT);
4790 vtop->type = *pointed_type(&vtop->type);
4791 /* Arrays and functions are never lvalues */
4792 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4793 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4794 vtop->r |= lvalue_type(vtop->type.t);
4795 /* if bound checking, the referenced pointer must be checked */
4796 #ifdef CONFIG_TCC_BCHECK
4797 if (tcc_state->do_bounds_check)
4798 vtop->r |= VT_MUSTBOUND;
4799 #endif
4803 /* pass a parameter to a function and do type checking and casting */
4804 static void gfunc_param_typed(Sym *func, Sym *arg)
4806 int func_type;
4807 CType type;
4809 func_type = func->f.func_type;
4810 if (func_type == FUNC_OLD ||
4811 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4812 /* default casting : only need to convert float to double */
4813 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4814 gen_cast_s(VT_DOUBLE);
4815 } else if (vtop->type.t & VT_BITFIELD) {
4816 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4817 type.ref = vtop->type.ref;
4818 gen_cast(&type);
4820 } else if (arg == NULL) {
4821 tcc_error("too many arguments to function");
4822 } else {
4823 type = arg->type;
4824 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4825 gen_assign_cast(&type);
4829 /* parse an expression and return its type without any side effect. */
4830 static void expr_type(CType *type, void (*expr_fn)(void))
4832 nocode_wanted++;
4833 expr_fn();
4834 *type = vtop->type;
4835 vpop();
4836 nocode_wanted--;
4839 /* parse an expression of the form '(type)' or '(expr)' and return its
4840 type */
4841 static void parse_expr_type(CType *type)
4843 int n;
4844 AttributeDef ad;
4846 skip('(');
4847 if (parse_btype(type, &ad)) {
4848 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4849 } else {
4850 expr_type(type, gexpr);
4852 skip(')');
4855 static void parse_type(CType *type)
4857 AttributeDef ad;
4858 int n;
4860 if (!parse_btype(type, &ad)) {
4861 expect("type");
4863 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4866 static void parse_builtin_params(int nc, const char *args)
4868 char c, sep = '(';
4869 CType t;
4870 if (nc)
4871 nocode_wanted++;
4872 next();
4873 while ((c = *args++)) {
4874 skip(sep);
4875 sep = ',';
4876 switch (c) {
4877 case 'e': expr_eq(); continue;
4878 case 't': parse_type(&t); vpush(&t); continue;
4879 default: tcc_error("internal error"); break;
4882 skip(')');
4883 if (nc)
4884 nocode_wanted--;
4887 ST_FUNC void unary(void)
4889 int n, t, align, size, r, sizeof_caller;
4890 CType type;
4891 Sym *s;
4892 AttributeDef ad;
4894 sizeof_caller = in_sizeof;
4895 in_sizeof = 0;
4896 type.ref = NULL;
4897 /* XXX: GCC 2.95.3 does not generate a table although it should be
4898 better here */
4899 tok_next:
4900 switch(tok) {
4901 case TOK_EXTENSION:
4902 next();
4903 goto tok_next;
4904 case TOK_LCHAR:
4905 #ifdef TCC_TARGET_PE
4906 t = VT_SHORT|VT_UNSIGNED;
4907 goto push_tokc;
4908 #endif
4909 case TOK_CINT:
4910 case TOK_CCHAR:
4911 t = VT_INT;
4912 push_tokc:
4913 type.t = t;
4914 vsetc(&type, VT_CONST, &tokc);
4915 next();
4916 break;
4917 case TOK_CUINT:
4918 t = VT_INT | VT_UNSIGNED;
4919 goto push_tokc;
4920 case TOK_CLLONG:
4921 t = VT_LLONG;
4922 goto push_tokc;
4923 case TOK_CULLONG:
4924 t = VT_LLONG | VT_UNSIGNED;
4925 goto push_tokc;
4926 case TOK_CFLOAT:
4927 t = VT_FLOAT;
4928 goto push_tokc;
4929 case TOK_CDOUBLE:
4930 t = VT_DOUBLE;
4931 goto push_tokc;
4932 case TOK_CLDOUBLE:
4933 t = VT_LDOUBLE;
4934 goto push_tokc;
4935 case TOK_CLONG:
4936 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4937 goto push_tokc;
4938 case TOK_CULONG:
4939 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4940 goto push_tokc;
4941 case TOK___FUNCTION__:
4942 if (!gnu_ext)
4943 goto tok_identifier;
4944 /* fall thru */
4945 case TOK___FUNC__:
4947 void *ptr;
4948 int len;
4949 /* special function name identifier */
4950 len = strlen(funcname) + 1;
4951 /* generate char[len] type */
4952 type.t = VT_BYTE;
4953 mk_pointer(&type);
4954 type.t |= VT_ARRAY;
4955 type.ref->c = len;
4956 vpush_ref(&type, data_section, data_section->data_offset, len);
4957 if (!NODATA_WANTED) {
4958 ptr = section_ptr_add(data_section, len);
4959 memcpy(ptr, funcname, len);
4961 next();
4963 break;
4964 case TOK_LSTR:
4965 #ifdef TCC_TARGET_PE
4966 t = VT_SHORT | VT_UNSIGNED;
4967 #else
4968 t = VT_INT;
4969 #endif
4970 goto str_init;
4971 case TOK_STR:
4972 /* string parsing */
4973 t = VT_BYTE;
4974 if (tcc_state->char_is_unsigned)
4975 t = VT_BYTE | VT_UNSIGNED;
4976 str_init:
4977 if (tcc_state->warn_write_strings)
4978 t |= VT_CONSTANT;
4979 type.t = t;
4980 mk_pointer(&type);
4981 type.t |= VT_ARRAY;
4982 memset(&ad, 0, sizeof(AttributeDef));
4983 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4984 break;
4985 case '(':
4986 next();
4987 /* cast ? */
4988 if (parse_btype(&type, &ad)) {
4989 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4990 skip(')');
4991 /* check ISOC99 compound literal */
4992 if (tok == '{') {
4993 /* data is allocated locally by default */
4994 if (global_expr)
4995 r = VT_CONST;
4996 else
4997 r = VT_LOCAL;
4998 /* all except arrays are lvalues */
4999 if (!(type.t & VT_ARRAY))
5000 r |= lvalue_type(type.t);
5001 memset(&ad, 0, sizeof(AttributeDef));
5002 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5003 } else {
5004 if (sizeof_caller) {
5005 vpush(&type);
5006 return;
5008 unary();
5009 gen_cast(&type);
5011 } else if (tok == '{') {
5012 int saved_nocode_wanted = nocode_wanted;
5013 if (const_wanted)
5014 tcc_error("expected constant");
5015 /* save all registers */
5016 save_regs(0);
5017 /* statement expression : we do not accept break/continue
5018 inside as GCC does. We do retain the nocode_wanted state,
5019 as statement expressions can't ever be entered from the
5020 outside, so any reactivation of code emission (from labels
5021 or loop heads) can be disabled again after the end of it. */
5022 block(1);
5023 nocode_wanted = saved_nocode_wanted;
5024 skip(')');
5025 } else {
5026 gexpr();
5027 skip(')');
5029 break;
5030 case '*':
5031 next();
5032 unary();
5033 indir();
5034 break;
5035 case '&':
5036 next();
5037 unary();
5038 /* functions names must be treated as function pointers,
5039 except for unary '&' and sizeof. Since we consider that
5040 functions are not lvalues, we only have to handle it
5041 there and in function calls. */
5042 /* arrays can also be used although they are not lvalues */
5043 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5044 !(vtop->type.t & VT_ARRAY))
5045 test_lvalue();
5046 mk_pointer(&vtop->type);
5047 gaddrof();
5048 break;
5049 case '!':
5050 next();
5051 unary();
5052 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5053 gen_cast_s(VT_BOOL);
5054 vtop->c.i = !vtop->c.i;
5055 } else if (vtop->r == VT_CMP) {
5056 vtop->cmp_op ^= 1;
5057 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5058 } else {
5059 vpushi(0);
5060 gen_op(TOK_EQ);
5062 break;
5063 case '~':
5064 next();
5065 unary();
5066 vpushi(-1);
5067 gen_op('^');
5068 break;
5069 case '+':
5070 next();
5071 unary();
5072 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5073 tcc_error("pointer not accepted for unary plus");
5074 /* In order to force cast, we add zero, except for floating point
5075 where we really need an noop (otherwise -0.0 will be transformed
5076 into +0.0). */
5077 if (!is_float(vtop->type.t)) {
5078 vpushi(0);
5079 gen_op('+');
5081 break;
5082 case TOK_SIZEOF:
5083 case TOK_ALIGNOF1:
5084 case TOK_ALIGNOF2:
5085 case TOK_ALIGNOF3:
5086 t = tok;
5087 next();
5088 in_sizeof++;
5089 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5090 s = NULL;
5091 if (vtop[1].r & VT_SYM)
5092 s = vtop[1].sym; /* hack: accessing previous vtop */
5093 size = type_size(&type, &align);
5094 if (s && s->a.aligned)
5095 align = 1 << (s->a.aligned - 1);
5096 if (t == TOK_SIZEOF) {
5097 if (!(type.t & VT_VLA)) {
5098 if (size < 0)
5099 tcc_error("sizeof applied to an incomplete type");
5100 vpushs(size);
5101 } else {
5102 vla_runtime_type_size(&type, &align);
5104 } else {
5105 vpushs(align);
5107 vtop->type.t |= VT_UNSIGNED;
5108 break;
5110 case TOK_builtin_expect:
5111 /* __builtin_expect is a no-op for now */
5112 parse_builtin_params(0, "ee");
5113 vpop();
5114 break;
5115 case TOK_builtin_types_compatible_p:
5116 parse_builtin_params(0, "tt");
5117 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5118 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5119 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5120 vtop -= 2;
5121 vpushi(n);
5122 break;
5123 case TOK_builtin_choose_expr:
5125 int64_t c;
5126 next();
5127 skip('(');
5128 c = expr_const64();
5129 skip(',');
5130 if (!c) {
5131 nocode_wanted++;
5133 expr_eq();
5134 if (!c) {
5135 vpop();
5136 nocode_wanted--;
5138 skip(',');
5139 if (c) {
5140 nocode_wanted++;
5142 expr_eq();
5143 if (c) {
5144 vpop();
5145 nocode_wanted--;
5147 skip(')');
5149 break;
5150 case TOK_builtin_constant_p:
5151 parse_builtin_params(1, "e");
5152 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5153 vtop--;
5154 vpushi(n);
5155 break;
5156 case TOK_builtin_frame_address:
5157 case TOK_builtin_return_address:
5159 int tok1 = tok;
5160 int level;
5161 next();
5162 skip('(');
5163 if (tok != TOK_CINT) {
5164 tcc_error("%s only takes positive integers",
5165 tok1 == TOK_builtin_return_address ?
5166 "__builtin_return_address" :
5167 "__builtin_frame_address");
5169 level = (uint32_t)tokc.i;
5170 next();
5171 skip(')');
5172 type.t = VT_VOID;
5173 mk_pointer(&type);
5174 vset(&type, VT_LOCAL, 0); /* local frame */
5175 while (level--) {
5176 mk_pointer(&vtop->type);
5177 indir(); /* -> parent frame */
5179 if (tok1 == TOK_builtin_return_address) {
5180 // assume return address is just above frame pointer on stack
5181 vpushi(PTR_SIZE);
5182 gen_op('+');
5183 mk_pointer(&vtop->type);
5184 indir();
5187 break;
5188 #ifdef TCC_TARGET_RISCV64
5189 case TOK_builtin_va_start:
5190 parse_builtin_params(0, "ee");
5191 r = vtop->r & VT_VALMASK;
5192 if (r == VT_LLOCAL)
5193 r = VT_LOCAL;
5194 if (r != VT_LOCAL)
5195 tcc_error("__builtin_va_start expects a local variable");
5196 gen_va_start();
5197 vstore();
5198 break;
5199 #endif
5200 #ifdef TCC_TARGET_X86_64
5201 #ifdef TCC_TARGET_PE
5202 case TOK_builtin_va_start:
5203 parse_builtin_params(0, "ee");
5204 r = vtop->r & VT_VALMASK;
5205 if (r == VT_LLOCAL)
5206 r = VT_LOCAL;
5207 if (r != VT_LOCAL)
5208 tcc_error("__builtin_va_start expects a local variable");
5209 vtop->r = r;
5210 vtop->type = char_pointer_type;
5211 vtop->c.i += 8;
5212 vstore();
5213 break;
5214 #else
5215 case TOK_builtin_va_arg_types:
5216 parse_builtin_params(0, "t");
5217 vpushi(classify_x86_64_va_arg(&vtop->type));
5218 vswap();
5219 vpop();
5220 break;
5221 #endif
5222 #endif
5224 #ifdef TCC_TARGET_ARM64
5225 case TOK___va_start: {
5226 parse_builtin_params(0, "ee");
5227 //xx check types
5228 gen_va_start();
5229 vpushi(0);
5230 vtop->type.t = VT_VOID;
5231 break;
5233 case TOK___va_arg: {
5234 parse_builtin_params(0, "et");
5235 type = vtop->type;
5236 vpop();
5237 //xx check types
5238 gen_va_arg(&type);
5239 vtop->type = type;
5240 break;
5242 case TOK___arm64_clear_cache: {
5243 parse_builtin_params(0, "ee");
5244 gen_clear_cache();
5245 vpushi(0);
5246 vtop->type.t = VT_VOID;
5247 break;
5249 #endif
5250 /* pre operations */
5251 case TOK_INC:
5252 case TOK_DEC:
5253 t = tok;
5254 next();
5255 unary();
5256 inc(0, t);
5257 break;
5258 case '-':
5259 next();
5260 unary();
5261 t = vtop->type.t & VT_BTYPE;
5262 if (is_float(t)) {
5263 /* In IEEE negate(x) isn't subtract(0,x), but rather
5264 subtract(-0, x). */
5265 vpush(&vtop->type);
5266 if (t == VT_FLOAT)
5267 vtop->c.f = -1.0 * 0.0;
5268 else if (t == VT_DOUBLE)
5269 vtop->c.d = -1.0 * 0.0;
5270 else
5271 vtop->c.ld = -1.0 * 0.0;
5272 } else
5273 vpushi(0);
5274 vswap();
5275 gen_op('-');
5276 break;
5277 case TOK_LAND:
5278 if (!gnu_ext)
5279 goto tok_identifier;
5280 next();
5281 /* allow to take the address of a label */
5282 if (tok < TOK_UIDENT)
5283 expect("label identifier");
5284 s = label_find(tok);
5285 if (!s) {
5286 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5287 } else {
5288 if (s->r == LABEL_DECLARED)
5289 s->r = LABEL_FORWARD;
5291 if (!s->type.t) {
5292 s->type.t = VT_VOID;
5293 mk_pointer(&s->type);
5294 s->type.t |= VT_STATIC;
5296 vpushsym(&s->type, s);
5297 next();
5298 break;
5300 case TOK_GENERIC:
5302 CType controlling_type;
5303 int has_default = 0;
5304 int has_match = 0;
5305 int learn = 0;
5306 TokenString *str = NULL;
5307 int saved_const_wanted = const_wanted;
5309 next();
5310 skip('(');
5311 const_wanted = 0;
5312 expr_type(&controlling_type, expr_eq);
5313 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5314 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5315 mk_pointer(&controlling_type);
5316 const_wanted = saved_const_wanted;
5317 for (;;) {
5318 learn = 0;
5319 skip(',');
5320 if (tok == TOK_DEFAULT) {
5321 if (has_default)
5322 tcc_error("too many 'default'");
5323 has_default = 1;
5324 if (!has_match)
5325 learn = 1;
5326 next();
5327 } else {
5328 AttributeDef ad_tmp;
5329 int itmp;
5330 CType cur_type;
5332 in_generic++;
5333 parse_btype(&cur_type, &ad_tmp);
5334 in_generic--;
5336 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5337 if (compare_types(&controlling_type, &cur_type, 0)) {
5338 if (has_match) {
5339 tcc_error("type match twice");
5341 has_match = 1;
5342 learn = 1;
5345 skip(':');
5346 if (learn) {
5347 if (str)
5348 tok_str_free(str);
5349 skip_or_save_block(&str);
5350 } else {
5351 skip_or_save_block(NULL);
5353 if (tok == ')')
5354 break;
5356 if (!str) {
5357 char buf[60];
5358 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5359 tcc_error("type '%s' does not match any association", buf);
5361 begin_macro(str, 1);
5362 next();
5363 expr_eq();
5364 if (tok != TOK_EOF)
5365 expect(",");
5366 end_macro();
5367 next();
5368 break;
5370 // special qnan , snan and infinity values
5371 case TOK___NAN__:
5372 n = 0x7fc00000;
5373 special_math_val:
5374 vpushi(n);
5375 vtop->type.t = VT_FLOAT;
5376 next();
5377 break;
5378 case TOK___SNAN__:
5379 n = 0x7f800001;
5380 goto special_math_val;
5381 case TOK___INF__:
5382 n = 0x7f800000;
5383 goto special_math_val;
5385 default:
5386 tok_identifier:
5387 t = tok;
5388 next();
5389 if (t < TOK_UIDENT)
5390 expect("identifier");
5391 s = sym_find(t);
5392 if (!s || IS_ASM_SYM(s)) {
5393 const char *name = get_tok_str(t, NULL);
5394 if (tok != '(')
5395 tcc_error("'%s' undeclared", name);
5396 /* for simple function calls, we tolerate undeclared
5397 external reference to int() function */
5398 if (tcc_state->warn_implicit_function_declaration
5399 #ifdef TCC_TARGET_PE
5400 /* people must be warned about using undeclared WINAPI functions
5401 (which usually start with uppercase letter) */
5402 || (name[0] >= 'A' && name[0] <= 'Z')
5403 #endif
5405 tcc_warning("implicit declaration of function '%s'", name);
5406 s = external_global_sym(t, &func_old_type);
5409 r = s->r;
5410 /* A symbol that has a register is a local register variable,
5411 which starts out as VT_LOCAL value. */
5412 if ((r & VT_VALMASK) < VT_CONST)
5413 r = (r & ~VT_VALMASK) | VT_LOCAL;
5415 vset(&s->type, r, s->c);
5416 /* Point to s as backpointer (even without r&VT_SYM).
5417 Will be used by at least the x86 inline asm parser for
5418 regvars. */
5419 vtop->sym = s;
5421 if (r & VT_SYM) {
5422 vtop->c.i = 0;
5423 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5424 vtop->c.i = s->enum_val;
5426 break;
5429 /* post operations */
5430 while (1) {
5431 if (tok == TOK_INC || tok == TOK_DEC) {
5432 inc(1, tok);
5433 next();
5434 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5435 int qualifiers, cumofs = 0;
5436 /* field */
5437 if (tok == TOK_ARROW)
5438 indir();
5439 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5440 test_lvalue();
5441 gaddrof();
5442 /* expect pointer on structure */
5443 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5444 expect("struct or union");
5445 if (tok == TOK_CDOUBLE)
5446 expect("field name");
5447 next();
5448 if (tok == TOK_CINT || tok == TOK_CUINT)
5449 expect("field name");
5450 s = find_field(&vtop->type, tok, &cumofs);
5451 if (!s)
5452 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5453 /* add field offset to pointer */
5454 vtop->type = char_pointer_type; /* change type to 'char *' */
5455 vpushi(cumofs + s->c);
5456 gen_op('+');
5457 /* change type to field type, and set to lvalue */
5458 vtop->type = s->type;
5459 vtop->type.t |= qualifiers;
5460 /* an array is never an lvalue */
5461 if (!(vtop->type.t & VT_ARRAY)) {
5462 vtop->r |= lvalue_type(vtop->type.t);
5463 #ifdef CONFIG_TCC_BCHECK
5464 /* if bound checking, the referenced pointer must be checked */
5465 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5466 vtop->r |= VT_MUSTBOUND;
5467 #endif
5469 next();
5470 } else if (tok == '[') {
5471 next();
5472 gexpr();
5473 gen_op('+');
5474 indir();
5475 skip(']');
5476 } else if (tok == '(') {
5477 SValue ret;
5478 Sym *sa;
5479 int nb_args, ret_nregs, ret_align, regsize, variadic;
5481 /* function call */
5482 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5483 /* pointer test (no array accepted) */
5484 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5485 vtop->type = *pointed_type(&vtop->type);
5486 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5487 goto error_func;
5488 } else {
5489 error_func:
5490 expect("function pointer");
5492 } else {
5493 vtop->r &= ~VT_LVAL; /* no lvalue */
5495 /* get return type */
5496 s = vtop->type.ref;
5497 next();
5498 sa = s->next; /* first parameter */
5499 nb_args = regsize = 0;
5500 ret.r2 = VT_CONST;
5501 /* compute first implicit argument if a structure is returned */
5502 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5503 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5504 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5505 &ret_align, &regsize);
5506 if (ret_nregs <= 0) {
5507 /* get some space for the returned structure */
5508 size = type_size(&s->type, &align);
5509 #ifdef TCC_TARGET_ARM64
5510 /* On arm64, a small struct is return in registers.
5511 It is much easier to write it to memory if we know
5512 that we are allowed to write some extra bytes, so
5513 round the allocated space up to a power of 2: */
5514 if (size < 16)
5515 while (size & (size - 1))
5516 size = (size | (size - 1)) + 1;
5517 #endif
5518 loc = (loc - size) & -align;
5519 ret.type = s->type;
5520 ret.r = VT_LOCAL | VT_LVAL;
5521 /* pass it as 'int' to avoid structure arg passing
5522 problems */
5523 vseti(VT_LOCAL, loc);
5524 ret.c = vtop->c;
5525 if (ret_nregs < 0)
5526 vtop--;
5527 else
5528 nb_args++;
5530 } else {
5531 ret_nregs = 1;
5532 ret.type = s->type;
5535 if (ret_nregs > 0) {
5536 /* return in register */
5537 if (is_float(ret.type.t)) {
5538 ret.r = reg_fret(ret.type.t);
5539 #ifdef TCC_TARGET_X86_64
5540 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5541 ret.r2 = REG_QRET;
5542 #elif defined TCC_TARGET_RISCV64
5543 if ((ret.type.t & VT_BTYPE) == VT_LDOUBLE)
5544 ret.r2 = ret.r + 1;
5545 #endif
5546 } else {
5547 #ifndef TCC_TARGET_ARM64
5548 #ifndef TCC_TARGET_RISCV64
5549 #ifdef TCC_TARGET_X86_64
5550 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5551 #else
5552 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5553 #endif
5554 ret.r2 = REG_LRET;
5555 #endif
5556 #endif
5557 ret.r = REG_IRET;
5559 ret.c.i = 0;
5561 if (tok != ')') {
5562 for(;;) {
5563 expr_eq();
5564 gfunc_param_typed(s, sa);
5565 nb_args++;
5566 if (sa)
5567 sa = sa->next;
5568 if (tok == ')')
5569 break;
5570 skip(',');
5573 if (sa)
5574 tcc_error("too few arguments to function");
5575 skip(')');
5576 gfunc_call(nb_args);
5578 if (ret_nregs < 0) {
5579 vsetc(&ret.type, ret.r, &ret.c);
5580 #ifdef TCC_TARGET_RISCV64
5581 arch_transfer_ret_regs(1);
5582 #endif
5583 } else {
5584 /* return value */
5585 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5586 vsetc(&ret.type, r, &ret.c);
5587 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5590 /* handle packed struct return */
5591 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5592 int addr, offset;
5594 size = type_size(&s->type, &align);
5595 /* We're writing whole regs often, make sure there's enough
5596 space. Assume register size is power of 2. */
5597 if (regsize > align)
5598 align = regsize;
5599 loc = (loc - size) & -align;
5600 addr = loc;
5601 offset = 0;
5602 for (;;) {
5603 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5604 vswap();
5605 vstore();
5606 vtop--;
5607 if (--ret_nregs == 0)
5608 break;
5609 offset += regsize;
5611 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5614 if (s->f.func_noreturn)
5615 CODE_OFF();
5616 } else {
5617 break;
5622 ST_FUNC void expr_prod(void)
5624 int t;
5626 unary();
5627 while (tok == '*' || tok == '/' || tok == '%') {
5628 t = tok;
5629 next();
5630 unary();
5631 gen_op(t);
5635 ST_FUNC void expr_sum(void)
5637 int t;
5639 expr_prod();
5640 while (tok == '+' || tok == '-') {
5641 t = tok;
5642 next();
5643 expr_prod();
5644 gen_op(t);
5648 static void expr_shift(void)
5650 int t;
5652 expr_sum();
5653 while (tok == TOK_SHL || tok == TOK_SAR) {
5654 t = tok;
5655 next();
5656 expr_sum();
5657 gen_op(t);
5661 static void expr_cmp(void)
5663 int t;
5665 expr_shift();
5666 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5667 tok == TOK_ULT || tok == TOK_UGE) {
5668 t = tok;
5669 next();
5670 expr_shift();
5671 gen_op(t);
5675 static void expr_cmpeq(void)
5677 int t;
5679 expr_cmp();
5680 while (tok == TOK_EQ || tok == TOK_NE) {
5681 t = tok;
5682 next();
5683 expr_cmp();
5684 gen_op(t);
5688 static void expr_and(void)
5690 expr_cmpeq();
5691 while (tok == '&') {
5692 next();
5693 expr_cmpeq();
5694 gen_op('&');
5698 static void expr_xor(void)
5700 expr_and();
5701 while (tok == '^') {
5702 next();
5703 expr_and();
5704 gen_op('^');
5708 static void expr_or(void)
5710 expr_xor();
5711 while (tok == '|') {
5712 next();
5713 expr_xor();
5714 gen_op('|');
5718 static int condition_3way(void);
5720 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5722 int t = 0, cc = 1, f = 0, c;
5723 for(;;) {
5724 c = f ? i : condition_3way();
5725 if (c < 0) {
5726 save_regs(1), cc = 0;
5727 } else if (c != i) {
5728 nocode_wanted++, f = 1;
5730 if (tok != e_op) {
5731 if (cc || f) {
5732 vpop();
5733 vpushi(i ^ f);
5734 gsym(t);
5735 nocode_wanted -= f;
5736 } else {
5737 gvtst_set(i, t);
5739 break;
5741 if (c < 0)
5742 t = gvtst(i, t);
5743 else
5744 vpop();
5745 next();
5746 e_fn();
5750 static void expr_land(void)
5752 expr_or();
5753 if (tok == TOK_LAND)
5754 expr_landor(expr_or, TOK_LAND, 1);
5757 static void expr_lor(void)
5759 expr_land();
5760 if (tok == TOK_LOR)
5761 expr_landor(expr_land, TOK_LOR, 0);
5764 /* Assuming vtop is a value used in a conditional context
5765 (i.e. compared with zero) return 0 if it's false, 1 if
5766 true and -1 if it can't be statically determined. */
5767 static int condition_3way(void)
5769 int c = -1;
5770 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5771 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5772 vdup();
5773 gen_cast_s(VT_BOOL);
5774 c = vtop->c.i;
5775 vpop();
5777 return c;
5780 static int is_cond_bool(SValue *sv)
5782 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5783 && (sv->type.t & VT_BTYPE) == VT_INT)
5784 return (unsigned)sv->c.i < 2;
5785 if (sv->r == VT_CMP)
5786 return 1;
5787 return 0;
5790 static void expr_cond(void)
5792 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5793 SValue sv;
5794 CType type, type1, type2;
5795 int ncw_prev;
5797 expr_lor();
5798 if (tok == '?') {
5799 next();
5800 c = condition_3way();
5801 g = (tok == ':' && gnu_ext);
5802 tt = 0;
5803 if (!g) {
5804 if (c < 0) {
5805 save_regs(1);
5806 tt = gvtst(1, 0);
5807 } else {
5808 vpop();
5810 } else if (c < 0) {
5811 /* needed to avoid having different registers saved in
5812 each branch */
5813 save_regs(1);
5814 gv_dup();
5815 tt = gvtst(0, 0);
5818 ncw_prev = nocode_wanted;
5819 if (1) {
5820 if (c == 0)
5821 nocode_wanted++;
5822 if (!g)
5823 gexpr();
5825 if (c < 0 && vtop->r == VT_CMP) {
5826 t1 = gvtst(0, 0);
5827 vpushi(0);
5828 gvtst_set(0, t1);
5831 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5832 mk_pointer(&vtop->type);
5833 type1 = vtop->type;
5834 sv = *vtop; /* save value to handle it later */
5835 vtop--; /* no vpop so that FP stack is not flushed */
5837 if (g) {
5838 u = tt;
5839 } else if (c < 0) {
5840 u = gjmp(0);
5841 gsym(tt);
5842 } else
5843 u = 0;
5845 nocode_wanted = ncw_prev;
5846 if (c == 1)
5847 nocode_wanted++;
5848 skip(':');
5849 expr_cond();
5851 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5852 if (sv.r == VT_CMP) {
5853 t1 = sv.jtrue;
5854 t2 = u;
5855 } else {
5856 t1 = gvtst(0, 0);
5857 t2 = gjmp(0);
5858 gsym(u);
5859 vpushv(&sv);
5861 gvtst_set(0, t1);
5862 gvtst_set(1, t2);
5863 nocode_wanted = ncw_prev;
5864 // tcc_warning("two conditions expr_cond");
5865 return;
5868 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5869 mk_pointer(&vtop->type);
5870 type2=vtop->type;
5871 t1 = type1.t;
5872 bt1 = t1 & VT_BTYPE;
5873 t2 = type2.t;
5874 bt2 = t2 & VT_BTYPE;
5875 type.ref = NULL;
5877 /* cast operands to correct type according to ISOC rules */
5878 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5879 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5880 } else if (is_float(bt1) || is_float(bt2)) {
5881 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5882 type.t = VT_LDOUBLE;
5884 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5885 type.t = VT_DOUBLE;
5886 } else {
5887 type.t = VT_FLOAT;
5889 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5890 /* cast to biggest op */
5891 type.t = VT_LLONG | VT_LONG;
5892 if (bt1 == VT_LLONG)
5893 type.t &= t1;
5894 if (bt2 == VT_LLONG)
5895 type.t &= t2;
5896 /* convert to unsigned if it does not fit in a long long */
5897 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5898 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5899 type.t |= VT_UNSIGNED;
5900 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5901 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5902 /* If one is a null ptr constant the result type
5903 is the other. */
5904 if (is_null_pointer (vtop)) type = type1;
5905 else if (is_null_pointer (&sv)) type = type2;
5906 else if (bt1 != bt2)
5907 tcc_error("incompatible types in conditional expressions");
5908 else {
5909 CType *pt1 = pointed_type(&type1);
5910 CType *pt2 = pointed_type(&type2);
5911 int pbt1 = pt1->t & VT_BTYPE;
5912 int pbt2 = pt2->t & VT_BTYPE;
5913 int newquals, copied = 0;
5914 /* pointers to void get preferred, otherwise the
5915 pointed to types minus qualifs should be compatible */
5916 type = (pbt1 == VT_VOID) ? type1 : type2;
5917 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5918 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5919 tcc_warning("pointer type mismatch in conditional expression\n");
5921 /* combine qualifs */
5922 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5923 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5924 & newquals)
5926 /* copy the pointer target symbol */
5927 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5928 0, type.ref->c);
5929 copied = 1;
5930 pointed_type(&type)->t |= newquals;
5932 /* pointers to incomplete arrays get converted to
5933 pointers to completed ones if possible */
5934 if (pt1->t & VT_ARRAY
5935 && pt2->t & VT_ARRAY
5936 && pointed_type(&type)->ref->c < 0
5937 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5939 if (!copied)
5940 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5941 0, type.ref->c);
5942 pointed_type(&type)->ref =
5943 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5944 0, pointed_type(&type)->ref->c);
5945 pointed_type(&type)->ref->c =
5946 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5949 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5950 /* XXX: test structure compatibility */
5951 type = bt1 == VT_STRUCT ? type1 : type2;
5952 } else {
5953 /* integer operations */
5954 type.t = VT_INT | (VT_LONG & (t1 | t2));
5955 /* convert to unsigned if it does not fit in an integer */
5956 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5957 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5958 type.t |= VT_UNSIGNED;
5960 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5961 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5962 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5964 /* now we convert second operand */
5965 if (c != 1) {
5966 gen_cast(&type);
5967 if (islv) {
5968 mk_pointer(&vtop->type);
5969 gaddrof();
5970 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5971 gaddrof();
5974 rc = RC_INT;
5975 if (is_float(type.t)) {
5976 rc = RC_FLOAT;
5977 #ifdef TCC_TARGET_X86_64
5978 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5979 rc = RC_ST0;
5981 #elif defined TCC_TARGET_RISCV64
5982 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)
5983 rc = RC_INT;
5984 #endif
5985 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5986 /* for long longs, we use fixed registers to avoid having
5987 to handle a complicated move */
5988 rc = RC_IRET;
5991 tt = r2 = 0;
5992 if (c < 0) {
5993 r2 = gv(rc);
5994 tt = gjmp(0);
5996 gsym(u);
5997 nocode_wanted = ncw_prev;
5999 /* this is horrible, but we must also convert first
6000 operand */
6001 if (c != 0) {
6002 *vtop = sv;
6003 gen_cast(&type);
6004 if (islv) {
6005 mk_pointer(&vtop->type);
6006 gaddrof();
6007 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6008 gaddrof();
6011 if (c < 0) {
6012 r1 = gv(rc);
6013 move_reg(r2, r1, islv ? VT_PTR : type.t);
6014 vtop->r = r2;
6015 gsym(tt);
6018 if (islv)
6019 indir();
6024 static void expr_eq(void)
6026 int t;
6028 expr_cond();
6029 if (tok == '=' ||
6030 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
6031 tok == TOK_A_XOR || tok == TOK_A_OR ||
6032 tok == TOK_A_SHL || tok == TOK_A_SAR) {
6033 test_lvalue();
6034 t = tok;
6035 next();
6036 if (t == '=') {
6037 expr_eq();
6038 } else {
6039 vdup();
6040 expr_eq();
6041 gen_op(t & 0x7f);
6043 vstore();
6047 ST_FUNC void gexpr(void)
6049 while (1) {
6050 expr_eq();
6051 if (tok != ',')
6052 break;
6053 vpop();
6054 next();
6058 /* parse a constant expression and return value in vtop. */
6059 static void expr_const1(void)
6061 const_wanted++;
6062 nocode_wanted++;
6063 expr_cond();
6064 nocode_wanted--;
6065 const_wanted--;
6068 /* parse an integer constant and return its value. */
6069 static inline int64_t expr_const64(void)
6071 int64_t c;
6072 expr_const1();
6073 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6074 expect("constant expression");
6075 c = vtop->c.i;
6076 vpop();
6077 return c;
6080 /* parse an integer constant and return its value.
6081 Complain if it doesn't fit 32bit (signed or unsigned). */
6082 ST_FUNC int expr_const(void)
6084 int c;
6085 int64_t wc = expr_const64();
6086 c = wc;
6087 if (c != wc && (unsigned)c != wc)
6088 tcc_error("constant exceeds 32 bit");
6089 return c;
6092 /* ------------------------------------------------------------------------- */
6093 /* return from function */
6095 #ifndef TCC_TARGET_ARM64
6096 static void gfunc_return(CType *func_type)
6098 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6099 CType type, ret_type;
6100 int ret_align, ret_nregs, regsize;
6101 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6102 &ret_align, &regsize);
6103 if (ret_nregs < 0) {
6104 #ifdef TCC_TARGET_RISCV64
6105 arch_transfer_ret_regs(0);
6106 #endif
6107 } else if (0 == ret_nregs) {
6108 /* if returning structure, must copy it to implicit
6109 first pointer arg location */
6110 type = *func_type;
6111 mk_pointer(&type);
6112 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6113 indir();
6114 vswap();
6115 /* copy structure value to pointer */
6116 vstore();
6117 } else {
6118 /* returning structure packed into registers */
6119 int r, size, addr, align;
6120 size = type_size(func_type,&align);
6121 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6122 (vtop->c.i & (ret_align-1)))
6123 && (align & (ret_align-1))) {
6124 loc = (loc - size) & -ret_align;
6125 addr = loc;
6126 type = *func_type;
6127 vset(&type, VT_LOCAL | VT_LVAL, addr);
6128 vswap();
6129 vstore();
6130 vpop();
6131 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6133 vtop->type = ret_type;
6134 if (is_float(ret_type.t))
6135 r = rc_fret(ret_type.t);
6136 else
6137 r = RC_IRET;
6139 if (ret_nregs == 1)
6140 gv(r);
6141 else {
6142 for (;;) {
6143 vdup();
6144 gv(r);
6145 vpop();
6146 if (--ret_nregs == 0)
6147 break;
6148 /* We assume that when a structure is returned in multiple
6149 registers, their classes are consecutive values of the
6150 suite s(n) = 2^n */
6151 r <<= 1;
6152 vtop->c.i += regsize;
6156 } else if (is_float(func_type->t)) {
6157 gv(rc_fret(func_type->t));
6158 } else {
6159 gv(RC_IRET);
6161 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6163 #endif
6165 static void check_func_return(void)
6167 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6168 return;
6169 if (!strcmp (funcname, "main")
6170 && (func_vt.t & VT_BTYPE) == VT_INT) {
6171 /* main returns 0 by default */
6172 vpushi(0);
6173 gen_assign_cast(&func_vt);
6174 gfunc_return(&func_vt);
6175 } else {
6176 tcc_warning("function might return no value: '%s'", funcname);
6180 /* ------------------------------------------------------------------------- */
6181 /* switch/case */
6183 static int case_cmp(const void *pa, const void *pb)
6185 int64_t a = (*(struct case_t**) pa)->v1;
6186 int64_t b = (*(struct case_t**) pb)->v1;
6187 return a < b ? -1 : a > b;
/* emit a test of vtop and make the true branch jump to address 'a',
   chaining into jump list 't' */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
6195 static void gcase(struct case_t **base, int len, int *bsym)
6197 struct case_t *p;
6198 int e;
6199 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6200 while (len > 8) {
6201 /* binary search */
6202 p = base[len/2];
6203 vdup();
6204 if (ll)
6205 vpushll(p->v2);
6206 else
6207 vpushi(p->v2);
6208 gen_op(TOK_LE);
6209 e = gvtst(1, 0);
6210 vdup();
6211 if (ll)
6212 vpushll(p->v1);
6213 else
6214 vpushi(p->v1);
6215 gen_op(TOK_GE);
6216 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6217 /* x < v1 */
6218 gcase(base, len/2, bsym);
6219 /* x > v2 */
6220 gsym(e);
6221 e = len/2 + 1;
6222 base += e; len -= e;
6224 /* linear scan */
6225 while (len--) {
6226 p = *base++;
6227 vdup();
6228 if (ll)
6229 vpushll(p->v2);
6230 else
6231 vpushi(p->v2);
6232 if (p->v1 == p->v2) {
6233 gen_op(TOK_EQ);
6234 gtst_addr(0, p->sym);
6235 } else {
6236 gen_op(TOK_LE);
6237 e = gvtst(1, 0);
6238 vdup();
6239 if (ll)
6240 vpushll(p->v1);
6241 else
6242 vpushi(p->v1);
6243 gen_op(TOK_GE);
6244 gtst_addr(0, p->sym);
6245 gsym(e);
6248 *bsym = gjmp(*bsym);
6251 /* ------------------------------------------------------------------------- */
6252 /* __attribute__((cleanup(fn))) */
6254 static void try_call_scope_cleanup(Sym *stop)
6256 Sym *cls = cur_scope->cl.s;
6258 for (; cls != stop; cls = cls->ncl) {
6259 Sym *fs = cls->next;
6260 Sym *vs = cls->prev_tok;
6262 vpushsym(&fs->type, fs);
6263 vset(&vs->type, vs->r, vs->c);
6264 vtop->sym = vs;
6265 mk_pointer(&vtop->type);
6266 gaddrof();
6267 gfunc_call(1);
6271 static void try_call_cleanup_goto(Sym *cleanupstate)
6273 Sym *oc, *cc;
6274 int ocd, ccd;
6276 if (!cur_scope->cl.s)
6277 return;
6279 /* search NCA of both cleanup chains given parents and initial depth */
6280 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6281 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6283 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6285 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6288 try_call_scope_cleanup(cc);
6291 /* call 'func' for each __attribute__((cleanup(func))) */
6292 static void block_cleanup(struct scope *o)
6294 int jmp = 0;
6295 Sym *g, **pg;
6296 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6297 if (g->prev_tok->r & LABEL_FORWARD) {
6298 Sym *pcl = g->next;
6299 if (!jmp)
6300 jmp = gjmp(0);
6301 gsym(pcl->jnext);
6302 try_call_scope_cleanup(o->cl.s);
6303 pcl->jnext = gjmp(0);
6304 if (!o->cl.n)
6305 goto remove_pending;
6306 g->c = o->cl.n;
6307 pg = &g->prev;
6308 } else {
6309 remove_pending:
6310 *pg = g->prev;
6311 sym_free(g);
6314 gsym(jmp);
6315 try_call_scope_cleanup(o->cl.s);
/* ------------------------------------------------------------------------- */
/* VLA */

/* restore the stack pointer from the saved location 'loc' (0 = nothing
   to restore, i.e. no VLA was allocated) */
static void vla_restore(int loc)
{
    if (loc != 0)
        gen_vla_sp_restore(loc);
}
6327 static void vla_leave(struct scope *o)
6329 if (o->vla.num < cur_scope->vla.num)
6330 vla_restore(o->vla.loc);
6333 /* ------------------------------------------------------------------------- */
6334 /* local scopes */
6336 void new_scope(struct scope *o)
6338 /* copy and link previous scope */
6339 *o = *cur_scope;
6340 o->prev = cur_scope;
6341 cur_scope = o;
6343 /* record local declaration stack position */
6344 o->lstk = local_stack;
6345 o->llstk = local_label_stack;
6347 ++local_scope;
6350 void prev_scope(struct scope *o, int is_expr)
6352 vla_leave(o->prev);
6354 if (o->cl.s != o->prev->cl.s)
6355 block_cleanup(o->prev);
6357 /* pop locally defined labels */
6358 label_pop(&local_label_stack, o->llstk, is_expr);
6360 /* In the is_expr case (a statement expression is finished here),
6361 vtop might refer to symbols on the local_stack. Either via the
6362 type or via vtop->sym. We can't pop those nor any that in turn
6363 might be referred to. To make it easier we don't roll back
6364 any symbols in that case; some upper level call to block() will
6365 do that. We do have to remove such symbols from the lookup
6366 tables, though. sym_pop will do that. */
6368 /* pop locally defined symbols */
6369 sym_pop(&local_stack, o->lstk, is_expr);
6371 cur_scope = o->prev;
6372 --local_scope;
6375 /* leave a scope via break/continue(/goto) */
6376 void leave_scope(struct scope *o)
6378 if (!o)
6379 return;
6380 try_call_scope_cleanup(o->cl.s);
6381 vla_leave(o);
6384 /* ------------------------------------------------------------------------- */
6385 /* call block from 'for do while' loops */
6387 static void lblock(int *bsym, int *csym)
6389 struct scope *lo = loop_scope, *co = cur_scope;
6390 int *b = co->bsym, *c = co->csym;
6391 if (csym) {
6392 co->csym = csym;
6393 loop_scope = co;
6395 co->bsym = bsym;
6396 block(0);
6397 co->bsym = b;
6398 if (csym) {
6399 co->csym = c;
6400 loop_scope = lo;
6404 static void block(int is_expr)
6406 int a, b, c, d, e, t;
6407 Sym *s;
6409 if (is_expr) {
6410 /* default return value is (void) */
6411 vpushi(0);
6412 vtop->type.t = VT_VOID;
6415 again:
6416 t = tok, next();
6418 if (t == TOK_IF) {
6419 skip('(');
6420 gexpr();
6421 skip(')');
6422 a = gvtst(1, 0);
6423 block(0);
6424 if (tok == TOK_ELSE) {
6425 d = gjmp(0);
6426 gsym(a);
6427 next();
6428 block(0);
6429 gsym(d); /* patch else jmp */
6430 } else {
6431 gsym(a);
6434 } else if (t == TOK_WHILE) {
6435 d = gind();
6436 skip('(');
6437 gexpr();
6438 skip(')');
6439 a = gvtst(1, 0);
6440 b = 0;
6441 lblock(&a, &b);
6442 gjmp_addr(d);
6443 gsym_addr(b, d);
6444 gsym(a);
6446 } else if (t == '{') {
6447 struct scope o;
6448 new_scope(&o);
6450 /* handle local labels declarations */
6451 while (tok == TOK_LABEL) {
6452 do {
6453 next();
6454 if (tok < TOK_UIDENT)
6455 expect("label identifier");
6456 label_push(&local_label_stack, tok, LABEL_DECLARED);
6457 next();
6458 } while (tok == ',');
6459 skip(';');
6462 while (tok != '}') {
6463 decl(VT_LOCAL);
6464 if (tok != '}') {
6465 if (is_expr)
6466 vpop();
6467 block(is_expr);
6471 prev_scope(&o, is_expr);
6473 if (0 == local_scope && !nocode_wanted)
6474 check_func_return();
6475 next();
6477 } else if (t == TOK_RETURN) {
6478 a = tok != ';';
6479 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6480 if (a)
6481 gexpr(), gen_assign_cast(&func_vt);
6482 leave_scope(root_scope);
6483 if (a && b)
6484 gfunc_return(&func_vt);
6485 else if (a)
6486 vtop--;
6487 else if (b)
6488 tcc_warning("'return' with no value.");
6489 skip(';');
6490 /* jump unless last stmt in top-level block */
6491 if (tok != '}' || local_scope != 1)
6492 rsym = gjmp(rsym);
6493 CODE_OFF();
6495 } else if (t == TOK_BREAK) {
6496 /* compute jump */
6497 if (!cur_scope->bsym)
6498 tcc_error("cannot break");
6499 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6500 leave_scope(loop_scope);
6501 else
6502 leave_scope(cur_switch->scope);
6503 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6504 skip(';');
6506 } else if (t == TOK_CONTINUE) {
6507 /* compute jump */
6508 if (!cur_scope->csym)
6509 tcc_error("cannot continue");
6510 leave_scope(loop_scope);
6511 *cur_scope->csym = gjmp(*cur_scope->csym);
6512 skip(';');
6514 } else if (t == TOK_FOR) {
6515 struct scope o;
6516 new_scope(&o);
6518 skip('(');
6519 if (tok != ';') {
6520 /* c99 for-loop init decl? */
6521 if (!decl0(VT_LOCAL, 1, NULL)) {
6522 /* no, regular for-loop init expr */
6523 gexpr();
6524 vpop();
6527 skip(';');
6528 a = b = 0;
6529 c = d = gind();
6530 if (tok != ';') {
6531 gexpr();
6532 a = gvtst(1, 0);
6534 skip(';');
6535 if (tok != ')') {
6536 e = gjmp(0);
6537 d = gind();
6538 gexpr();
6539 vpop();
6540 gjmp_addr(c);
6541 gsym(e);
6543 skip(')');
6544 lblock(&a, &b);
6545 gjmp_addr(d);
6546 gsym_addr(b, d);
6547 gsym(a);
6548 prev_scope(&o, 0);
6550 } else if (t == TOK_DO) {
6551 a = b = 0;
6552 d = gind();
6553 lblock(&a, &b);
6554 gsym(b);
6555 skip(TOK_WHILE);
6556 skip('(');
6557 gexpr();
6558 skip(')');
6559 skip(';');
6560 c = gvtst(0, 0);
6561 gsym_addr(c, d);
6562 gsym(a);
6564 } else if (t == TOK_SWITCH) {
6565 struct switch_t *saved, sw;
6566 SValue switchval;
6568 sw.p = NULL;
6569 sw.n = 0;
6570 sw.def_sym = 0;
6571 sw.bsym = &a;
6572 sw.scope = cur_scope;
6574 saved = cur_switch;
6575 cur_switch = &sw;
6577 skip('(');
6578 gexpr();
6579 skip(')');
6580 switchval = *vtop--;
6582 a = 0;
6583 b = gjmp(0); /* jump to first case */
6584 lblock(&a, NULL);
6585 a = gjmp(a); /* add implicit break */
6586 /* case lookup */
6587 gsym(b);
6589 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6590 for (b = 1; b < sw.n; b++)
6591 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6592 tcc_error("duplicate case value");
6594 /* Our switch table sorting is signed, so the compared
6595 value needs to be as well when it's 64bit. */
6596 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6597 switchval.type.t &= ~VT_UNSIGNED;
6598 vpushv(&switchval);
6599 gv(RC_INT);
6600 d = 0, gcase(sw.p, sw.n, &d);
6601 vpop();
6602 if (sw.def_sym)
6603 gsym_addr(d, sw.def_sym);
6604 else
6605 gsym(d);
6606 /* break label */
6607 gsym(a);
6609 dynarray_reset(&sw.p, &sw.n);
6610 cur_switch = saved;
6612 } else if (t == TOK_CASE) {
6613 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6614 if (!cur_switch)
6615 expect("switch");
6616 cr->v1 = cr->v2 = expr_const64();
6617 if (gnu_ext && tok == TOK_DOTS) {
6618 next();
6619 cr->v2 = expr_const64();
6620 if (cr->v2 < cr->v1)
6621 tcc_warning("empty case range");
6623 cr->sym = gind();
6624 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6625 skip(':');
6626 is_expr = 0;
6627 goto block_after_label;
6629 } else if (t == TOK_DEFAULT) {
6630 if (!cur_switch)
6631 expect("switch");
6632 if (cur_switch->def_sym)
6633 tcc_error("too many 'default'");
6634 cur_switch->def_sym = gind();
6635 skip(':');
6636 is_expr = 0;
6637 goto block_after_label;
6639 } else if (t == TOK_GOTO) {
6640 vla_restore(root_scope->vla.loc);
6641 if (tok == '*' && gnu_ext) {
6642 /* computed goto */
6643 next();
6644 gexpr();
6645 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6646 expect("pointer");
6647 ggoto();
6649 } else if (tok >= TOK_UIDENT) {
6650 s = label_find(tok);
6651 /* put forward definition if needed */
6652 if (!s)
6653 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6654 else if (s->r == LABEL_DECLARED)
6655 s->r = LABEL_FORWARD;
6657 if (s->r & LABEL_FORWARD) {
6658 /* start new goto chain for cleanups, linked via label->next */
6659 if (cur_scope->cl.s && !nocode_wanted) {
6660 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6661 pending_gotos->prev_tok = s;
6662 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6663 pending_gotos->next = s;
6665 s->jnext = gjmp(s->jnext);
6666 } else {
6667 try_call_cleanup_goto(s->cleanupstate);
6668 gjmp_addr(s->jnext);
6670 next();
6672 } else {
6673 expect("label identifier");
6675 skip(';');
6677 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6678 asm_instr();
6680 } else {
6681 if (tok == ':' && t >= TOK_UIDENT) {
6682 /* label case */
6683 next();
6684 s = label_find(t);
6685 if (s) {
6686 if (s->r == LABEL_DEFINED)
6687 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6688 s->r = LABEL_DEFINED;
6689 if (s->next) {
6690 Sym *pcl; /* pending cleanup goto */
6691 for (pcl = s->next; pcl; pcl = pcl->prev)
6692 gsym(pcl->jnext);
6693 sym_pop(&s->next, NULL, 0);
6694 } else
6695 gsym(s->jnext);
6696 } else {
6697 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6699 s->jnext = gind();
6700 s->cleanupstate = cur_scope->cl.s;
6702 block_after_label:
6703 vla_restore(cur_scope->vla.loc);
6704 /* we accept this, but it is a mistake */
6705 if (tok == '}') {
6706 tcc_warning("deprecated use of label at end of compound statement");
6707 } else {
6708 goto again;
6711 } else {
6712 /* expression case */
6713 if (t != ';') {
6714 unget_tok(t);
6715 if (is_expr) {
6716 vpop();
6717 gexpr();
6718 } else {
6719 gexpr();
6720 vpop();
6722 skip(';');
6728 /* This skips over a stream of tokens containing balanced {} and ()
6729 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6730 with a '{'). If STR then allocates and stores the skipped tokens
6731 in *STR. This doesn't check if () and {} are nested correctly,
6732 i.e. "({)}" is accepted. */
6733 static void skip_or_save_block(TokenString **str)
6735 int braces = tok == '{';
6736 int level = 0;
6737 if (str)
6738 *str = tok_str_alloc();
6740 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6741 int t;
6742 if (tok == TOK_EOF) {
6743 if (str || level > 0)
6744 tcc_error("unexpected end of file");
6745 else
6746 break;
6748 if (str)
6749 tok_str_add_tok(*str);
6750 t = tok;
6751 next();
6752 if (t == '{' || t == '(') {
6753 level++;
6754 } else if (t == '}' || t == ')') {
6755 level--;
6756 if (level == 0 && braces && t == '}')
6757 break;
6760 if (str) {
6761 tok_str_add(*str, -1);
6762 tok_str_add(*str, 0);
6766 #define EXPR_CONST 1
6767 #define EXPR_ANY 2
6769 static void parse_init_elem(int expr_type)
6771 int saved_global_expr;
6772 switch(expr_type) {
6773 case EXPR_CONST:
6774 /* compound literals must be allocated globally in this case */
6775 saved_global_expr = global_expr;
6776 global_expr = 1;
6777 expr_const1();
6778 global_expr = saved_global_expr;
6779 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6780 (compound literals). */
6781 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6782 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6783 || vtop->sym->v < SYM_FIRST_ANOM))
6784 #ifdef TCC_TARGET_PE
6785 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6786 #endif
6788 tcc_error("initializer element is not constant");
6789 break;
6790 case EXPR_ANY:
6791 expr_eq();
6792 break;
6796 /* put zeros for variable based init */
6797 static void init_putz(Section *sec, unsigned long c, int size)
6799 if (sec) {
6800 /* nothing to do because globals are already set to zero */
6801 } else {
6802 vpush_global_sym(&func_old_type, TOK_memset);
6803 vseti(VT_LOCAL, c);
6804 #ifdef TCC_TARGET_ARM
6805 vpushs(size);
6806 vpushi(0);
6807 #else
6808 vpushi(0);
6809 vpushs(size);
6810 #endif
6811 gfunc_call(3);
6815 #define DIF_FIRST 1
6816 #define DIF_SIZE_ONLY 2
6817 #define DIF_HAVE_ELEM 4
6819 /* t is the array or struct type. c is the array or struct
6820 address. cur_field is the pointer to the current
6821 field, for arrays the 'c' member contains the current start
6822 index. 'flags' is as in decl_initializer.
6823 'al' contains the already initialized length of the
6824 current container (starting at c). This returns the new length of that. */
6825 static int decl_designator(CType *type, Section *sec, unsigned long c,
6826 Sym **cur_field, int flags, int al)
6828 Sym *s, *f;
6829 int index, index_last, align, l, nb_elems, elem_size;
6830 unsigned long corig = c;
6832 elem_size = 0;
6833 nb_elems = 1;
6835 if (flags & DIF_HAVE_ELEM)
6836 goto no_designator;
6838 if (gnu_ext && tok >= TOK_UIDENT) {
6839 l = tok, next();
6840 if (tok == ':')
6841 goto struct_field;
6842 unget_tok(l);
6845 /* NOTE: we only support ranges for last designator */
6846 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6847 if (tok == '[') {
6848 if (!(type->t & VT_ARRAY))
6849 expect("array type");
6850 next();
6851 index = index_last = expr_const();
6852 if (tok == TOK_DOTS && gnu_ext) {
6853 next();
6854 index_last = expr_const();
6856 skip(']');
6857 s = type->ref;
6858 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6859 index_last < index)
6860 tcc_error("invalid index");
6861 if (cur_field)
6862 (*cur_field)->c = index_last;
6863 type = pointed_type(type);
6864 elem_size = type_size(type, &align);
6865 c += index * elem_size;
6866 nb_elems = index_last - index + 1;
6867 } else {
6868 int cumofs;
6869 next();
6870 l = tok;
6871 struct_field:
6872 next();
6873 if ((type->t & VT_BTYPE) != VT_STRUCT)
6874 expect("struct/union type");
6875 cumofs = 0;
6876 f = find_field(type, l, &cumofs);
6877 if (!f)
6878 expect("field");
6879 if (cur_field)
6880 *cur_field = f;
6881 type = &f->type;
6882 c += cumofs + f->c;
6884 cur_field = NULL;
6886 if (!cur_field) {
6887 if (tok == '=') {
6888 next();
6889 } else if (!gnu_ext) {
6890 expect("=");
6892 } else {
6893 no_designator:
6894 if (type->t & VT_ARRAY) {
6895 index = (*cur_field)->c;
6896 if (type->ref->c >= 0 && index >= type->ref->c)
6897 tcc_error("index too large");
6898 type = pointed_type(type);
6899 c += index * type_size(type, &align);
6900 } else {
6901 f = *cur_field;
6902 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6903 *cur_field = f = f->next;
6904 if (!f)
6905 tcc_error("too many field init");
6906 type = &f->type;
6907 c += f->c;
6910 /* must put zero in holes (note that doing it that way
6911 ensures that it even works with designators) */
6912 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6913 init_putz(sec, corig + al, c - corig - al);
6914 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6916 /* XXX: make it more general */
6917 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6918 unsigned long c_end;
6919 uint8_t *src, *dst;
6920 int i;
6922 if (!sec) {
6923 vset(type, VT_LOCAL|VT_LVAL, c);
6924 for (i = 1; i < nb_elems; i++) {
6925 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6926 vswap();
6927 vstore();
6929 vpop();
6930 } else if (!NODATA_WANTED) {
6931 c_end = c + nb_elems * elem_size;
6932 if (c_end > sec->data_allocated)
6933 section_realloc(sec, c_end);
6934 src = sec->data + c;
6935 dst = src;
6936 for(i = 1; i < nb_elems; i++) {
6937 dst += elem_size;
6938 memcpy(dst, src, elem_size);
6942 c += nb_elems * type_size(type, &align);
6943 if (c - corig > al)
6944 al = c - corig;
6945 return al;
6948 /* store a value or an expression directly in global data or in local array */
6949 static void init_putv(CType *type, Section *sec, unsigned long c)
6951 int bt;
6952 void *ptr;
6953 CType dtype;
6955 dtype = *type;
6956 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6958 if (sec) {
6959 int size, align;
6960 /* XXX: not portable */
6961 /* XXX: generate error if incorrect relocation */
6962 gen_assign_cast(&dtype);
6963 bt = type->t & VT_BTYPE;
6965 if ((vtop->r & VT_SYM)
6966 && bt != VT_PTR
6967 && bt != VT_FUNC
6968 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6969 || (type->t & VT_BITFIELD))
6970 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6972 tcc_error("initializer element is not computable at load time");
6974 if (NODATA_WANTED) {
6975 vtop--;
6976 return;
6979 size = type_size(type, &align);
6980 section_reserve(sec, c + size);
6981 ptr = sec->data + c;
6983 /* XXX: make code faster ? */
6984 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6985 vtop->sym->v >= SYM_FIRST_ANOM &&
6986 /* XXX This rejects compound literals like
6987 '(void *){ptr}'. The problem is that '&sym' is
6988 represented the same way, which would be ruled out
6989 by the SYM_FIRST_ANOM check above, but also '"string"'
6990 in 'char *p = "string"' is represented the same
6991 with the type being VT_PTR and the symbol being an
6992 anonymous one. That is, there's no difference in vtop
6993 between '(void *){x}' and '&(void *){x}'. Ignore
6994 pointer typed entities here. Hopefully no real code
6995 will every use compound literals with scalar type. */
6996 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6997 /* These come from compound literals, memcpy stuff over. */
6998 Section *ssec;
6999 ElfSym *esym;
7000 ElfW_Rel *rel;
7001 esym = elfsym(vtop->sym);
7002 ssec = tcc_state->sections[esym->st_shndx];
7003 memmove (ptr, ssec->data + esym->st_value, size);
7004 if (ssec->reloc) {
7005 /* We need to copy over all memory contents, and that
7006 includes relocations. Use the fact that relocs are
7007 created it order, so look from the end of relocs
7008 until we hit one before the copied region. */
7009 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7010 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7011 while (num_relocs--) {
7012 rel--;
7013 if (rel->r_offset >= esym->st_value + size)
7014 continue;
7015 if (rel->r_offset < esym->st_value)
7016 break;
7017 /* Note: if the same fields are initialized multiple
7018 times (possible with designators) then we possibly
7019 add multiple relocations for the same offset here.
7020 That would lead to wrong code, the last reloc needs
7021 to win. We clean this up later after the whole
7022 initializer is parsed. */
7023 put_elf_reloca(symtab_section, sec,
7024 c + rel->r_offset - esym->st_value,
7025 ELFW(R_TYPE)(rel->r_info),
7026 ELFW(R_SYM)(rel->r_info),
7027 #if PTR_SIZE == 8
7028 rel->r_addend
7029 #else
7031 #endif
7035 } else {
7036 if (type->t & VT_BITFIELD) {
7037 int bit_pos, bit_size, bits, n;
7038 unsigned char *p, v, m;
7039 bit_pos = BIT_POS(vtop->type.t);
7040 bit_size = BIT_SIZE(vtop->type.t);
7041 p = (unsigned char*)ptr + (bit_pos >> 3);
7042 bit_pos &= 7, bits = 0;
7043 while (bit_size) {
7044 n = 8 - bit_pos;
7045 if (n > bit_size)
7046 n = bit_size;
7047 v = vtop->c.i >> bits << bit_pos;
7048 m = ((1 << n) - 1) << bit_pos;
7049 *p = (*p & ~m) | (v & m);
7050 bits += n, bit_size -= n, bit_pos = 0, ++p;
7052 } else
7053 switch(bt) {
7054 /* XXX: when cross-compiling we assume that each type has the
7055 same representation on host and target, which is likely to
7056 be wrong in the case of long double */
7057 case VT_BOOL:
7058 vtop->c.i = vtop->c.i != 0;
7059 case VT_BYTE:
7060 *(char *)ptr |= vtop->c.i;
7061 break;
7062 case VT_SHORT:
7063 *(short *)ptr |= vtop->c.i;
7064 break;
7065 case VT_FLOAT:
7066 *(float*)ptr = vtop->c.f;
7067 break;
7068 case VT_DOUBLE:
7069 *(double *)ptr = vtop->c.d;
7070 break;
7071 case VT_LDOUBLE:
7072 #if defined TCC_IS_NATIVE_387
7073 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7074 memcpy(ptr, &vtop->c.ld, 10);
7075 #ifdef __TINYC__
7076 else if (sizeof (long double) == sizeof (double))
7077 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7078 #endif
7079 else if (vtop->c.ld == 0.0)
7081 else
7082 #endif
7083 if (sizeof(long double) == LDOUBLE_SIZE)
7084 *(long double*)ptr = vtop->c.ld;
7085 else if (sizeof(double) == LDOUBLE_SIZE)
7086 *(double *)ptr = (double)vtop->c.ld;
7087 else
7088 tcc_error("can't cross compile long double constants");
7089 break;
7090 #if PTR_SIZE != 8
7091 case VT_LLONG:
7092 *(long long *)ptr |= vtop->c.i;
7093 break;
7094 #else
7095 case VT_LLONG:
7096 #endif
7097 case VT_PTR:
7099 addr_t val = vtop->c.i;
7100 #if PTR_SIZE == 8
7101 if (vtop->r & VT_SYM)
7102 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7103 else
7104 *(addr_t *)ptr |= val;
7105 #else
7106 if (vtop->r & VT_SYM)
7107 greloc(sec, vtop->sym, c, R_DATA_PTR);
7108 *(addr_t *)ptr |= val;
7109 #endif
7110 break;
7112 default:
7114 int val = vtop->c.i;
7115 #if PTR_SIZE == 8
7116 if (vtop->r & VT_SYM)
7117 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7118 else
7119 *(int *)ptr |= val;
7120 #else
7121 if (vtop->r & VT_SYM)
7122 greloc(sec, vtop->sym, c, R_DATA_PTR);
7123 *(int *)ptr |= val;
7124 #endif
7125 break;
7129 vtop--;
7130 } else {
7131 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7132 vswap();
7133 vstore();
7134 vpop();
7138 /* 't' contains the type and storage info. 'c' is the offset of the
7139 object in section 'sec'. If 'sec' is NULL, it means stack based
7140 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7141 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7142 size only evaluation is wanted (only for arrays). */
7143 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7144 int flags)
7146 int len, n, no_oblock, nb, i;
7147 int size1, align1;
7148 Sym *s, *f;
7149 Sym indexsym;
7150 CType *t1;
7152 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7153 /* In case of strings we have special handling for arrays, so
7154 don't consume them as initializer value (which would commit them
7155 to some anonymous symbol). */
7156 tok != TOK_LSTR && tok != TOK_STR &&
7157 !(flags & DIF_SIZE_ONLY)) {
7158 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7159 flags |= DIF_HAVE_ELEM;
7162 if ((flags & DIF_HAVE_ELEM) &&
7163 !(type->t & VT_ARRAY) &&
7164 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7165 The source type might have VT_CONSTANT set, which is
7166 of course assignable to non-const elements. */
7167 is_compatible_unqualified_types(type, &vtop->type)) {
7168 init_putv(type, sec, c);
7169 } else if (type->t & VT_ARRAY) {
7170 s = type->ref;
7171 n = s->c;
7172 t1 = pointed_type(type);
7173 size1 = type_size(t1, &align1);
7175 no_oblock = 1;
7176 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7177 tok == '{') {
7178 if (tok != '{')
7179 tcc_error("character array initializer must be a literal,"
7180 " optionally enclosed in braces");
7181 skip('{');
7182 no_oblock = 0;
7185 /* only parse strings here if correct type (otherwise: handle
7186 them as ((w)char *) expressions */
7187 if ((tok == TOK_LSTR &&
7188 #ifdef TCC_TARGET_PE
7189 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7190 #else
7191 (t1->t & VT_BTYPE) == VT_INT
7192 #endif
7193 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7194 len = 0;
7195 while (tok == TOK_STR || tok == TOK_LSTR) {
7196 int cstr_len, ch;
7198 /* compute maximum number of chars wanted */
7199 if (tok == TOK_STR)
7200 cstr_len = tokc.str.size;
7201 else
7202 cstr_len = tokc.str.size / sizeof(nwchar_t);
7203 cstr_len--;
7204 nb = cstr_len;
7205 if (n >= 0 && nb > (n - len))
7206 nb = n - len;
7207 if (!(flags & DIF_SIZE_ONLY)) {
7208 if (cstr_len > nb)
7209 tcc_warning("initializer-string for array is too long");
7210 /* in order to go faster for common case (char
7211 string in global variable, we handle it
7212 specifically */
7213 if (sec && tok == TOK_STR && size1 == 1) {
7214 if (!NODATA_WANTED)
7215 memcpy(sec->data + c + len, tokc.str.data, nb);
7216 } else {
7217 for(i=0;i<nb;i++) {
7218 if (tok == TOK_STR)
7219 ch = ((unsigned char *)tokc.str.data)[i];
7220 else
7221 ch = ((nwchar_t *)tokc.str.data)[i];
7222 vpushi(ch);
7223 init_putv(t1, sec, c + (len + i) * size1);
7227 len += nb;
7228 next();
7230 /* only add trailing zero if enough storage (no
7231 warning in this case since it is standard) */
7232 if (n < 0 || len < n) {
7233 if (!(flags & DIF_SIZE_ONLY)) {
7234 vpushi(0);
7235 init_putv(t1, sec, c + (len * size1));
7237 len++;
7239 len *= size1;
7240 } else {
7241 indexsym.c = 0;
7242 f = &indexsym;
7244 do_init_list:
7245 len = 0;
7246 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7247 len = decl_designator(type, sec, c, &f, flags, len);
7248 flags &= ~DIF_HAVE_ELEM;
7249 if (type->t & VT_ARRAY) {
7250 ++indexsym.c;
7251 /* special test for multi dimensional arrays (may not
7252 be strictly correct if designators are used at the
7253 same time) */
7254 if (no_oblock && len >= n*size1)
7255 break;
7256 } else {
7257 if (s->type.t == VT_UNION)
7258 f = NULL;
7259 else
7260 f = f->next;
7261 if (no_oblock && f == NULL)
7262 break;
7265 if (tok == '}')
7266 break;
7267 skip(',');
7270 /* put zeros at the end */
7271 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7272 init_putz(sec, c + len, n*size1 - len);
7273 if (!no_oblock)
7274 skip('}');
7275 /* patch type size if needed, which happens only for array types */
7276 if (n < 0)
7277 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7278 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7279 size1 = 1;
7280 no_oblock = 1;
7281 if ((flags & DIF_FIRST) || tok == '{') {
7282 skip('{');
7283 no_oblock = 0;
7285 s = type->ref;
7286 f = s->next;
7287 n = s->c;
7288 goto do_init_list;
7289 } else if (tok == '{') {
7290 if (flags & DIF_HAVE_ELEM)
7291 skip(';');
7292 next();
7293 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7294 skip('}');
7295 } else if ((flags & DIF_SIZE_ONLY)) {
7296 /* If we supported only ISO C we wouldn't have to accept calling
7297 this on anything than an array if DIF_SIZE_ONLY (and even then
7298 only on the outermost level, so no recursion would be needed),
7299 because initializing a flex array member isn't supported.
7300 But GNU C supports it, so we need to recurse even into
7301 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7302 /* just skip expression */
7303 skip_or_save_block(NULL);
7304 } else {
7305 if (!(flags & DIF_HAVE_ELEM)) {
7306 /* This should happen only when we haven't parsed
7307 the init element above for fear of committing a
7308 string constant to memory too early. */
7309 if (tok != TOK_STR && tok != TOK_LSTR)
7310 expect("string constant");
7311 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7313 init_putv(type, sec, c);
7317 /* parse an initializer for type 't' if 'has_init' is non zero, and
7318 allocate space in local or global data space ('r' is either
7319 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7320 variable 'v' of scope 'scope' is declared before initializers
7321 are parsed. If 'v' is zero, then a reference to the new object
7322 is put in the value stack. If 'has_init' is 2, a special parsing
7323 is done to handle string constants. */
7324 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7325 int has_init, int v, int scope)
7327 int size, align, addr;
7328 TokenString *init_str = NULL;
7330 Section *sec;
7331 Sym *flexible_array;
7332 Sym *sym = NULL;
7333 int saved_nocode_wanted = nocode_wanted;
7334 #ifdef CONFIG_TCC_BCHECK
7335 int bcheck;
7336 #endif
7338 /* Always allocate static or global variables */
7339 if (v && (r & VT_VALMASK) == VT_CONST)
7340 nocode_wanted |= 0x80000000;
7342 #ifdef CONFIG_TCC_BCHECK
7343 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7344 #endif
7346 flexible_array = NULL;
7347 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7348 Sym *field = type->ref->next;
7349 if (field) {
7350 while (field->next)
7351 field = field->next;
7352 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7353 flexible_array = field;
7357 size = type_size(type, &align);
7358 /* If unknown size, we must evaluate it before
7359 evaluating initializers because
7360 initializers can generate global data too
7361 (e.g. string pointers or ISOC99 compound
7362 literals). It also simplifies local
7363 initializers handling */
7364 if (size < 0 || (flexible_array && has_init)) {
7365 if (!has_init)
7366 tcc_error("unknown type size");
7367 /* get all init string */
7368 if (has_init == 2) {
7369 init_str = tok_str_alloc();
7370 /* only get strings */
7371 while (tok == TOK_STR || tok == TOK_LSTR) {
7372 tok_str_add_tok(init_str);
7373 next();
7375 tok_str_add(init_str, -1);
7376 tok_str_add(init_str, 0);
7377 } else {
7378 skip_or_save_block(&init_str);
7380 unget_tok(0);
7382 /* compute size */
7383 begin_macro(init_str, 1);
7384 next();
7385 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7386 /* prepare second initializer parsing */
7387 macro_ptr = init_str->str;
7388 next();
7390 /* if still unknown size, error */
7391 size = type_size(type, &align);
7392 if (size < 0)
7393 tcc_error("unknown type size");
7395 /* If there's a flex member and it was used in the initializer
7396 adjust size. */
7397 if (flexible_array &&
7398 flexible_array->type.ref->c > 0)
7399 size += flexible_array->type.ref->c
7400 * pointed_size(&flexible_array->type);
7401 /* take into account specified alignment if bigger */
7402 if (ad->a.aligned) {
7403 int speca = 1 << (ad->a.aligned - 1);
7404 if (speca > align)
7405 align = speca;
7406 } else if (ad->a.packed) {
7407 align = 1;
7410 if (!v && NODATA_WANTED)
7411 size = 0, align = 1;
7413 if ((r & VT_VALMASK) == VT_LOCAL) {
7414 sec = NULL;
7415 #ifdef CONFIG_TCC_BCHECK
7416 if (bcheck && (type->t & VT_ARRAY)) {
7417 loc--;
7419 #endif
7420 loc = (loc - size) & -align;
7421 addr = loc;
7422 #ifdef CONFIG_TCC_BCHECK
7423 /* handles bounds */
7424 /* XXX: currently, since we do only one pass, we cannot track
7425 '&' operators, so we add only arrays */
7426 if (bcheck && (type->t & VT_ARRAY)) {
7427 addr_t *bounds_ptr;
7428 /* add padding between regions */
7429 loc--;
7430 /* then add local bound info */
7431 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7432 bounds_ptr[0] = addr;
7433 bounds_ptr[1] = size;
7435 #endif
7436 if (v) {
7437 /* local variable */
7438 #ifdef CONFIG_TCC_ASM
7439 if (ad->asm_label) {
7440 int reg = asm_parse_regvar(ad->asm_label);
7441 if (reg >= 0)
7442 r = (r & ~VT_VALMASK) | reg;
7444 #endif
7445 sym = sym_push(v, type, r, addr);
7446 if (ad->cleanup_func) {
7447 Sym *cls = sym_push2(&all_cleanups,
7448 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7449 cls->prev_tok = sym;
7450 cls->next = ad->cleanup_func;
7451 cls->ncl = cur_scope->cl.s;
7452 cur_scope->cl.s = cls;
7455 sym->a = ad->a;
7456 } else {
7457 /* push local reference */
7458 vset(type, r, addr);
7460 } else {
7461 if (v && scope == VT_CONST) {
7462 /* see if the symbol was already defined */
7463 sym = sym_find(v);
7464 if (sym) {
7465 patch_storage(sym, ad, type);
7466 /* we accept several definitions of the same global variable. */
7467 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7468 goto no_alloc;
7472 /* allocate symbol in corresponding section */
7473 sec = ad->section;
7474 if (!sec) {
7475 if (has_init)
7476 sec = data_section;
7477 else if (tcc_state->nocommon)
7478 sec = bss_section;
7481 if (sec) {
7482 addr = section_add(sec, size, align);
7483 #ifdef CONFIG_TCC_BCHECK
7484 /* add padding if bound check */
7485 if (bcheck)
7486 section_add(sec, 1, 1);
7487 #endif
7488 } else {
7489 addr = align; /* SHN_COMMON is special, symbol value is align */
7490 sec = common_section;
7493 if (v) {
7494 if (!sym) {
7495 sym = sym_push(v, type, r | VT_SYM, 0);
7496 patch_storage(sym, ad, NULL);
7498 /* update symbol definition */
7499 put_extern_sym(sym, sec, addr, size);
7500 } else {
7501 /* push global reference */
7502 vpush_ref(type, sec, addr, size);
7503 sym = vtop->sym;
7504 vtop->r |= r;
7507 #ifdef CONFIG_TCC_BCHECK
7508 /* handles bounds now because the symbol must be defined
7509 before for the relocation */
7510 if (bcheck) {
7511 addr_t *bounds_ptr;
7513 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7514 /* then add global bound info */
7515 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7516 bounds_ptr[0] = 0; /* relocated */
7517 bounds_ptr[1] = size;
7519 #endif
7522 if (type->t & VT_VLA) {
7523 int a;
7525 if (NODATA_WANTED)
7526 goto no_alloc;
7528 /* save current stack pointer */
7529 if (root_scope->vla.loc == 0) {
7530 struct scope *v = cur_scope;
7531 gen_vla_sp_save(loc -= PTR_SIZE);
7532 do v->vla.loc = loc; while ((v = v->prev));
7535 vla_runtime_type_size(type, &a);
7536 gen_vla_alloc(type, a);
7537 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7538 /* on _WIN64, because of the function args scratch area, the
7539 result of alloca differs from RSP and is returned in RAX. */
7540 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7541 #endif
7542 gen_vla_sp_save(addr);
7543 cur_scope->vla.loc = addr;
7544 cur_scope->vla.num++;
7546 } else if (has_init) {
7547 size_t oldreloc_offset = 0;
7548 if (sec && sec->reloc)
7549 oldreloc_offset = sec->reloc->data_offset;
7550 decl_initializer(type, sec, addr, DIF_FIRST);
7551 if (sec && sec->reloc)
7552 squeeze_multi_relocs(sec, oldreloc_offset);
7553 /* patch flexible array member size back to -1, */
7554 /* for possible subsequent similar declarations */
7555 if (flexible_array)
7556 flexible_array->type.ref->c = -1;
7559 no_alloc:
7560 /* restore parse state if needed */
7561 if (init_str) {
7562 end_macro();
7563 next();
7566 nocode_wanted = saved_nocode_wanted;
7569 /* parse a function defined by symbol 'sym' and generate its code in
7570 'cur_text_section' */
7571 static void gen_function(Sym *sym, AttributeDef *ad)
7573 /* Initialize VLA state */
7574 struct scope f = { 0 };
7575 cur_scope = root_scope = &f;
7577 nocode_wanted = 0;
7578 ind = cur_text_section->data_offset;
7579 if (sym->a.aligned) {
7580 size_t newoff = section_add(cur_text_section, 0,
7581 1 << (sym->a.aligned - 1));
7582 gen_fill_nops(newoff - ind);
7584 /* NOTE: we patch the symbol size later */
7585 put_extern_sym(sym, cur_text_section, ind, 0);
7587 if (ad && ad->a.constructor) {
7588 add_init_array (tcc_state, sym);
7590 if (ad && ad->a.destructor) {
7591 add_fini_array (tcc_state, sym);
7594 funcname = get_tok_str(sym->v, NULL);
7595 func_ind = ind;
7597 /* put debug symbol */
7598 tcc_debug_funcstart(tcc_state, sym);
7599 /* push a dummy symbol to enable local sym storage */
7600 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7601 local_scope = 1; /* for function parameters */
7602 gfunc_prolog(&sym->type);
7603 local_scope = 0;
7604 rsym = 0;
7605 clear_temp_local_var_list();
7606 block(0);
7607 gsym(rsym);
7608 nocode_wanted = 0;
7609 gfunc_epilog();
7610 cur_text_section->data_offset = ind;
7611 /* reset local stack */
7612 sym_pop(&local_stack, NULL, 0);
7613 local_scope = 0;
7614 label_pop(&global_label_stack, NULL, 0);
7615 sym_pop(&all_cleanups, NULL, 0);
7616 /* patch symbol size */
7617 elfsym(sym)->st_size = ind - func_ind;
7618 /* end of function */
7619 tcc_debug_funcend(tcc_state, ind - func_ind);
7620 /* It's better to crash than to generate wrong code */
7621 cur_text_section = NULL;
7622 funcname = ""; /* for safety */
7623 func_vt.t = VT_VOID; /* for safety */
7624 func_var = 0; /* for safety */
7625 ind = 0; /* for safety */
7626 nocode_wanted = 0x80000000;
7627 check_vstack();
7630 static void gen_inline_functions(TCCState *s)
7632 Sym *sym;
7633 int inline_generated, i;
7634 struct InlineFunc *fn;
7636 tcc_open_bf(s, ":inline:", 0);
7637 /* iterate while inline function are referenced */
7638 do {
7639 inline_generated = 0;
7640 for (i = 0; i < s->nb_inline_fns; ++i) {
7641 fn = s->inline_fns[i];
7642 sym = fn->sym;
7643 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7644 /* the function was used or forced (and then not internal):
7645 generate its code and convert it to a normal function */
7646 fn->sym = NULL;
7647 if (file)
7648 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7649 begin_macro(fn->func_str, 1);
7650 next();
7651 cur_text_section = text_section;
7652 gen_function(sym, NULL);
7653 end_macro();
7655 inline_generated = 1;
7658 } while (inline_generated);
7659 tcc_close();
7662 ST_FUNC void free_inline_functions(TCCState *s)
7664 int i;
7665 /* free tokens of unused inline functions */
7666 for (i = 0; i < s->nb_inline_fns; ++i) {
7667 struct InlineFunc *fn = s->inline_fns[i];
7668 if (fn->sym)
7669 tok_str_free(fn->func_str);
7671 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7674 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7675 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7676 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7678 int v, has_init, r;
7679 CType type, btype;
7680 Sym *sym;
7681 AttributeDef ad, adbase;
7683 while (1) {
7684 if (tok == TOK_STATIC_ASSERT) {
7685 int c;
7687 next();
7688 skip('(');
7689 c = expr_const();
7690 skip(',');
7691 if (c == 0)
7692 tcc_error("%s", get_tok_str(tok, &tokc));
7693 next();
7694 skip(')');
7695 skip(';');
7696 continue;
7698 if (!parse_btype(&btype, &adbase)) {
7699 if (is_for_loop_init)
7700 return 0;
7701 /* skip redundant ';' if not in old parameter decl scope */
7702 if (tok == ';' && l != VT_CMP) {
7703 next();
7704 continue;
7706 if (l != VT_CONST)
7707 break;
7708 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7709 /* global asm block */
7710 asm_global_instr();
7711 continue;
7713 if (tok >= TOK_UIDENT) {
7714 /* special test for old K&R protos without explicit int
7715 type. Only accepted when defining global data */
7716 btype.t = VT_INT;
7717 } else {
7718 if (tok != TOK_EOF)
7719 expect("declaration");
7720 break;
7723 if (tok == ';') {
7724 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7725 int v = btype.ref->v;
7726 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7727 tcc_warning("unnamed struct/union that defines no instances");
7728 next();
7729 continue;
7731 if (IS_ENUM(btype.t)) {
7732 next();
7733 continue;
7736 while (1) { /* iterate thru each declaration */
7737 type = btype;
7738 /* If the base type itself was an array type of unspecified
7739 size (like in 'typedef int arr[]; arr x = {1};') then
7740 we will overwrite the unknown size by the real one for
7741 this decl. We need to unshare the ref symbol holding
7742 that size. */
7743 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7744 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7746 ad = adbase;
7747 type_decl(&type, &ad, &v, TYPE_DIRECT);
7748 #if 0
7750 char buf[500];
7751 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7752 printf("type = '%s'\n", buf);
7754 #endif
7755 if ((type.t & VT_BTYPE) == VT_FUNC) {
7756 /* if old style function prototype, we accept a
7757 declaration list */
7758 sym = type.ref;
7759 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7760 decl0(VT_CMP, 0, sym);
7761 /* always compile 'extern inline' */
7762 if (type.t & VT_EXTERN)
7763 type.t &= ~VT_INLINE;
7766 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7767 ad.asm_label = asm_label_instr();
7768 /* parse one last attribute list, after asm label */
7769 parse_attribute(&ad);
7770 #if 0
7771 /* gcc does not allow __asm__("label") with function definition,
7772 but why not ... */
7773 if (tok == '{')
7774 expect(";");
7775 #endif
7778 #ifdef TCC_TARGET_PE
7779 if (ad.a.dllimport || ad.a.dllexport) {
7780 if (type.t & VT_STATIC)
7781 tcc_error("cannot have dll linkage with static");
7782 if (type.t & VT_TYPEDEF) {
7783 tcc_warning("'%s' attribute ignored for typedef",
7784 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7785 (ad.a.dllexport = 0, "dllexport"));
7786 } else if (ad.a.dllimport) {
7787 if ((type.t & VT_BTYPE) == VT_FUNC)
7788 ad.a.dllimport = 0;
7789 else
7790 type.t |= VT_EXTERN;
7793 #endif
7794 if (tok == '{') {
7795 if (l != VT_CONST)
7796 tcc_error("cannot use local functions");
7797 if ((type.t & VT_BTYPE) != VT_FUNC)
7798 expect("function definition");
7800 /* reject abstract declarators in function definition
7801 make old style params without decl have int type */
7802 sym = type.ref;
7803 while ((sym = sym->next) != NULL) {
7804 if (!(sym->v & ~SYM_FIELD))
7805 expect("identifier");
7806 if (sym->type.t == VT_VOID)
7807 sym->type = int_type;
7810 /* put function symbol */
7811 type.t &= ~VT_EXTERN;
7812 sym = external_sym(v, &type, 0, &ad);
7813 /* static inline functions are just recorded as a kind
7814 of macro. Their code will be emitted at the end of
7815 the compilation unit only if they are used */
7816 if (sym->type.t & VT_INLINE) {
7817 struct InlineFunc *fn;
7818 const char *filename;
7820 filename = file ? file->filename : "";
7821 fn = tcc_malloc(sizeof *fn + strlen(filename));
7822 strcpy(fn->filename, filename);
7823 fn->sym = sym;
7824 skip_or_save_block(&fn->func_str);
7825 dynarray_add(&tcc_state->inline_fns,
7826 &tcc_state->nb_inline_fns, fn);
7827 } else {
7828 /* compute text section */
7829 cur_text_section = ad.section;
7830 if (!cur_text_section)
7831 cur_text_section = text_section;
7832 gen_function(sym, &ad);
7834 break;
7835 } else {
7836 if (l == VT_CMP) {
7837 /* find parameter in function parameter list */
7838 for (sym = func_sym->next; sym; sym = sym->next)
7839 if ((sym->v & ~SYM_FIELD) == v)
7840 goto found;
7841 tcc_error("declaration for parameter '%s' but no such parameter",
7842 get_tok_str(v, NULL));
7843 found:
7844 if (type.t & VT_STORAGE) /* 'register' is okay */
7845 tcc_error("storage class specified for '%s'",
7846 get_tok_str(v, NULL));
7847 if (sym->type.t != VT_VOID)
7848 tcc_error("redefinition of parameter '%s'",
7849 get_tok_str(v, NULL));
7850 convert_parameter_type(&type);
7851 sym->type = type;
7852 } else if (type.t & VT_TYPEDEF) {
7853 /* save typedefed type */
7854 /* XXX: test storage specifiers ? */
7855 sym = sym_find(v);
7856 if (sym && sym->sym_scope == local_scope) {
7857 if (!is_compatible_types(&sym->type, &type)
7858 || !(sym->type.t & VT_TYPEDEF))
7859 tcc_error("incompatible redefinition of '%s'",
7860 get_tok_str(v, NULL));
7861 sym->type = type;
7862 } else {
7863 sym = sym_push(v, &type, 0, 0);
7865 sym->a = ad.a;
7866 sym->f = ad.f;
7867 } else if ((type.t & VT_BTYPE) == VT_VOID
7868 && !(type.t & VT_EXTERN)) {
7869 tcc_error("declaration of void object");
7870 } else {
7871 r = 0;
7872 if ((type.t & VT_BTYPE) == VT_FUNC) {
7873 /* external function definition */
7874 /* specific case for func_call attribute */
7875 type.ref->f = ad.f;
7876 } else if (!(type.t & VT_ARRAY)) {
7877 /* not lvalue if array */
7878 r |= lvalue_type(type.t);
7880 has_init = (tok == '=');
7881 if (has_init && (type.t & VT_VLA))
7882 tcc_error("variable length array cannot be initialized");
7883 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7884 || (type.t & VT_BTYPE) == VT_FUNC
7885 /* as with GCC, uninitialized global arrays with no size
7886 are considered extern: */
7887 || ((type.t & VT_ARRAY) && !has_init
7888 && l == VT_CONST && type.ref->c < 0)
7890 /* external variable or function */
7891 type.t |= VT_EXTERN;
7892 sym = external_sym(v, &type, r, &ad);
7893 if (ad.alias_target) {
7894 ElfSym *esym;
7895 Sym *alias_target;
7896 alias_target = sym_find(ad.alias_target);
7897 esym = elfsym(alias_target);
7898 if (!esym)
7899 tcc_error("unsupported forward __alias__ attribute");
7900 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7902 } else {
7903 if (type.t & VT_STATIC)
7904 r |= VT_CONST;
7905 else
7906 r |= l;
7907 if (has_init)
7908 next();
7909 else if (l == VT_CONST)
7910 /* uninitialized global variables may be overridden */
7911 type.t |= VT_EXTERN;
7912 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7915 if (tok != ',') {
7916 if (is_for_loop_init)
7917 return 1;
7918 skip(';');
7919 break;
7921 next();
7925 return 0;
7928 static void decl(int l)
7930 decl0(l, 0, NULL);
7933 /* ------------------------------------------------------------------------- */
7934 #undef gjmp_addr
7935 #undef gjmp
7936 /* ------------------------------------------------------------------------- */