riscv: rewrite parameter passing
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind; }
63 /* Set 'nocode_wanted' after unconditional jumps */
64 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
65 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
69 #define gjmp gjmp_acs
70 /* <---- */
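/* Illustrative sketch (not part of tccgen.c): after an unconditional jump
   such as the one emitted for 'return', gjmp_acs()/gjmp_addr_acs() call
   CODE_OFF(), so the statements that follow are still parsed but emit no
   machine code until a label is reached and gsym()/gind() call CODE_ON()
   again.  For input like the hypothetical function below, the call to g()
   is suppressed: */
#if 0
int g(void);
int f(int x)
{
    return x;   /* unconditional jump: CODE_OFF() */
    g();        /* parsed, but no code generated while nocode_wanted is set */
}
#endif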
72 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /* list of temporary local variables on the stack in the current function */
94 ST_DATA struct temp_local_variable {
95 int location; // offset on the stack (SValue.c.i)
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
144 /* we use our own 'finite' function to avoid potential problems with
145 non-standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC int ieee_finite(double d)
149 int p[4];
150 memcpy(p, &d, sizeof(double));
151 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
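/* Illustrative note (assumes the little-endian layout the XXX above refers
   to): p[1] holds the sign bit, the 11 exponent bits and the top of the
   mantissa.  OR-ing with 0x800fffff forces every bit except the exponent
   to 1, so adding 1 carries out of bit 31 exactly when the exponent field
   is all ones (Inf/NaN); bit 31 of the sum is therefore 1 for finite
   values and 0 otherwise.  A minimal self-check, using the function above: */
#if 0
#include <assert.h>
static void ieee_finite_demo(void)
{
    double zero = 0.0;
    assert(ieee_finite(1.0) && ieee_finite(-0.0));
    assert(!ieee_finite(1.0 / zero));   /* +Inf */
    assert(!ieee_finite(zero / zero));  /* NaN  */
}
#endif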
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
158 #endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
175 #if 0
176 void pv (const char *lbl, int a, int b)
178 int i;
179 for (i = a; i < a + b; ++i) {
180 SValue *p = &vtop[-i];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
185 #endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
240 /* put function symbol */
241 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
243 char buf[512];
245 if (!s1->do_debug)
246 return;
248 /* stabs info */
249 /* XXX: we put here a dummy type */
250 snprintf(buf, sizeof(buf), "%s:%c1",
251 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
252 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
253 cur_text_section, sym->c);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE, 0, file->line_num, 0);
257 last_ind = 0;
258 last_line_num = 0;
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC int tccgen_compile(TCCState *s1)
272 cur_text_section = NULL;
273 funcname = "";
274 anon_sym = SYM_FIRST_ANOM;
275 section_sym = 0;
276 const_wanted = 0;
277 nocode_wanted = 0x80000000;
278 local_scope = 0;
280 /* define some often used types */
281 int_type.t = VT_INT;
282 char_pointer_type.t = VT_BYTE;
283 mk_pointer(&char_pointer_type);
284 #if PTR_SIZE == 4
285 size_type.t = VT_INT | VT_UNSIGNED;
286 ptrdiff_type.t = VT_INT;
287 #elif LONG_SIZE == 4
288 size_type.t = VT_LLONG | VT_UNSIGNED;
289 ptrdiff_type.t = VT_LLONG;
290 #else
291 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
292 ptrdiff_type.t = VT_LONG | VT_LLONG;
293 #endif
294 func_old_type.t = VT_FUNC;
295 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
296 func_old_type.ref->f.func_call = FUNC_CDECL;
297 func_old_type.ref->f.func_type = FUNC_OLD;
299 tcc_debug_start(s1);
301 #ifdef TCC_TARGET_ARM
302 arm_init(s1);
303 #endif
305 #ifdef INC_DEBUG
306 printf("%s: **** new file\n", file->filename);
307 #endif
309 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
310 next();
311 decl(VT_CONST);
312 gen_inline_functions(s1);
313 check_vstack();
314 /* end of translation unit info */
315 tcc_debug_end(s1);
316 return 0;
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC void update_storage(Sym *sym)
330 ElfSym *esym;
331 int sym_bind, old_sym_bind;
333 esym = elfsym(sym);
334 if (!esym)
335 return;
337 if (sym->a.visibility)
338 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
339 | sym->a.visibility;
341 if (sym->type.t & (VT_STATIC | VT_INLINE))
342 sym_bind = STB_LOCAL;
343 else if (sym->a.weak)
344 sym_bind = STB_WEAK;
345 else
346 sym_bind = STB_GLOBAL;
347 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
348 if (sym_bind != old_sym_bind) {
349 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
352 #ifdef TCC_TARGET_PE
353 if (sym->a.dllimport)
354 esym->st_other |= ST_PE_IMPORT;
355 if (sym->a.dllexport)
356 esym->st_other |= ST_PE_EXPORT;
357 #endif
359 #if 0
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym->v, NULL),
362 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
363 sym->a.visibility,
364 sym->a.dllexport,
365 sym->a.dllimport
367 #endif
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
375 addr_t value, unsigned long size,
376 int can_add_underscore)
378 int sym_type, sym_bind, info, other, t;
379 ElfSym *esym;
380 const char *name;
381 char buf1[256];
382 #ifdef CONFIG_TCC_BCHECK
383 char buf[32];
384 #endif
386 if (!sym->c) {
387 name = get_tok_str(sym->v, NULL);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state->do_bounds_check) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
393 switch(sym->v) {
394 #ifdef TCC_TARGET_PE
395 /* XXX: we rely only on malloc hooks */
396 case TOK_malloc:
397 case TOK_free:
398 case TOK_realloc:
399 case TOK_memalign:
400 case TOK_calloc:
401 #endif
402 case TOK_memcpy:
403 case TOK_memmove:
404 case TOK_memset:
405 case TOK_strlen:
406 case TOK_strcpy:
407 case TOK_alloca:
408 strcpy(buf, "__bound_");
409 strcat(buf, name);
410 name = buf;
411 break;
414 #endif
415 t = sym->type.t;
416 if ((t & VT_BTYPE) == VT_FUNC) {
417 sym_type = STT_FUNC;
418 } else if ((t & VT_BTYPE) == VT_VOID) {
419 sym_type = STT_NOTYPE;
420 } else {
421 sym_type = STT_OBJECT;
423 if (t & (VT_STATIC | VT_INLINE))
424 sym_bind = STB_LOCAL;
425 else
426 sym_bind = STB_GLOBAL;
427 other = 0;
428 #ifdef TCC_TARGET_PE
429 if (sym_type == STT_FUNC && sym->type.ref) {
430 Sym *ref = sym->type.ref;
431 if (ref->a.nodecorate) {
432 can_add_underscore = 0;
434 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
435 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
436 name = buf1;
437 other |= ST_PE_STDCALL;
438 can_add_underscore = 0;
441 #endif
442 if (tcc_state->leading_underscore && can_add_underscore) {
443 buf1[0] = '_';
444 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
445 name = buf1;
447 if (sym->asm_label)
448 name = get_tok_str(sym->asm_label, NULL);
449 info = ELFW(ST_INFO)(sym_bind, sym_type);
450 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
451 } else {
452 esym = elfsym(sym);
453 esym->st_value = value;
454 esym->st_size = size;
455 esym->st_shndx = sh_num;
457 update_storage(sym);
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
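/* Illustrative sketch (not part of tccgen.c, PE targets only): the stdcall
   decoration applied in put_extern_sym2() turns a prototype such as the
   one below into the symbol "_GetTickCount@0" -- a leading underscore plus
   '@' and ref->f.func_args * PTR_SIZE: */
#if 0
unsigned long __attribute__((stdcall)) GetTickCount(void);
#endif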
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
486 #if PTR_SIZE == 4
487 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
489 greloca(s, sym, offset, type, 0);
491 #endif
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym *__sym_malloc(void)
497 Sym *sym_pool, *sym, *last_sym;
498 int i;
500 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
501 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
503 last_sym = sym_free_first;
504 sym = sym_pool;
505 for(i = 0; i < SYM_POOL_NB; i++) {
506 sym->next = last_sym;
507 last_sym = sym;
508 sym++;
510 sym_free_first = last_sym;
511 return last_sym;
514 static inline Sym *sym_malloc(void)
516 Sym *sym;
517 #ifndef SYM_DEBUG
518 sym = sym_free_first;
519 if (!sym)
520 sym = __sym_malloc();
521 sym_free_first = sym->next;
522 return sym;
523 #else
524 sym = tcc_malloc(sizeof(Sym));
525 return sym;
526 #endif
529 ST_INLN void sym_free(Sym *sym)
531 #ifndef SYM_DEBUG
532 sym->next = sym_free_first;
533 sym_free_first = sym;
534 #else
535 tcc_free(sym);
536 #endif
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
598 Sym *s, **ps;
599 TokenSym *ts;
601 if (local_stack)
602 ps = &local_stack;
603 else
604 ps = &global_stack;
605 s = sym_push2(ps, v, type->t, c);
606 s->type.ref = type->ref;
607 s->r = r;
608 /* don't record fields or anonymous symbols */
609 /* XXX: simplify */
610 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
611 /* record symbol in token array */
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 s->prev_tok = *ps;
618 *ps = s;
619 s->sym_scope = local_scope;
620 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v & ~SYM_STRUCT, NULL));
624 return s;
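/* Illustrative sketch (not part of tccgen.c): the redeclaration check at
   the end of sym_push() fires only when the previous binding of the token
   belongs to the same scope; shadowing in an inner block is fine: */
#if 0
void scope_demo(void)
{
    int n = 1;
    { int n = 2; (void)n; }   /* ok: pushed in a deeper local_scope     */
    /* int n = 3; */          /* same scope: "redeclaration of 'n'"     */
    (void)n;
}
#endif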
627 /* push a global identifier */
628 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
630 Sym *s, **ps;
631 s = sym_push2(&global_stack, v, t, c);
632 s->r = VT_CONST | VT_SYM;
633 /* don't record anonymous symbol */
634 if (v < SYM_FIRST_ANOM) {
635 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps != NULL && (*ps)->sym_scope)
639 ps = &(*ps)->prev_tok;
640 s->prev_tok = *ps;
641 *ps = s;
643 return s;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
650 Sym *s, *ss, **ps;
651 TokenSym *ts;
652 int v;
654 s = *ptop;
655 while(s != b) {
656 ss = s->prev;
657 v = s->v;
658 /* remove symbol in token array */
659 /* XXX: simplify */
660 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
661 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
662 if (v & SYM_STRUCT)
663 ps = &ts->sym_struct;
664 else
665 ps = &ts->sym_identifier;
666 *ps = s->prev_tok;
668 if (!keep)
669 sym_free(s);
670 s = ss;
672 if (!keep)
673 *ptop = b;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* We cannot rely on the CPU flags if other instructions are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong,
686 as its value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop->r == VT_CMP && !nocode_wanted)
692 gv(RC_INT);
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
718 /* pop stack value */
719 ST_FUNC void vpop(void)
721 int v;
722 v = vtop->r & VT_VALMASK;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
725 if (v == TREG_ST0) {
726 o(0xd8dd); /* fstp %st(0) */
727 } else
728 #endif
729 if (v == VT_CMP) {
730 /* need to put correct jump if && or || without test */
731 gsym(vtop->jtrue);
732 gsym(vtop->jfalse);
734 vtop--;
737 /* push a constant of type "type" with a dummy (unused) value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC void vrotb(int n)
810 int i;
811 SValue tmp;
813 vcheck_cmp();
814 tmp = vtop[-n + 1];
815 for(i=-n+1;i!=0;i++)
816 vtop[i] = vtop[i+1];
817 vtop[0] = tmp;
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC void vrote(SValue *e, int n)
825 int i;
826 SValue tmp;
828 vcheck_cmp();
829 tmp = *e;
830 for(i = 0;i < n - 1; i++)
831 e[-i] = e[-i - 1];
832 e[-n + 1] = tmp;
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC void vrott(int n)
840 vrote(vtop, n);
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC void vset_VT_CMP(int op)
849 vtop->r = VT_CMP;
850 vtop->cmp_op = op;
851 vtop->jfalse = 0;
852 vtop->jtrue = 0;
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op = vtop->cmp_op;
859 if (vtop->jtrue || vtop->jfalse) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv = op & (op < 2); /* small optimization */
862 vseti(VT_JMP+inv, gvtst(inv, 0));
863 } else {
864 /* otherwise convert flags (rsp. 0/1) to register */
865 vtop->c.i = op;
866 if (op < 2) /* doesn't seem to happen */
867 vtop->r = VT_CONST;
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv, int t)
874 int *p;
875 if (vtop->r != VT_CMP) {
876 vpushi(0);
877 gen_op(TOK_NE);
878 if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
880 else if (vtop->r == VT_CONST)
881 vset_VT_CMP(vtop->c.i != 0);
882 else
883 tcc_error("ICE");
885 p = inv ? &vtop->jfalse : &vtop->jtrue;
886 *p = gjmp_append(*p, t);
889 /* Generate value test
891 * Generate a test for any value (jump, comparison and integers) */
892 static int gvtst(int inv, int t)
894 int op, u, x;
896 gvtst_set(inv, t);
898 t = vtop->jtrue, u = vtop->jfalse;
899 if (inv)
900 x = u, u = t, t = x;
901 op = vtop->cmp_op;
903 /* jump to the wanted target */
904 if (op > 1)
905 t = gjmp_cond(op ^ inv, t);
906 else if (op != inv)
907 t = gjmp(t);
908 /* resolve complementary jumps to here */
909 gsym(u);
911 vtop--;
912 return t;
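/* Illustrative sketch (not part of tccgen.c): how the VT_CMP machinery is
   consumed.  In a hypothetical function like the one below, 'a < b' sets
   the CPU flags (vset_VT_CMP); the 'if' turns them directly into a
   conditional jump through gvtst(), while using the result as an integer
   goes through gv() and vset_VT_JMP() to materialize 0/1 in a register: */
#if 0
int cmp_demo(int a, int b)
{
    if (a < b)          /* VT_CMP consumed by gvtst(): conditional jump  */
        return -1;
    return a == b;      /* VT_CMP converted to 0/1 when loaded by gv()   */
}
#endif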
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType *type, Sym *sym)
919 CValue cval;
920 cval.i = 0;
921 vsetc(type, VT_CONST | VT_SYM, &cval);
922 vtop->sym = sym;
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
928 int v;
929 Sym *sym;
931 v = anon_sym++;
932 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
933 sym->type.t |= VT_STATIC;
934 put_extern_sym(sym, sec, offset, size);
935 return sym;
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
941 vpushsym(type, get_sym_ref(type, sec, offset, size));
944 /* define a new external reference to a symbol 'v' of type 'u' */
945 ST_FUNC Sym *external_global_sym(int v, CType *type)
947 Sym *s;
949 s = sym_find(v);
950 if (!s) {
951 /* push forward reference */
952 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
953 s->type.ref = type->ref;
954 } else if (IS_ASM_SYM(s)) {
955 s->type.t = type->t | (s->type.t & VT_EXTERN);
956 s->type.ref = type->ref;
957 update_storage(s);
959 return s;
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
965 if (sa1->aligned && !sa->aligned)
966 sa->aligned = sa1->aligned;
967 sa->packed |= sa1->packed;
968 sa->weak |= sa1->weak;
969 if (sa1->visibility != STV_DEFAULT) {
970 int vis = sa->visibility;
971 if (vis == STV_DEFAULT
972 || vis > sa1->visibility)
973 vis = sa1->visibility;
974 sa->visibility = vis;
976 sa->dllexport |= sa1->dllexport;
977 sa->nodecorate |= sa1->nodecorate;
978 sa->dllimport |= sa1->dllimport;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
984 if (fa1->func_call && !fa->func_call)
985 fa->func_call = fa1->func_call;
986 if (fa1->func_type && !fa->func_type)
987 fa->func_type = fa1->func_type;
988 if (fa1->func_args && !fa->func_args)
989 fa->func_args = fa1->func_args;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
995 merge_symattr(&ad->a, &ad1->a);
996 merge_funcattr(&ad->f, &ad1->f);
998 if (ad1->section)
999 ad->section = ad1->section;
1000 if (ad1->alias_target)
1001 ad->alias_target = ad1->alias_target;
1002 if (ad1->asm_label)
1003 ad->asm_label = ad1->asm_label;
1004 if (ad1->attr_mode)
1005 ad->attr_mode = ad1->attr_mode;
1008 /* Merge some type attributes. */
1009 static void patch_type(Sym *sym, CType *type)
1011 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1012 if (!(sym->type.t & VT_EXTERN))
1013 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1014 sym->type.t &= ~VT_EXTERN;
1017 if (IS_ASM_SYM(sym)) {
1018 /* stay static if both are static */
1019 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1020 sym->type.ref = type->ref;
1023 if (!is_compatible_types(&sym->type, type)) {
1024 tcc_error("incompatible types for redefinition of '%s'",
1025 get_tok_str(sym->v, NULL));
1027 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1028 int static_proto = sym->type.t & VT_STATIC;
1029 /* warn if static follows non-static function declaration */
1030 if ((type->t & VT_STATIC) && !static_proto
1031 /* XXX this test for inline shouldn't be here. Until we
1032 implement gnu-inline mode again it silences a warning for
1033 mingw caused by our workarounds. */
1034 && !((type->t | sym->type.t) & VT_INLINE))
1035 tcc_warning("static storage ignored for redefinition of '%s'",
1036 get_tok_str(sym->v, NULL));
1038 /* set 'inline' if both agree or if one has static */
1039 if ((type->t | sym->type.t) & VT_INLINE) {
1040 if (!((type->t ^ sym->type.t) & VT_INLINE)
1041 || ((type->t | sym->type.t) & VT_STATIC))
1042 static_proto |= VT_INLINE;
1045 if (0 == (type->t & VT_EXTERN)) {
1046 /* put complete type, use static from prototype */
1047 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1048 sym->type.ref = type->ref;
1049 } else {
1050 sym->type.t &= ~VT_INLINE | static_proto;
1053 if (sym->type.ref->f.func_type == FUNC_OLD
1054 && type->ref->f.func_type != FUNC_OLD) {
1055 sym->type.ref = type->ref;
1058 } else {
1059 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1060 /* set array size if it was omitted in extern declaration */
1061 sym->type.ref->c = type->ref->c;
1063 if ((type->t ^ sym->type.t) & VT_STATIC)
1064 tcc_warning("storage mismatch for redefinition of '%s'",
1065 get_tok_str(sym->v, NULL));
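/* Illustrative sketch (not part of tccgen.c): the kinds of redeclaration
   patch_type() reconciles -- completing an array size that an earlier
   extern declaration omitted, and warning when static storage appears
   only on the redefinition: */
#if 0
extern int tab[];       /* size unknown here                            */
int tab[4];             /* patch_type() copies the size back to 'tab'   */

int fn(void);           /* non-static prototype                         */
static int fn(void)     /* "static storage ignored for redefinition"    */
{ return 0; }
#endif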
1069 /* Merge some storage attributes. */
1070 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1072 if (type)
1073 patch_type(sym, type);
1075 #ifdef TCC_TARGET_PE
1076 if (sym->a.dllimport != ad->a.dllimport)
1077 tcc_error("incompatible dll linkage for redefinition of '%s'",
1078 get_tok_str(sym->v, NULL));
1079 #endif
1080 merge_symattr(&sym->a, &ad->a);
1081 if (ad->asm_label)
1082 sym->asm_label = ad->asm_label;
1083 update_storage(sym);
1086 /* copy sym to other stack */
1087 static Sym *sym_copy(Sym *s0, Sym **ps)
1089 Sym *s;
1090 s = sym_malloc(), *s = *s0;
1091 s->prev = *ps, *ps = s;
1092 if (s->v < SYM_FIRST_ANOM) {
1093 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1094 s->prev_tok = *ps, *ps = s;
1096 return s;
1099 /* copy a list of syms */
1100 static void sym_copy_ref(Sym *s0, Sym **ps)
1102 Sym *s, **sp = &s0->type.ref;
1103 for (s = *sp, *sp = NULL; s; s = s->next)
1104 sp = &(*sp = sym_copy(s, ps))->next;
1107 /* define a new external reference to a symbol 'v' */
1108 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1110 Sym *s; int bt;
1112 /* look for global symbol */
1113 s = sym_find(v);
1114 while (s && s->sym_scope)
1115 s = s->prev_tok;
1117 if (!s) {
1118 /* push forward reference */
1119 s = global_identifier_push(v, type->t, 0);
1120 s->r |= r;
1121 s->a = ad->a;
1122 s->asm_label = ad->asm_label;
1123 s->type.ref = type->ref;
1124 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1125 /* copy type to the global stack also */
1126 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1127 sym_copy_ref(s, &global_stack);
1128 } else {
1129 patch_storage(s, ad, type);
1130 bt = s->type.t & VT_BTYPE;
1132 /* push variables to local scope if any */
1133 if (local_stack && bt != VT_FUNC)
1134 s = sym_copy(s, &local_stack);
1135 return s;
1138 /* push a reference to global symbol v */
1139 ST_FUNC void vpush_global_sym(CType *type, int v)
1141 vpushsym(type, external_global_sym(v, type));
1144 /* save registers up to (vtop - n) stack entry */
1145 ST_FUNC void save_regs(int n)
1147 SValue *p, *p1;
1148 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1149 save_reg(p->r);
1152 /* save r to the memory stack, and mark it as being free */
1153 ST_FUNC void save_reg(int r)
1155 save_reg_upstack(r, 0);
1158 /* save r to the memory stack, and mark it as being free,
1159 if seen up to (vtop - n) stack entry */
1160 ST_FUNC void save_reg_upstack(int r, int n)
1162 int l, saved, size, align;
1163 SValue *p, *p1, sv;
1164 CType *type;
1166 if ((r &= VT_VALMASK) >= VT_CONST)
1167 return;
1168 if (nocode_wanted)
1169 return;
1171 /* modify all stack values */
1172 saved = 0;
1173 l = 0;
1174 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1175 if ((p->r & VT_VALMASK) == r || (p->r2 & VT_VALMASK) == r) {
1176 /* must save value on stack if not already done */
1177 if (!saved) {
1178 /* NOTE: must reload 'r' because r might be equal to r2 */
1179 r = p->r & VT_VALMASK;
1180 /* store register in the stack */
1181 type = &p->type;
1182 if ((p->r & VT_LVAL) ||
1183 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1184 #if PTR_SIZE == 8
1185 type = &char_pointer_type;
1186 #else
1187 type = &int_type;
1188 #endif
1189 size = type_size(type, &align);
1190 l=get_temp_local_var(size,align);
1191 sv.type.t = type->t;
1192 sv.r = VT_LOCAL | VT_LVAL;
1193 sv.c.i = l;
1194 store(r, &sv);
1195 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1196 /* x86 specific: need to pop fp register ST0 if saved */
1197 if (r == TREG_ST0) {
1198 o(0xd8dd); /* fstp %st(0) */
1200 #endif
1201 /* special long long case */
1202 if ((p->r2 & VT_VALMASK) < VT_CONST) {
1203 sv.c.i += PTR_SIZE;
1204 store(p->r2, &sv);
1206 saved = 1;
1208 /* mark that stack entry as being saved on the stack */
1209 if (p->r & VT_LVAL) {
1210 /* also clear the bounded flag because the
1211 relocation address of the function was stored in
1212 p->c.i */
1213 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1214 } else {
1215 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1217 p->r2 = VT_CONST;
1218 p->c.i = l;
1223 #ifdef TCC_TARGET_ARM
1224 /* find a register of class 'rc2' with at most one reference on stack.
1225 * If none, call get_reg(rc) */
1226 ST_FUNC int get_reg_ex(int rc, int rc2)
1228 int r;
1229 SValue *p;
1231 for(r=0;r<NB_REGS;r++) {
1232 if (reg_classes[r] & rc2) {
1233 int n;
1234 n=0;
1235 for(p = vstack; p <= vtop; p++) {
1236 if ((p->r & VT_VALMASK) == r ||
1237 (p->r2 & VT_VALMASK) == r)
1238 n++;
1240 if (n <= 1)
1241 return r;
1244 return get_reg(rc);
1246 #endif
1248 /* find a free register of class 'rc'. If none, save one register */
1249 ST_FUNC int get_reg(int rc)
1251 int r;
1252 SValue *p;
1254 /* find a free register */
1255 for(r=0;r<NB_REGS;r++) {
1256 if (reg_classes[r] & rc) {
1257 if (nocode_wanted)
1258 return r;
1259 for(p=vstack;p<=vtop;p++) {
1260 if ((p->r & VT_VALMASK) == r ||
1261 (p->r2 & VT_VALMASK) == r)
1262 goto notfound;
1264 return r;
1266 notfound: ;
1269 /* no register left : free the first one on the stack (VERY
1270 IMPORTANT to start from the bottom to ensure that we don't
1271 spill registers used in gen_opi()) */
1272 for(p=vstack;p<=vtop;p++) {
1273 /* look at second register (if long long) */
1274 r = p->r2 & VT_VALMASK;
1275 if (r < VT_CONST && (reg_classes[r] & rc))
1276 goto save_found;
1277 r = p->r & VT_VALMASK;
1278 if (r < VT_CONST && (reg_classes[r] & rc)) {
1279 save_found:
1280 save_reg(r);
1281 return r;
1284 /* Should never come here */
1285 return -1;
1288 /* find a free temporary local variable (return its offset on the stack) matching the given size and alignment. If none is found, add a new temporary stack variable */
1289 static int get_temp_local_var(int size,int align){
1290 int i;
1291 struct temp_local_variable *temp_var;
1292 int found_var;
1293 SValue *p;
1294 int r;
1295 char free;
1296 char found;
1297 found=0;
1298 for(i=0;i<nb_temp_local_vars;i++){
1299 temp_var=&arr_temp_local_vars[i];
1300 if(temp_var->size<size||align!=temp_var->align){
1301 continue;
1303 /*check if temp_var is free*/
1304 free=1;
1305 for(p=vstack;p<=vtop;p++) {
1306 r=p->r&VT_VALMASK;
1307 if(r==VT_LOCAL||r==VT_LLOCAL){
1308 if(p->c.i==temp_var->location){
1309 free=0;
1310 break;
1314 if(free){
1315 found_var=temp_var->location;
1316 found=1;
1317 break;
1320 if(!found){
1321 loc = (loc - size) & -align;
1322 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1323 temp_var=&arr_temp_local_vars[i];
1324 temp_var->location=loc;
1325 temp_var->size=size;
1326 temp_var->align=align;
1327 nb_temp_local_vars++;
1329 found_var=loc;
1331 return found_var;
1334 static void clear_temp_local_var_list(){
1335 nb_temp_local_vars=0;
1338 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1339 if needed */
1340 static void move_reg(int r, int s, int t)
1342 SValue sv;
1344 if (r != s) {
1345 save_reg(r);
1346 sv.type.t = t;
1347 sv.type.ref = NULL;
1348 sv.r = s;
1349 sv.c.i = 0;
1350 load(r, &sv);
1354 /* get address of vtop (vtop MUST BE an lvalue) */
1355 ST_FUNC void gaddrof(void)
1357 vtop->r &= ~VT_LVAL;
1358 /* tricky: if saved lvalue, then we can go back to lvalue */
1359 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1360 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1365 #ifdef CONFIG_TCC_BCHECK
1366 /* generate lvalue bound code */
1367 static void gbound(void)
1369 int lval_type;
1370 CType type1;
1372 vtop->r &= ~VT_MUSTBOUND;
1373 /* if lvalue, then use checking code before dereferencing */
1374 if (vtop->r & VT_LVAL) {
1375 /* if not VT_BOUNDED value, then make one */
1376 if (!(vtop->r & VT_BOUNDED)) {
1377 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1378 /* must save type because we must set it to int to get pointer */
1379 type1 = vtop->type;
1380 vtop->type.t = VT_PTR;
1381 gaddrof();
1382 vpushi(0);
1383 gen_bounded_ptr_add();
1384 vtop->r |= lval_type;
1385 vtop->type = type1;
1387 /* then check for dereferencing */
1388 gen_bounded_ptr_deref();
1391 #endif
1393 static void incr_bf_adr(int o)
1395 vtop->type = char_pointer_type;
1396 gaddrof();
1397 vpushi(o);
1398 gen_op('+');
1399 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1400 | (VT_BYTE|VT_UNSIGNED);
1401 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1402 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1405 /* single-byte load mode for packed or otherwise unaligned bitfields */
1406 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1408 int n, o, bits;
1409 save_reg_upstack(vtop->r, 1);
1410 vpush64(type->t & VT_BTYPE, 0); // B X
1411 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1412 do {
1413 vswap(); // X B
1414 incr_bf_adr(o);
1415 vdup(); // X B B
1416 n = 8 - bit_pos;
1417 if (n > bit_size)
1418 n = bit_size;
1419 if (bit_pos)
1420 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1421 if (n < 8)
1422 vpushi((1 << n) - 1), gen_op('&');
1423 gen_cast(type);
1424 if (bits)
1425 vpushi(bits), gen_op(TOK_SHL);
1426 vrotb(3); // B Y X
1427 gen_op('|'); // B X
1428 bits += n, bit_size -= n, o = 1;
1429 } while (bit_size);
1430 vswap(), vpop();
1431 if (!(type->t & VT_UNSIGNED)) {
1432 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1433 vpushi(n), gen_op(TOK_SHL);
1434 vpushi(n), gen_op(TOK_SAR);
1438 /* single-byte store mode for packed or otherwise unaligned bitfields */
1439 static void store_packed_bf(int bit_pos, int bit_size)
1441 int bits, n, o, m, c;
1443 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1444 vswap(); // X B
1445 save_reg_upstack(vtop->r, 1);
1446 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1447 do {
1448 incr_bf_adr(o); // X B
1449 vswap(); //B X
1450 c ? vdup() : gv_dup(); // B V X
1451 vrott(3); // X B V
1452 if (bits)
1453 vpushi(bits), gen_op(TOK_SHR);
1454 if (bit_pos)
1455 vpushi(bit_pos), gen_op(TOK_SHL);
1456 n = 8 - bit_pos;
1457 if (n > bit_size)
1458 n = bit_size;
1459 if (n < 8) {
1460 m = ((1 << n) - 1) << bit_pos;
1461 vpushi(m), gen_op('&'); // X B V1
1462 vpushv(vtop-1); // X B V1 B
1463 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1464 gen_op('&'); // X B V1 B1
1465 gen_op('|'); // X B V2
1467 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1468 vstore(), vpop(); // X B
1469 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1470 } while (bit_size);
1471 vpop(), vpop();
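/* Illustrative sketch (not part of tccgen.c, assumes the usual GCC packed
   attribute): when a bitfield cannot be covered by one aligned load of
   its base type -- e.g. a field that crosses a word limit inside a packed
   struct -- adjust_bf() reports VT_STRUCT and the byte-at-a-time helpers
   above are used instead: */
#if 0
struct hdr {
    unsigned char tag[3];
    unsigned int  len : 24;    /* bits 24..47: crosses a 32-bit word    */
} __attribute__((packed));

static unsigned get_len(struct hdr *h)             { return h->len; }
static void     set_len(struct hdr *h, unsigned v) { h->len = v;   }
#endif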
1474 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1476 int t;
1477 if (0 == sv->type.ref)
1478 return 0;
1479 t = sv->type.ref->auxtype;
1480 if (t != -1 && t != VT_STRUCT) {
1481 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1482 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1484 return t;
1487 /* store vtop in a register belonging to class 'rc'. lvalues are
1488 converted to values. Cannot be used for values that cannot be
1489 converted to a register value (such as structures). */
1490 ST_FUNC int gv(int rc)
1492 int r, bit_pos, bit_size, size, align, rc2;
1494 /* NOTE: get_reg can modify vstack[] */
1495 if (vtop->type.t & VT_BITFIELD) {
1496 CType type;
1498 bit_pos = BIT_POS(vtop->type.t);
1499 bit_size = BIT_SIZE(vtop->type.t);
1500 /* remove bit field info to avoid loops */
1501 vtop->type.t &= ~VT_STRUCT_MASK;
1503 type.ref = NULL;
1504 type.t = vtop->type.t & VT_UNSIGNED;
1505 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1506 type.t |= VT_UNSIGNED;
1508 r = adjust_bf(vtop, bit_pos, bit_size);
1510 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1511 type.t |= VT_LLONG;
1512 else
1513 type.t |= VT_INT;
1515 if (r == VT_STRUCT) {
1516 load_packed_bf(&type, bit_pos, bit_size);
1517 } else {
1518 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1519 /* cast to int to propagate signedness in following ops */
1520 gen_cast(&type);
1521 /* generate shifts */
1522 vpushi(bits - (bit_pos + bit_size));
1523 gen_op(TOK_SHL);
1524 vpushi(bits - bit_size);
1525 /* NOTE: transformed to SHR if unsigned */
1526 gen_op(TOK_SAR);
1528 r = gv(rc);
1529 } else {
1530 if (is_float(vtop->type.t) &&
1531 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1532 unsigned long offset;
1533 /* CPUs usually cannot use float constants, so we store them
1534 generically in the data segment */
1535 size = type_size(&vtop->type, &align);
1536 if (NODATA_WANTED)
1537 size = 0, align = 1;
1538 offset = section_add(data_section, size, align);
1539 vpush_ref(&vtop->type, data_section, offset, size);
1540 vswap();
1541 init_putv(&vtop->type, data_section, offset);
1542 vtop->r |= VT_LVAL;
1544 #ifdef CONFIG_TCC_BCHECK
1545 if (vtop->r & VT_MUSTBOUND)
1546 gbound();
1547 #endif
1548 #ifdef TCC_TARGET_RISCV64
1549 /* XXX mega hack */
1550 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE && rc == RC_FLOAT)
1551 rc = RC_INT;
1552 #endif
1554 r = vtop->r & VT_VALMASK;
1555 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1556 #ifndef TCC_TARGET_ARM64
1557 #ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
1558 if (rc == RC_IRET)
1559 rc2 = RC_LRET;
1560 #ifdef TCC_TARGET_X86_64
1561 else if (rc == RC_FRET)
1562 rc2 = RC_QRET;
1563 #endif
1564 #endif
1565 #endif
1566 /* need to reload if:
1567 - constant
1568 - lvalue (need to dereference pointer)
1569 - already a register, but not in the right class */
1570 if (r >= VT_CONST
1571 || (vtop->r & VT_LVAL)
1572 || !(reg_classes[r] & rc)
1573 #ifdef TCC_TARGET_RISCV64
1574 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && (vtop->r2 >= NB_REGS || !(reg_classes[vtop->r2] & rc2)))
1575 || ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE && (vtop->r2 >= NB_REGS || !(reg_classes[vtop->r2] & rc2)))
1576 #elif PTR_SIZE == 8
1577 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1578 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1579 #else
1580 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1581 #endif
1584 r = get_reg(rc);
1585 #ifdef TCC_TARGET_RISCV64
1586 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)) {
1587 int addr_type = VT_LLONG, load_size = 8, load_type = VT_LLONG;
1588 #elif PTR_SIZE == 8
1589 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1590 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1591 #else
1592 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1593 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1594 unsigned long long ll;
1595 #endif
1596 int r2, original_type;
1597 original_type = vtop->type.t;
1598 /* two register type load : expand to two words
1599 temporarily */
1600 #if PTR_SIZE == 4
1601 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1602 /* load constant */
1603 ll = vtop->c.i;
1604 vtop->c.i = ll; /* first word */
1605 load(r, vtop);
1606 vtop->r = r; /* save register value */
1607 vpushi(ll >> 32); /* second word */
1608 } else
1609 #endif
1610 if (vtop->r & VT_LVAL) {
1611 /* We do not want to modify the long long
1612 pointer here, so the safest (and least
1613 efficient) approach is to save all the other
1614 registers on the stack. XXX: totally inefficient. */
1615 #if 0
1616 save_regs(1);
1617 #else
1618 /* lvalue_save: save only if used further down the stack */
1619 save_reg_upstack(vtop->r, 1);
1620 #endif
1621 /* load from memory */
1622 vtop->type.t = load_type;
1623 load(r, vtop);
1624 vdup();
1625 vtop[-1].r = r; /* save register value */
1626 /* increment pointer to get second word */
1627 vtop->type.t = addr_type;
1628 gaddrof();
1629 vpushi(load_size);
1630 gen_op('+');
1631 vtop->r |= VT_LVAL;
1632 vtop->type.t = load_type;
1633 } else {
1634 /* move registers */
1635 load(r, vtop);
1636 vdup();
1637 vtop[-1].r = r; /* save register value */
1638 vtop->r = vtop[-1].r2;
1640 /* Allocate second register. Here we rely on the fact that
1641 get_reg() tries first to free r2 of an SValue. */
1642 r2 = get_reg(rc2);
1643 load(r2, vtop);
1644 vpop();
1645 /* write second register */
1646 vtop->r2 = r2;
1647 vtop->type.t = original_type;
1648 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1649 int t1, t;
1650 /* lvalue of scalar type : need to use lvalue type
1651 because of possible cast */
1652 t = vtop->type.t;
1653 t1 = t;
1654 /* compute memory access type */
1655 if (vtop->r & VT_LVAL_BYTE)
1656 t = VT_BYTE;
1657 else if (vtop->r & VT_LVAL_SHORT)
1658 t = VT_SHORT;
1659 if (vtop->r & VT_LVAL_UNSIGNED)
1660 t |= VT_UNSIGNED;
1661 vtop->type.t = t;
1662 load(r, vtop);
1663 /* restore wanted type */
1664 vtop->type.t = t1;
1665 } else {
1666 if (vtop->r == VT_CMP)
1667 vset_VT_JMP();
1668 /* one register type load */
1669 load(r, vtop);
1672 vtop->r = r;
1673 #ifdef TCC_TARGET_C67
1674 /* uses register pairs for doubles */
1675 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1676 vtop->r2 = r+1;
1677 #endif
1679 return r;
1682 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1683 ST_FUNC void gv2(int rc1, int rc2)
1685 /* generate the more generic register first. But VT_JMP or VT_CMP
1686 values must be generated first in all cases to avoid possible
1687 reload errors */
1688 if (vtop->r != VT_CMP && rc1 <= rc2) {
1689 vswap();
1690 gv(rc1);
1691 vswap();
1692 gv(rc2);
1693 /* test if reload is needed for first register */
1694 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1695 vswap();
1696 gv(rc1);
1697 vswap();
1699 } else {
1700 gv(rc2);
1701 vswap();
1702 gv(rc1);
1703 vswap();
1704 /* test if reload is needed for first register */
1705 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1706 gv(rc2);
1711 #ifndef TCC_TARGET_ARM64
1712 /* wrapper around RC_FRET to return a register by type */
1713 static int rc_fret(int t)
1715 #ifdef TCC_TARGET_X86_64
1716 if (t == VT_LDOUBLE) {
1717 return RC_ST0;
1719 #elif defined TCC_TARGET_RISCV64
1720 if (t == VT_LDOUBLE)
1721 return RC_IRET;
1722 #endif
1723 return RC_FRET;
1725 #endif
1727 /* wrapper around REG_FRET to return a register by type */
1728 static int reg_fret(int t)
1730 #ifdef TCC_TARGET_X86_64
1731 if (t == VT_LDOUBLE) {
1732 return TREG_ST0;
1734 #elif defined TCC_TARGET_RISCV64
1735 if (t == VT_LDOUBLE)
1736 return REG_IRET;
1737 #endif
1738 return REG_FRET;
1741 #if PTR_SIZE == 4
1742 /* expand 64bit on stack in two ints */
1743 ST_FUNC void lexpand(void)
1745 int u, v;
1746 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1747 v = vtop->r & (VT_VALMASK | VT_LVAL);
1748 if (v == VT_CONST) {
1749 vdup();
1750 vtop[0].c.i >>= 32;
1751 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1752 vdup();
1753 vtop[0].c.i += 4;
1754 } else {
1755 gv(RC_INT);
1756 vdup();
1757 vtop[0].r = vtop[-1].r2;
1758 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1760 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1762 #endif
1764 #if PTR_SIZE == 4
1765 /* build a long long from two ints */
1766 static void lbuild(int t)
1768 gv2(RC_INT, RC_INT);
1769 vtop[-1].r2 = vtop[0].r;
1770 vtop[-1].type.t = t;
1771 vpop();
1773 #endif
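/* Illustrative note (not part of tccgen.c): what lexpand()/lbuild() do
   numerically on a PTR_SIZE == 4 target, where a long long occupies a
   register pair.  A minimal standalone check: */
#if 0
#include <assert.h>
static void lexpand_demo(void)
{
    unsigned long long x = 0x0123456789abcdefULL;
    unsigned lo = (unsigned)x;            /* low word,  kept in r       */
    unsigned hi = (unsigned)(x >> 32);    /* high word, kept in r2      */
    assert(lo == 0x89abcdefu && hi == 0x01234567u);
    assert((((unsigned long long)hi << 32) | lo) == x);  /* lbuild()    */
}
#endif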
1775 /* convert stack entry to register and duplicate its value in another
1776 register */
1777 static void gv_dup(void)
1779 int rc, t, r, r1;
1780 SValue sv;
1782 t = vtop->type.t;
1783 #if PTR_SIZE == 4
1784 if ((t & VT_BTYPE) == VT_LLONG) {
1785 if (t & VT_BITFIELD) {
1786 gv(RC_INT);
1787 t = vtop->type.t;
1789 lexpand();
1790 gv_dup();
1791 vswap();
1792 vrotb(3);
1793 gv_dup();
1794 vrotb(4);
1795 /* stack: H L L1 H1 */
1796 lbuild(t);
1797 vrotb(3);
1798 vrotb(3);
1799 vswap();
1800 lbuild(t);
1801 vswap();
1802 } else
1803 #endif
1805 /* duplicate value */
1806 rc = RC_INT;
1807 sv.type.t = VT_INT;
1808 if (is_float(t)) {
1809 rc = RC_FLOAT;
1810 #ifdef TCC_TARGET_X86_64
1811 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1812 rc = RC_ST0;
1814 #elif defined TCC_TARGET_RISCV64
1815 if ((t & VT_BTYPE) == VT_LDOUBLE)
1816 rc = RC_INT;
1817 #endif
1818 sv.type.t = t;
1820 r = gv(rc);
1821 r1 = get_reg(rc);
1822 sv.r = r;
1823 sv.c.i = 0;
1824 load(r1, &sv); /* move r to r1 */
1825 vdup();
1826 /* duplicates value */
1827 if (r != r1)
1828 vtop->r = r1;
1832 #if PTR_SIZE == 4
1833 /* generate CPU independent (unsigned) long long operations */
1834 static void gen_opl(int op)
1836 int t, a, b, op1, c, i;
1837 int func;
1838 unsigned short reg_iret = REG_IRET;
1839 unsigned short reg_lret = REG_LRET;
1840 SValue tmp;
1842 switch(op) {
1843 case '/':
1844 case TOK_PDIV:
1845 func = TOK___divdi3;
1846 goto gen_func;
1847 case TOK_UDIV:
1848 func = TOK___udivdi3;
1849 goto gen_func;
1850 case '%':
1851 func = TOK___moddi3;
1852 goto gen_mod_func;
1853 case TOK_UMOD:
1854 func = TOK___umoddi3;
1855 gen_mod_func:
1856 #ifdef TCC_ARM_EABI
1857 reg_iret = TREG_R2;
1858 reg_lret = TREG_R3;
1859 #endif
1860 gen_func:
1861 /* call generic long long function */
1862 vpush_global_sym(&func_old_type, func);
1863 vrott(3);
1864 gfunc_call(2);
1865 vpushi(0);
1866 vtop->r = reg_iret;
1867 vtop->r2 = reg_lret;
1868 break;
1869 case '^':
1870 case '&':
1871 case '|':
1872 case '*':
1873 case '+':
1874 case '-':
1875 //pv("gen_opl A",0,2);
1876 t = vtop->type.t;
1877 vswap();
1878 lexpand();
1879 vrotb(3);
1880 lexpand();
1881 /* stack: L1 H1 L2 H2 */
1882 tmp = vtop[0];
1883 vtop[0] = vtop[-3];
1884 vtop[-3] = tmp;
1885 tmp = vtop[-2];
1886 vtop[-2] = vtop[-3];
1887 vtop[-3] = tmp;
1888 vswap();
1889 /* stack: H1 H2 L1 L2 */
1890 //pv("gen_opl B",0,4);
1891 if (op == '*') {
1892 vpushv(vtop - 1);
1893 vpushv(vtop - 1);
1894 gen_op(TOK_UMULL);
1895 lexpand();
1896 /* stack: H1 H2 L1 L2 ML MH */
1897 for(i=0;i<4;i++)
1898 vrotb(6);
1899 /* stack: ML MH H1 H2 L1 L2 */
1900 tmp = vtop[0];
1901 vtop[0] = vtop[-2];
1902 vtop[-2] = tmp;
1903 /* stack: ML MH H1 L2 H2 L1 */
1904 gen_op('*');
1905 vrotb(3);
1906 vrotb(3);
1907 gen_op('*');
1908 /* stack: ML MH M1 M2 */
1909 gen_op('+');
1910 gen_op('+');
1911 } else if (op == '+' || op == '-') {
1912 /* XXX: add non carry method too (for MIPS or alpha) */
1913 if (op == '+')
1914 op1 = TOK_ADDC1;
1915 else
1916 op1 = TOK_SUBC1;
1917 gen_op(op1);
1918 /* stack: H1 H2 (L1 op L2) */
1919 vrotb(3);
1920 vrotb(3);
1921 gen_op(op1 + 1); /* TOK_xxxC2 */
1922 } else {
1923 gen_op(op);
1924 /* stack: H1 H2 (L1 op L2) */
1925 vrotb(3);
1926 vrotb(3);
1927 /* stack: (L1 op L2) H1 H2 */
1928 gen_op(op);
1929 /* stack: (L1 op L2) (H1 op H2) */
1931 /* stack: L H */
1932 lbuild(t);
1933 break;
1934 case TOK_SAR:
1935 case TOK_SHR:
1936 case TOK_SHL:
1937 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1938 t = vtop[-1].type.t;
1939 vswap();
1940 lexpand();
1941 vrotb(3);
1942 /* stack: L H shift */
1943 c = (int)vtop->c.i;
1944 /* constant: simpler */
1945 /* NOTE: all comments are for SHL. the other cases are
1946 done by swapping words */
1947 vpop();
1948 if (op != TOK_SHL)
1949 vswap();
1950 if (c >= 32) {
1951 /* stack: L H */
1952 vpop();
1953 if (c > 32) {
1954 vpushi(c - 32);
1955 gen_op(op);
1957 if (op != TOK_SAR) {
1958 vpushi(0);
1959 } else {
1960 gv_dup();
1961 vpushi(31);
1962 gen_op(TOK_SAR);
1964 vswap();
1965 } else {
1966 vswap();
1967 gv_dup();
1968 /* stack: H L L */
1969 vpushi(c);
1970 gen_op(op);
1971 vswap();
1972 vpushi(32 - c);
1973 if (op == TOK_SHL)
1974 gen_op(TOK_SHR);
1975 else
1976 gen_op(TOK_SHL);
1977 vrotb(3);
1978 /* stack: L L H */
1979 vpushi(c);
1980 if (op == TOK_SHL)
1981 gen_op(TOK_SHL);
1982 else
1983 gen_op(TOK_SHR);
1984 gen_op('|');
1986 if (op != TOK_SHL)
1987 vswap();
1988 lbuild(t);
1989 } else {
1990 /* XXX: should provide a faster fallback on x86 ? */
1991 switch(op) {
1992 case TOK_SAR:
1993 func = TOK___ashrdi3;
1994 goto gen_func;
1995 case TOK_SHR:
1996 func = TOK___lshrdi3;
1997 goto gen_func;
1998 case TOK_SHL:
1999 func = TOK___ashldi3;
2000 goto gen_func;
2003 break;
2004 default:
2005 /* compare operations */
2006 t = vtop->type.t;
2007 vswap();
2008 lexpand();
2009 vrotb(3);
2010 lexpand();
2011 /* stack: L1 H1 L2 H2 */
2012 tmp = vtop[-1];
2013 vtop[-1] = vtop[-2];
2014 vtop[-2] = tmp;
2015 /* stack: L1 L2 H1 H2 */
2016 save_regs(4);
2017 /* compare high */
2018 op1 = op;
2019 /* when values are equal, we need to compare low words. since
2020 the jump is inverted, we invert the test too. */
2021 if (op1 == TOK_LT)
2022 op1 = TOK_LE;
2023 else if (op1 == TOK_GT)
2024 op1 = TOK_GE;
2025 else if (op1 == TOK_ULT)
2026 op1 = TOK_ULE;
2027 else if (op1 == TOK_UGT)
2028 op1 = TOK_UGE;
2029 a = 0;
2030 b = 0;
2031 gen_op(op1);
2032 if (op == TOK_NE) {
2033 b = gvtst(0, 0);
2034 } else {
2035 a = gvtst(1, 0);
2036 if (op != TOK_EQ) {
2037 /* generate non equal test */
2038 vpushi(0);
2039 vset_VT_CMP(TOK_NE);
2040 b = gvtst(0, 0);
2043 /* compare low. Always unsigned */
2044 op1 = op;
2045 if (op1 == TOK_LT)
2046 op1 = TOK_ULT;
2047 else if (op1 == TOK_LE)
2048 op1 = TOK_ULE;
2049 else if (op1 == TOK_GT)
2050 op1 = TOK_UGT;
2051 else if (op1 == TOK_GE)
2052 op1 = TOK_UGE;
2053 gen_op(op1);
2054 #if 0//def TCC_TARGET_I386
2055 if (op == TOK_NE) { gsym(b); break; }
2056 if (op == TOK_EQ) { gsym(a); break; }
2057 #endif
2058 gvtst_set(1, a);
2059 gvtst_set(0, b);
2060 break;
2063 #endif
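/* Illustrative sketch (not part of tccgen.c): the decomposition gen_opl()
   uses for 64-bit '*' on 32-bit targets, written as plain C.  One widening
   32x32 multiply (TOK_UMULL) plus two truncating multiplies and two adds
   reproduce the low 64 bits of the product: */
#if 0
#include <stdint.h>
static uint64_t mul64_by_parts(uint64_t a, uint64_t b)
{
    uint32_t l1 = (uint32_t)a, h1 = (uint32_t)(a >> 32);
    uint32_t l2 = (uint32_t)b, h2 = (uint32_t)(b >> 32);
    uint64_t m  = (uint64_t)l1 * l2;                    /* TOK_UMULL -> ML:MH */
    uint32_t hi = (uint32_t)(m >> 32) + h1 * l2 + h2 * l1;
    return ((uint64_t)hi << 32) | (uint32_t)m;          /* == a * b           */
}
#endif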
2065 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2067 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2068 return (a ^ b) >> 63 ? -x : x;
2071 static int gen_opic_lt(uint64_t a, uint64_t b)
2073 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
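/* Illustrative note (not part of tccgen.c): both helpers above emulate
   signed 64-bit semantics using only unsigned arithmetic -- gen_opic_lt()
   flips the sign bits so an unsigned compare gives the signed ordering,
   and gen_opic_sdiv() divides magnitudes and restores the sign from a^b.
   A minimal self-check against the functions above: */
#if 0
#include <assert.h>
#include <stdint.h>
static void opic_helpers_demo(void)
{
    assert(gen_opic_lt((uint64_t)-5, 3) == 1);              /* -5 < 3 signed  */
    assert(gen_opic_lt(3, (uint64_t)-5) == 0);
    assert(gen_opic_sdiv((uint64_t)-7, 2) == (uint64_t)-3); /* trunc toward 0 */
}
#endif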
2076 /* handle integer constant optimizations and various
2077 machine-independent optimizations */
2078 static void gen_opic(int op)
2080 SValue *v1 = vtop - 1;
2081 SValue *v2 = vtop;
2082 int t1 = v1->type.t & VT_BTYPE;
2083 int t2 = v2->type.t & VT_BTYPE;
2084 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2085 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2086 uint64_t l1 = c1 ? v1->c.i : 0;
2087 uint64_t l2 = c2 ? v2->c.i : 0;
2088 int shm = (t1 == VT_LLONG) ? 63 : 31;
2090 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2091 l1 = ((uint32_t)l1 |
2092 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2093 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2094 l2 = ((uint32_t)l2 |
2095 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2097 if (c1 && c2) {
2098 switch(op) {
2099 case '+': l1 += l2; break;
2100 case '-': l1 -= l2; break;
2101 case '&': l1 &= l2; break;
2102 case '^': l1 ^= l2; break;
2103 case '|': l1 |= l2; break;
2104 case '*': l1 *= l2; break;
2106 case TOK_PDIV:
2107 case '/':
2108 case '%':
2109 case TOK_UDIV:
2110 case TOK_UMOD:
2111 /* if division by zero, generate explicit division */
2112 if (l2 == 0) {
2113 if (const_wanted)
2114 tcc_error("division by zero in constant");
2115 goto general_case;
2117 switch(op) {
2118 default: l1 = gen_opic_sdiv(l1, l2); break;
2119 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2120 case TOK_UDIV: l1 = l1 / l2; break;
2121 case TOK_UMOD: l1 = l1 % l2; break;
2123 break;
2124 case TOK_SHL: l1 <<= (l2 & shm); break;
2125 case TOK_SHR: l1 >>= (l2 & shm); break;
2126 case TOK_SAR:
2127 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2128 break;
2129 /* tests */
2130 case TOK_ULT: l1 = l1 < l2; break;
2131 case TOK_UGE: l1 = l1 >= l2; break;
2132 case TOK_EQ: l1 = l1 == l2; break;
2133 case TOK_NE: l1 = l1 != l2; break;
2134 case TOK_ULE: l1 = l1 <= l2; break;
2135 case TOK_UGT: l1 = l1 > l2; break;
2136 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2137 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2138 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2139 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2140 /* logical */
2141 case TOK_LAND: l1 = l1 && l2; break;
2142 case TOK_LOR: l1 = l1 || l2; break;
2143 default:
2144 goto general_case;
2146 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2147 l1 = ((uint32_t)l1 |
2148 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2149 v1->c.i = l1;
2150 vtop--;
2151 } else {
2152 /* if commutative ops, put c2 as constant */
2153 if (c1 && (op == '+' || op == '&' || op == '^' ||
2154 op == '|' || op == '*')) {
2155 vswap();
2156 c2 = c1; //c = c1, c1 = c2, c2 = c;
2157 l2 = l1; //l = l1, l1 = l2, l2 = l;
2159 if (!const_wanted &&
2160 c1 && ((l1 == 0 &&
2161 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2162 (l1 == -1 && op == TOK_SAR))) {
2163 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2164 vtop--;
2165 } else if (!const_wanted &&
2166 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2167 (op == '|' &&
2168 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2169 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2170 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2171 if (l2 == 1)
2172 vtop->c.i = 0;
2173 vswap();
2174 vtop--;
2175 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2176 op == TOK_PDIV) &&
2177 l2 == 1) ||
2178 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2179 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2180 l2 == 0) ||
2181 (op == '&' &&
2182 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2183 /* filter out NOP operations like x*1, x-0, x&-1... */
2184 vtop--;
2185 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2186 /* try to use shifts instead of muls or divs */
2187 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2188 int n = -1;
2189 while (l2) {
2190 l2 >>= 1;
2191 n++;
2193 vtop->c.i = n;
2194 if (op == '*')
2195 op = TOK_SHL;
2196 else if (op == TOK_PDIV)
2197 op = TOK_SAR;
2198 else
2199 op = TOK_SHR;
2201 goto general_case;
2202 } else if (c2 && (op == '+' || op == '-') &&
2203 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2204 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2205 /* symbol + constant case */
2206 if (op == '-')
2207 l2 = -l2;
2208 l2 += vtop[-1].c.i;
2209 /* The backends can't always deal with addends to symbols
2210 larger than +-1<<31. Don't construct such. */
2211 if ((int)l2 != l2)
2212 goto general_case;
2213 vtop--;
2214 vtop->c.i = l2;
2215 } else {
2216 general_case:
2217 /* call low level op generator */
2218 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2219 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2220 gen_opl(op);
2221 else
2222 gen_opi(op);
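/* Examples of the folding above: "2 + 3" is evaluated at compile time,
   "x * 8" is strength-reduced to "x << 3", "x & 0" and "x % 1" collapse
   to the constant 0, and NOPs such as "x * 1" or "x - 0" simply drop
   the constant operand. */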
2227 /* generate a floating point operation with constant propagation */
2228 static void gen_opif(int op)
2230 int c1, c2;
2231 SValue *v1, *v2;
2232 #if defined _MSC_VER && defined __x86_64__
2233 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2234 volatile
2235 #endif
2236 long double f1, f2;
2238 v1 = vtop - 1;
2239 v2 = vtop;
2240 /* currently, we cannot do computations with forward symbols */
2241 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2242 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2243 if (c1 && c2) {
2244 if (v1->type.t == VT_FLOAT) {
2245 f1 = v1->c.f;
2246 f2 = v2->c.f;
2247 } else if (v1->type.t == VT_DOUBLE) {
2248 f1 = v1->c.d;
2249 f2 = v2->c.d;
2250 } else {
2251 f1 = v1->c.ld;
2252 f2 = v2->c.ld;
2255 /* NOTE: we only do constant propagation for finite numbers (not
2256 NaN or infinity), as per the ANSI spec */
2257 if (!ieee_finite(f1) || !ieee_finite(f2))
2258 goto general_case;
2260 switch(op) {
2261 case '+': f1 += f2; break;
2262 case '-': f1 -= f2; break;
2263 case '*': f1 *= f2; break;
2264 case '/':
2265 if (f2 == 0.0) {
2266 /* If not in initializer we need to potentially generate
2267 FP exceptions at runtime, otherwise we want to fold. */
2268 if (!const_wanted)
2269 goto general_case;
2271 f1 /= f2;
2272 break;
2273 /* XXX: also handles tests ? */
2274 default:
2275 goto general_case;
2277 /* XXX: overflow test ? */
2278 if (v1->type.t == VT_FLOAT) {
2279 v1->c.f = f1;
2280 } else if (v1->type.t == VT_DOUBLE) {
2281 v1->c.d = f1;
2282 } else {
2283 v1->c.ld = f1;
2285 vtop--;
2286 } else {
2287 general_case:
2288 gen_opf(op);
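/* e.g. "1.5 + 2.5" with both operands constant is folded to 4.0 right
   here, while a division by a constant 0.0 outside of a constant
   expression falls through to gen_opf() so that any FP exception can
   still happen at run time. */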
2292 static int pointed_size(CType *type)
2294 int align;
2295 return type_size(pointed_type(type), &align);
2298 static void vla_runtime_pointed_size(CType *type)
2300 int align;
2301 vla_runtime_type_size(pointed_type(type), &align);
2304 static inline int is_null_pointer(SValue *p)
2306 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2307 return 0;
2308 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2309 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2310 ((p->type.t & VT_BTYPE) == VT_PTR &&
2311 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2312 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2313 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
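/* so 0, 0L/0LL and (void *)0 qualify as null pointer constants here,
   but e.g. (char *)0 does not, since the pointed-to type must be plain
   unqualified void */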
2316 static inline int is_integer_btype(int bt)
2318 return (bt == VT_BYTE || bt == VT_SHORT ||
2319 bt == VT_INT || bt == VT_LLONG);
2322 /* check types for comparison or subtraction of pointers */
2323 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2325 CType *type1, *type2, tmp_type1, tmp_type2;
2326 int bt1, bt2;
2328 /* null pointers are accepted for all comparisons, as gcc does */
2329 if (is_null_pointer(p1) || is_null_pointer(p2))
2330 return;
2331 type1 = &p1->type;
2332 type2 = &p2->type;
2333 bt1 = type1->t & VT_BTYPE;
2334 bt2 = type2->t & VT_BTYPE;
2335 /* accept comparison between pointer and integer with a warning */
2336 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2337 if (op != TOK_LOR && op != TOK_LAND )
2338 tcc_warning("comparison between pointer and integer");
2339 return;
2342 /* both must be pointers or implicit function pointers */
2343 if (bt1 == VT_PTR) {
2344 type1 = pointed_type(type1);
2345 } else if (bt1 != VT_FUNC)
2346 goto invalid_operands;
2348 if (bt2 == VT_PTR) {
2349 type2 = pointed_type(type2);
2350 } else if (bt2 != VT_FUNC) {
2351 invalid_operands:
2352 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2354 if ((type1->t & VT_BTYPE) == VT_VOID ||
2355 (type2->t & VT_BTYPE) == VT_VOID)
2356 return;
2357 tmp_type1 = *type1;
2358 tmp_type2 = *type2;
2359 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2360 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2361 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2362 /* gcc-like error if '-' is used */
2363 if (op == '-')
2364 goto invalid_operands;
2365 else
2366 tcc_warning("comparison of distinct pointer types lacks a cast");
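/* For illustration: comparing an 'int *' with a plain integer only
   triggers the pointer/integer warning above, comparing 'int *' with
   'float *' gives the distinct-pointer-types warning, and subtracting
   them ('-') is a hard error via invalid_operands. */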
2370 /* generic gen_op: handles types problems */
2371 ST_FUNC void gen_op(int op)
2373 int u, t1, t2, bt1, bt2, t;
2374 CType type1;
2376 redo:
2377 t1 = vtop[-1].type.t;
2378 t2 = vtop[0].type.t;
2379 bt1 = t1 & VT_BTYPE;
2380 bt2 = t2 & VT_BTYPE;
2382 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2383 tcc_error("operation on a struct");
2384 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2385 if (bt2 == VT_FUNC) {
2386 mk_pointer(&vtop->type);
2387 gaddrof();
2389 if (bt1 == VT_FUNC) {
2390 vswap();
2391 mk_pointer(&vtop->type);
2392 gaddrof();
2393 vswap();
2395 goto redo;
2396 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2397 /* at least one operand is a pointer */
2398 /* relational op: must be both pointers */
2399 if (op >= TOK_ULT && op <= TOK_LOR) {
2400 check_comparison_pointer_types(vtop - 1, vtop, op);
2401 /* pointers are handled as unsigned */
2402 #if PTR_SIZE == 8
2403 t = VT_LLONG | VT_UNSIGNED;
2404 #else
2405 t = VT_INT | VT_UNSIGNED;
2406 #endif
2407 goto std_op;
2409 /* if both pointers, then it must be the '-' op */
2410 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2411 if (op != '-')
2412 tcc_error("cannot use pointers here");
2413 check_comparison_pointer_types(vtop - 1, vtop, op);
2414 /* XXX: check that types are compatible */
2415 if (vtop[-1].type.t & VT_VLA) {
2416 vla_runtime_pointed_size(&vtop[-1].type);
2417 } else {
2418 vpushi(pointed_size(&vtop[-1].type));
2420 vrott(3);
2421 gen_opic(op);
2422 vtop->type.t = ptrdiff_type.t;
2423 vswap();
2424 gen_op(TOK_PDIV);
2425 } else {
2426 /* exactly one pointer : must be '+' or '-'. */
2427 if (op != '-' && op != '+')
2428 tcc_error("cannot use pointers here");
2429 /* Put pointer as first operand */
2430 if (bt2 == VT_PTR) {
2431 vswap();
2432 t = t1, t1 = t2, t2 = t;
2434 #if PTR_SIZE == 4
2435 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2436 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2437 gen_cast_s(VT_INT);
2438 #endif
2439 type1 = vtop[-1].type;
2440 type1.t &= ~VT_ARRAY;
2441 if (vtop[-1].type.t & VT_VLA)
2442 vla_runtime_pointed_size(&vtop[-1].type);
2443 else {
2444 u = pointed_size(&vtop[-1].type);
2445 if (u < 0)
2446 tcc_error("unknown array element size");
2447 #if PTR_SIZE == 8
2448 vpushll(u);
2449 #else
2450 /* XXX: cast to int ? (long long case) */
2451 vpushi(u);
2452 #endif
2454 gen_op('*');
2455 #if 0
2456 /* #ifdef CONFIG_TCC_BCHECK
2457 The main reason for removing this code:
2458 #include <stdio.h>
2459 int main ()
2461 int v[10];
2462 int i = 10;
2463 int j = 9;
2464 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2465 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2467 When this code is enabled, the output looks like
2468 v+i-j = 0xfffffffe
2469 v+(i-j) = 0xbff84000
2471 /* if evaluating constant expression, no code should be
2472 generated, so no bound check */
2473 if (tcc_state->do_bounds_check && !const_wanted) {
2474 /* if bounded pointers, we generate a special code to
2475 test bounds */
2476 if (op == '-') {
2477 vpushi(0);
2478 vswap();
2479 gen_op('-');
2481 gen_bounded_ptr_add();
2482 } else
2483 #endif
2485 gen_opic(op);
2487 /* restore the type in case gen_opic() swapped the operands */
2488 vtop->type = type1;
2490 } else if (is_float(bt1) || is_float(bt2)) {
2491 /* compute bigger type and do implicit casts */
2492 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2493 t = VT_LDOUBLE;
2494 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2495 t = VT_DOUBLE;
2496 } else {
2497 t = VT_FLOAT;
2499 /* floats can only be used for a few operations */
2500 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2501 (op < TOK_ULT || op > TOK_GT))
2502 tcc_error("invalid operands for binary operation");
2503 goto std_op;
2504 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2505 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2506 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2507 t |= VT_UNSIGNED;
2508 t |= (VT_LONG & t1);
2509 goto std_op;
2510 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2511 /* cast to biggest op */
2512 t = VT_LLONG | VT_LONG;
2513 if (bt1 == VT_LLONG)
2514 t &= t1;
2515 if (bt2 == VT_LLONG)
2516 t &= t2;
2517 /* convert to unsigned if it does not fit in a long long */
2518 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2519 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2520 t |= VT_UNSIGNED;
2521 goto std_op;
2522 } else {
2523 /* integer operations */
2524 t = VT_INT | (VT_LONG & (t1 | t2));
2525 /* convert to unsigned if it does not fit in an integer */
2526 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2527 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2528 t |= VT_UNSIGNED;
2529 std_op:
2530 /* XXX: currently, some unsigned operations are explicit, so
2531 we modify them here */
2532 if (t & VT_UNSIGNED) {
2533 if (op == TOK_SAR)
2534 op = TOK_SHR;
2535 else if (op == '/')
2536 op = TOK_UDIV;
2537 else if (op == '%')
2538 op = TOK_UMOD;
2539 else if (op == TOK_LT)
2540 op = TOK_ULT;
2541 else if (op == TOK_GT)
2542 op = TOK_UGT;
2543 else if (op == TOK_LE)
2544 op = TOK_ULE;
2545 else if (op == TOK_GE)
2546 op = TOK_UGE;
2548 vswap();
2549 type1.t = t;
2550 type1.ref = NULL;
2551 gen_cast(&type1);
2552 vswap();
2553 /* special case for shifts and long long: we keep the shift as
2554 an integer */
2555 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2556 type1.t = VT_INT;
2557 gen_cast(&type1);
2558 if (is_float(t))
2559 gen_opif(op);
2560 else
2561 gen_opic(op);
2562 if (op >= TOK_ULT && op <= TOK_GT) {
2563 /* relational op: the result is an int */
2564 vtop->type.t = VT_INT;
2565 } else {
2566 vtop->type.t = t;
2569 // Make sure that we have converted to an rvalue:
2570 if (vtop->r & VT_LVAL)
2571 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
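/* A rough illustration of the pointer handling above: for "p + n" with
   'int *p' the integer is multiplied by sizeof(int) before the add, and
   for "p - q" the byte difference is computed with gen_opic('-'), given
   ptrdiff_t type and divided by the element size via TOK_PDIV. */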
2574 #ifndef TCC_TARGET_ARM
2575 /* generic itof for unsigned long long case */
2576 static void gen_cvt_itof1(int t)
2578 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2579 gen_cvt_itof(t);
2580 #else
2581 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2582 (VT_LLONG | VT_UNSIGNED)) {
2584 if (t == VT_FLOAT)
2585 vpush_global_sym(&func_old_type, TOK___floatundisf);
2586 #if LDOUBLE_SIZE != 8
2587 else if (t == VT_LDOUBLE)
2588 vpush_global_sym(&func_old_type, TOK___floatundixf);
2589 #endif
2590 else
2591 vpush_global_sym(&func_old_type, TOK___floatundidf);
2592 vrott(2);
2593 gfunc_call(1);
2594 vpushi(0);
2595 vtop->r = reg_fret(t);
2596 } else {
2597 gen_cvt_itof(t);
2599 #endif
2601 #endif
2603 /* generic ftoi for unsigned long long case */
2604 static void gen_cvt_ftoi1(int t)
2606 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2607 gen_cvt_ftoi(t);
2608 #else
2609 int st;
2611 if (t == (VT_LLONG | VT_UNSIGNED)) {
2612 /* not handled natively */
2613 st = vtop->type.t & VT_BTYPE;
2614 if (st == VT_FLOAT)
2615 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2616 #if LDOUBLE_SIZE != 8
2617 else if (st == VT_LDOUBLE)
2618 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2619 #endif
2620 else
2621 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2622 vrott(2);
2623 gfunc_call(1);
2624 vpushi(0);
2625 vtop->r = REG_IRET;
2626 vtop->r2 = REG_LRET;
2627 } else {
2628 gen_cvt_ftoi(t);
2630 #endif
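/* e.g. on the targets taking the #else path above, converting a double
   to unsigned long long is not done natively: the value is passed to
   the __fixunsdfdi helper (or __fixunssfdi / __fixunsxfdi for float /
   long double) and the 64-bit result is taken from REG_IRET/REG_LRET. */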
2633 /* force char or short cast */
2634 static void force_charshort_cast(int t)
2636 int bits, dbt;
2638 /* cannot cast static initializers */
2639 if (STATIC_DATA_WANTED)
2640 return;
2642 dbt = t & VT_BTYPE;
2643 /* XXX: add optimization if lvalue : just change type and offset */
2644 if (dbt == VT_BYTE)
2645 bits = 8;
2646 else
2647 bits = 16;
2648 if (t & VT_UNSIGNED) {
2649 vpushi((1 << bits) - 1);
2650 gen_op('&');
2651 } else {
2652 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2653 bits = 64 - bits;
2654 else
2655 bits = 32 - bits;
2656 vpushi(bits);
2657 gen_op(TOK_SHL);
2658 /* the result must be signed, otherwise the SAR would be converted
2659 to an SHR. This was not the case when "t" was a signed short
2660 and the last value on the stack was an unsigned int */
2661 vtop->type.t &= ~VT_UNSIGNED;
2662 vpushi(bits);
2663 gen_op(TOK_SAR);
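/* e.g. truncating an int to signed char is generated as
   "(x << 24) >> 24" (SHL followed by SAR), while the unsigned case is
   simply "x & 0xff". */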
2667 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2668 static void gen_cast_s(int t)
2670 CType type;
2671 type.t = t;
2672 type.ref = NULL;
2673 gen_cast(&type);
2676 static void gen_cast(CType *type)
2678 int sbt, dbt, sf, df, c, p;
2680 /* special delayed cast for char/short */
2681 /* XXX: in some cases (multiple cascaded casts), it may still
2682 be incorrect */
2683 if (vtop->r & VT_MUSTCAST) {
2684 vtop->r &= ~VT_MUSTCAST;
2685 force_charshort_cast(vtop->type.t);
2688 /* bitfields first get cast to ints */
2689 if (vtop->type.t & VT_BITFIELD) {
2690 gv(RC_INT);
2693 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2694 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2696 if (sbt != dbt) {
2697 sf = is_float(sbt);
2698 df = is_float(dbt);
2699 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2700 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2701 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2702 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
2703 #endif
2704 if (c) {
2705 /* constant case: we can do it now */
2706 /* XXX: in ISOC, cannot do it if error in convert */
2707 if (sbt == VT_FLOAT)
2708 vtop->c.ld = vtop->c.f;
2709 else if (sbt == VT_DOUBLE)
2710 vtop->c.ld = vtop->c.d;
2712 if (df) {
2713 if ((sbt & VT_BTYPE) == VT_LLONG) {
2714 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2715 vtop->c.ld = vtop->c.i;
2716 else
2717 vtop->c.ld = -(long double)-vtop->c.i;
2718 } else if(!sf) {
2719 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2720 vtop->c.ld = (uint32_t)vtop->c.i;
2721 else
2722 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2725 if (dbt == VT_FLOAT)
2726 vtop->c.f = (float)vtop->c.ld;
2727 else if (dbt == VT_DOUBLE)
2728 vtop->c.d = (double)vtop->c.ld;
2729 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2730 vtop->c.i = vtop->c.ld;
2731 } else if (sf && dbt == VT_BOOL) {
2732 vtop->c.i = (vtop->c.ld != 0);
2733 } else {
2734 if(sf)
2735 vtop->c.i = vtop->c.ld;
2736 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2738 else if (sbt & VT_UNSIGNED)
2739 vtop->c.i = (uint32_t)vtop->c.i;
2740 #if PTR_SIZE == 8
2741 else if (sbt == VT_PTR)
2743 #endif
2744 else if (sbt != VT_LLONG)
2745 vtop->c.i = ((uint32_t)vtop->c.i |
2746 -(vtop->c.i & 0x80000000));
2748 if (dbt == (VT_LLONG|VT_UNSIGNED))
2750 else if (dbt == VT_BOOL)
2751 vtop->c.i = (vtop->c.i != 0);
2752 #if PTR_SIZE == 8
2753 else if (dbt == VT_PTR)
2755 #endif
2756 else if (dbt != VT_LLONG) {
2757 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2758 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2759 0xffffffff);
2760 vtop->c.i &= m;
2761 if (!(dbt & VT_UNSIGNED))
2762 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2765 } else if (p && dbt == VT_BOOL) {
2766 vtop->r = VT_CONST;
2767 vtop->c.i = 1;
2768 } else {
2769 /* non constant case: generate code */
2770 if (sf && df) {
2771 /* convert from fp to fp */
2772 gen_cvt_ftof(dbt);
2773 } else if (df) {
2774 /* convert int to fp */
2775 gen_cvt_itof1(dbt);
2776 } else if (sf) {
2777 /* convert fp to int */
2778 if (dbt == VT_BOOL) {
2779 vpushi(0);
2780 gen_op(TOK_NE);
2781 } else {
2782 /* we handle char/short/etc... with generic code */
2783 if (dbt != (VT_INT | VT_UNSIGNED) &&
2784 dbt != (VT_LLONG | VT_UNSIGNED) &&
2785 dbt != VT_LLONG)
2786 dbt = VT_INT;
2787 gen_cvt_ftoi1(dbt);
2788 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2789 /* additional cast for char/short... */
2790 vtop->type.t = dbt;
2791 gen_cast(type);
2794 #if PTR_SIZE == 4
2795 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2796 if ((sbt & VT_BTYPE) != VT_LLONG) {
2797 /* scalar to long long */
2798 /* machine independent conversion */
2799 gv(RC_INT);
2800 /* generate high word */
2801 if (sbt == (VT_INT | VT_UNSIGNED)) {
2802 vpushi(0);
2803 gv(RC_INT);
2804 } else {
2805 if (sbt == VT_PTR) {
2806 /* cast from pointer to int before we apply the shift
2807 operation, which pointers don't support */
2808 gen_cast_s(VT_INT);
2810 gv_dup();
2811 vpushi(31);
2812 gen_op(TOK_SAR);
2814 /* patch second register */
2815 vtop[-1].r2 = vtop->r;
2816 vpop();
2818 #else
2819 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2820 (dbt & VT_BTYPE) == VT_PTR ||
2821 (dbt & VT_BTYPE) == VT_FUNC) {
2822 if ((sbt & VT_BTYPE) != VT_LLONG &&
2823 (sbt & VT_BTYPE) != VT_PTR &&
2824 (sbt & VT_BTYPE) != VT_FUNC) {
2825 /* need to convert from 32bit to 64bit */
2826 gv(RC_INT);
2827 if (sbt != (VT_INT | VT_UNSIGNED)) {
2828 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2829 gen_cvt_sxtw();
2830 #elif defined(TCC_TARGET_X86_64)
2831 int r = gv(RC_INT);
2832 /* x86_64 specific: movslq */
2833 o(0x6348);
2834 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2835 #else
2836 #error
2837 #endif
2838 } else if (sbt & VT_UNSIGNED) {
2839 #if defined(TCC_TARGET_RISCV64)
2840 /* RISC-V keeps 32bit vals in registers sign-extended.
2841 So here we need a zero-extension. */
2842 vtop->type.t = VT_LLONG;
2843 vpushi(32);
2844 gen_op(TOK_SHL);
2845 vpushi(32);
2846 gen_op(TOK_SHR);
2847 #endif
2850 #endif
2851 } else if (dbt == VT_BOOL) {
2852 /* scalar to bool */
2853 vpushi(0);
2854 gen_op(TOK_NE);
2855 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2856 (dbt & VT_BTYPE) == VT_SHORT) {
2857 if (sbt == VT_PTR) {
2858 vtop->type.t = VT_INT;
2859 tcc_warning("nonportable conversion from pointer to char/short");
2861 force_charshort_cast(dbt);
2862 } else if ((dbt & VT_BTYPE) == VT_INT) {
2863 /* scalar to int */
2864 if ((sbt & VT_BTYPE) == VT_LLONG) {
2865 #if PTR_SIZE == 4
2866 /* from long long: just take low order word */
2867 lexpand();
2868 vpop();
2869 #else
2870 if (dbt & VT_UNSIGNED) {
2871 /* XXX some architectures (e.g. risc-v) would prefer
2872 this to be merely a 32-to-64 bit sign or zero
2873 extension. */
2874 vpushi(0xffffffff);
2875 vtop->type.t |= VT_UNSIGNED;
2876 gen_op('&');
2877 } else {
2879 #endif
2881 /* if lvalue and single word type, nothing to do because
2882 the lvalue already contains the real type size (see
2883 VT_LVAL_xxx constants) */
2886 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2887 /* if we are casting between pointer types,
2888 we must update the VT_LVAL_xxx size */
2889 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2890 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2892 vtop->type = *type;
2893 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
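/* Two quick examples of the above: casting the integer constant 300 to
   unsigned char is folded here to 44 (300 & 0xff), whereas casting a
   non-constant float to _Bool generates a runtime compare against 0. */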
2896 /* return type size as known at compile time. Put alignment at 'a' */
2897 ST_FUNC int type_size(CType *type, int *a)
2899 Sym *s;
2900 int bt;
2902 bt = type->t & VT_BTYPE;
2903 if (bt == VT_STRUCT) {
2904 /* struct/union */
2905 s = type->ref;
2906 *a = s->r;
2907 return s->c;
2908 } else if (bt == VT_PTR) {
2909 if (type->t & VT_ARRAY) {
2910 int ts;
2912 s = type->ref;
2913 ts = type_size(&s->type, a);
2915 if (ts < 0 && s->c < 0)
2916 ts = -ts;
2918 return ts * s->c;
2919 } else {
2920 *a = PTR_SIZE;
2921 return PTR_SIZE;
2923 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2924 return -1; /* incomplete enum */
2925 } else if (bt == VT_LDOUBLE) {
2926 *a = LDOUBLE_ALIGN;
2927 return LDOUBLE_SIZE;
2928 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2929 #ifdef TCC_TARGET_I386
2930 #ifdef TCC_TARGET_PE
2931 *a = 8;
2932 #else
2933 *a = 4;
2934 #endif
2935 #elif defined(TCC_TARGET_ARM)
2936 #ifdef TCC_ARM_EABI
2937 *a = 8;
2938 #else
2939 *a = 4;
2940 #endif
2941 #else
2942 *a = 8;
2943 #endif
2944 return 8;
2945 } else if (bt == VT_INT || bt == VT_FLOAT) {
2946 *a = 4;
2947 return 4;
2948 } else if (bt == VT_SHORT) {
2949 *a = 2;
2950 return 2;
2951 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2952 *a = 8;
2953 return 16;
2954 } else {
2955 /* char, void, function, _Bool */
2956 *a = 1;
2957 return 1;
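/* e.g. for 'int v[10]' this returns 40 with *a set to 4; incomplete
   types (an undefined struct or enum) yield a negative size, which
   callers such as pointed_size()/gen_op() use to report "unknown array
   element size". */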
2961 /* push type size as known at runtime on top of value stack. Put
2962 alignment at 'a' */
2963 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2965 if (type->t & VT_VLA) {
2966 type_size(&type->ref->type, a);
2967 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2968 } else {
2969 vpushi(type_size(type, a));
2973 /* return the pointed type of t */
2974 static inline CType *pointed_type(CType *type)
2976 return &type->ref->type;
2979 /* modify type so that it is a pointer to the given type. */
2980 ST_FUNC void mk_pointer(CType *type)
2982 Sym *s;
2983 s = sym_push(SYM_FIELD, type, 0, -1);
2984 type->t = VT_PTR | (type->t & VT_STORAGE);
2985 type->ref = s;
2988 /* compare function types. OLD functions match any new functions */
2989 static int is_compatible_func(CType *type1, CType *type2)
2991 Sym *s1, *s2;
2993 s1 = type1->ref;
2994 s2 = type2->ref;
2995 if (s1->f.func_call != s2->f.func_call)
2996 return 0;
2997 if (s1->f.func_type != s2->f.func_type
2998 && s1->f.func_type != FUNC_OLD
2999 && s2->f.func_type != FUNC_OLD)
3000 return 0;
3001 /* we should check the function return type for FUNC_OLD too
3002 but that causes problems with the internally used support
3003 functions such as TOK_memmove */
3004 if (s1->f.func_type == FUNC_OLD && !s1->next)
3005 return 1;
3006 if (s2->f.func_type == FUNC_OLD && !s2->next)
3007 return 1;
3008 for (;;) {
3009 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3010 return 0;
3011 s1 = s1->next;
3012 s2 = s2->next;
3013 if (!s1)
3014 return !s2;
3015 if (!s2)
3016 return 0;
3020 /* return true if type1 and type2 are the same. If unqualified is
3021 true, qualifiers on the types are ignored.
3023 static int compare_types(CType *type1, CType *type2, int unqualified)
3025 int bt1, t1, t2;
3027 t1 = type1->t & VT_TYPE;
3028 t2 = type2->t & VT_TYPE;
3029 if (unqualified) {
3030 /* strip qualifiers before comparing */
3031 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3032 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3035 /* Default vs. explicit signedness only matters for char */
3036 if ((t1 & VT_BTYPE) != VT_BYTE) {
3037 t1 &= ~VT_DEFSIGN;
3038 t2 &= ~VT_DEFSIGN;
3040 /* XXX: bitfields ? */
3041 if (t1 != t2)
3042 return 0;
3044 if ((t1 & VT_ARRAY)
3045 && !(type1->ref->c < 0
3046 || type2->ref->c < 0
3047 || type1->ref->c == type2->ref->c))
3048 return 0;
3050 /* test more complicated cases */
3051 bt1 = t1 & VT_BTYPE;
3052 if (bt1 == VT_PTR) {
3053 type1 = pointed_type(type1);
3054 type2 = pointed_type(type2);
3055 return is_compatible_types(type1, type2);
3056 } else if (bt1 == VT_STRUCT) {
3057 return (type1->ref == type2->ref);
3058 } else if (bt1 == VT_FUNC) {
3059 return is_compatible_func(type1, type2);
3060 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3061 return type1->ref == type2->ref;
3062 } else {
3063 return 1;
3067 /* return true if type1 and type2 are exactly the same (including
3068 qualifiers).
3070 static int is_compatible_types(CType *type1, CType *type2)
3072 return compare_types(type1,type2,0);
3075 /* return true if type1 and type2 are the same (ignoring qualifiers).
3077 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3079 return compare_types(type1,type2,1);
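/* e.g. 'const int' and 'int' only compare equal through the unqualified
   variant, while 'int *' and 'unsigned int *' are never compatible since
   the pointed-to types are compared including their signedness. */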
3082 /* print a type. If 'varstr' is not NULL, then the variable is also
3083 printed in the type */
3084 /* XXX: union */
3085 /* XXX: add array and function pointers */
3086 static void type_to_str(char *buf, int buf_size,
3087 CType *type, const char *varstr)
3089 int bt, v, t;
3090 Sym *s, *sa;
3091 char buf1[256];
3092 const char *tstr;
3094 t = type->t;
3095 bt = t & VT_BTYPE;
3096 buf[0] = '\0';
3098 if (t & VT_EXTERN)
3099 pstrcat(buf, buf_size, "extern ");
3100 if (t & VT_STATIC)
3101 pstrcat(buf, buf_size, "static ");
3102 if (t & VT_TYPEDEF)
3103 pstrcat(buf, buf_size, "typedef ");
3104 if (t & VT_INLINE)
3105 pstrcat(buf, buf_size, "inline ");
3106 if (t & VT_VOLATILE)
3107 pstrcat(buf, buf_size, "volatile ");
3108 if (t & VT_CONSTANT)
3109 pstrcat(buf, buf_size, "const ");
3111 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3112 || ((t & VT_UNSIGNED)
3113 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3114 && !IS_ENUM(t)
3116 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3118 buf_size -= strlen(buf);
3119 buf += strlen(buf);
3121 switch(bt) {
3122 case VT_VOID:
3123 tstr = "void";
3124 goto add_tstr;
3125 case VT_BOOL:
3126 tstr = "_Bool";
3127 goto add_tstr;
3128 case VT_BYTE:
3129 tstr = "char";
3130 goto add_tstr;
3131 case VT_SHORT:
3132 tstr = "short";
3133 goto add_tstr;
3134 case VT_INT:
3135 tstr = "int";
3136 goto maybe_long;
3137 case VT_LLONG:
3138 tstr = "long long";
3139 maybe_long:
3140 if (t & VT_LONG)
3141 tstr = "long";
3142 if (!IS_ENUM(t))
3143 goto add_tstr;
3144 tstr = "enum ";
3145 goto tstruct;
3146 case VT_FLOAT:
3147 tstr = "float";
3148 goto add_tstr;
3149 case VT_DOUBLE:
3150 tstr = "double";
3151 goto add_tstr;
3152 case VT_LDOUBLE:
3153 tstr = "long double";
3154 add_tstr:
3155 pstrcat(buf, buf_size, tstr);
3156 break;
3157 case VT_STRUCT:
3158 tstr = "struct ";
3159 if (IS_UNION(t))
3160 tstr = "union ";
3161 tstruct:
3162 pstrcat(buf, buf_size, tstr);
3163 v = type->ref->v & ~SYM_STRUCT;
3164 if (v >= SYM_FIRST_ANOM)
3165 pstrcat(buf, buf_size, "<anonymous>");
3166 else
3167 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3168 break;
3169 case VT_FUNC:
3170 s = type->ref;
3171 buf1[0]=0;
3172 if (varstr && '*' == *varstr) {
3173 pstrcat(buf1, sizeof(buf1), "(");
3174 pstrcat(buf1, sizeof(buf1), varstr);
3175 pstrcat(buf1, sizeof(buf1), ")");
3177 pstrcat(buf1, buf_size, "(");
3178 sa = s->next;
3179 while (sa != NULL) {
3180 char buf2[256];
3181 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3182 pstrcat(buf1, sizeof(buf1), buf2);
3183 sa = sa->next;
3184 if (sa)
3185 pstrcat(buf1, sizeof(buf1), ", ");
3187 if (s->f.func_type == FUNC_ELLIPSIS)
3188 pstrcat(buf1, sizeof(buf1), ", ...");
3189 pstrcat(buf1, sizeof(buf1), ")");
3190 type_to_str(buf, buf_size, &s->type, buf1);
3191 goto no_var;
3192 case VT_PTR:
3193 s = type->ref;
3194 if (t & VT_ARRAY) {
3195 if (varstr && '*' == *varstr)
3196 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3197 else
3198 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3199 type_to_str(buf, buf_size, &s->type, buf1);
3200 goto no_var;
3202 pstrcpy(buf1, sizeof(buf1), "*");
3203 if (t & VT_CONSTANT)
3204 pstrcat(buf1, buf_size, "const ");
3205 if (t & VT_VOLATILE)
3206 pstrcat(buf1, buf_size, "volatile ");
3207 if (varstr)
3208 pstrcat(buf1, sizeof(buf1), varstr);
3209 type_to_str(buf, buf_size, &s->type, buf1);
3210 goto no_var;
3212 if (varstr) {
3213 pstrcat(buf, buf_size, " ");
3214 pstrcat(buf, buf_size, varstr);
3216 no_var: ;
3219 /* verify type compatibility to store vtop in 'dt' type, and generate
3220 casts if needed. */
3221 static void gen_assign_cast(CType *dt)
3223 CType *st, *type1, *type2;
3224 char buf1[256], buf2[256];
3225 int dbt, sbt, qualwarn, lvl;
3227 st = &vtop->type; /* source type */
3228 dbt = dt->t & VT_BTYPE;
3229 sbt = st->t & VT_BTYPE;
3230 if (sbt == VT_VOID || dbt == VT_VOID) {
3231 if (sbt == VT_VOID && dbt == VT_VOID)
3232 ; /* It is Ok if both are void */
3233 else
3234 tcc_error("cannot cast from/to void");
3236 if (dt->t & VT_CONSTANT)
3237 tcc_warning("assignment of read-only location");
3238 switch(dbt) {
3239 case VT_PTR:
3240 /* special cases for pointers */
3241 /* '0' can also be a pointer */
3242 if (is_null_pointer(vtop))
3243 break;
3244 /* accept implicit integer to pointer conversion with a warning */
3245 if (is_integer_btype(sbt)) {
3246 tcc_warning("assignment makes pointer from integer without a cast");
3247 break;
3249 type1 = pointed_type(dt);
3250 if (sbt == VT_PTR)
3251 type2 = pointed_type(st);
3252 else if (sbt == VT_FUNC)
3253 type2 = st; /* a function is implicitly a function pointer */
3254 else
3255 goto error;
3256 if (is_compatible_types(type1, type2))
3257 break;
3258 for (qualwarn = lvl = 0;; ++lvl) {
3259 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3260 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3261 qualwarn = 1;
3262 dbt = type1->t & (VT_BTYPE|VT_LONG);
3263 sbt = type2->t & (VT_BTYPE|VT_LONG);
3264 if (dbt != VT_PTR || sbt != VT_PTR)
3265 break;
3266 type1 = pointed_type(type1);
3267 type2 = pointed_type(type2);
3269 if (!is_compatible_unqualified_types(type1, type2)) {
3270 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3271 /* void * can match anything */
3272 } else if (dbt == sbt
3273 && is_integer_btype(sbt & VT_BTYPE)
3274 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3275 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3276 /* Like GCC, don't warn by default for mere changes
3277 in pointer target signedness. Do warn for different
3278 base types, though, in particular for unsigned enums
3279 and signed int targets. */
3280 } else {
3281 tcc_warning("assignment from incompatible pointer type");
3282 break;
3285 if (qualwarn)
3286 tcc_warning("assignment discards qualifiers from pointer target type");
3287 break;
3288 case VT_BYTE:
3289 case VT_SHORT:
3290 case VT_INT:
3291 case VT_LLONG:
3292 if (sbt == VT_PTR || sbt == VT_FUNC) {
3293 tcc_warning("assignment makes integer from pointer without a cast");
3294 } else if (sbt == VT_STRUCT) {
3295 goto case_VT_STRUCT;
3297 /* XXX: more tests */
3298 break;
3299 case VT_STRUCT:
3300 case_VT_STRUCT:
3301 if (!is_compatible_unqualified_types(dt, st)) {
3302 error:
3303 type_to_str(buf1, sizeof(buf1), st, NULL);
3304 type_to_str(buf2, sizeof(buf2), dt, NULL);
3305 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3307 break;
3309 gen_cast(dt);
3312 /* store vtop in lvalue pushed on stack */
3313 ST_FUNC void vstore(void)
3315 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3317 ft = vtop[-1].type.t;
3318 sbt = vtop->type.t & VT_BTYPE;
3319 dbt = ft & VT_BTYPE;
3320 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3321 (sbt == VT_INT && dbt == VT_SHORT))
3322 && !(vtop->type.t & VT_BITFIELD)) {
3323 /* optimize char/short casts */
3324 delayed_cast = VT_MUSTCAST;
3325 vtop->type.t = ft & VT_TYPE;
3326 /* XXX: factorize */
3327 if (ft & VT_CONSTANT)
3328 tcc_warning("assignment of read-only location");
3329 } else {
3330 delayed_cast = 0;
3331 if (!(ft & VT_BITFIELD))
3332 gen_assign_cast(&vtop[-1].type);
3335 if (sbt == VT_STRUCT) {
3336 /* if structure, only generate pointer */
3337 /* structure assignment : generate memcpy */
3338 /* XXX: optimize if small size */
3339 size = type_size(&vtop->type, &align);
3341 /* destination */
3342 vswap();
3343 vtop->type.t = VT_PTR;
3344 gaddrof();
3346 /* address of memcpy() */
3347 #ifdef TCC_ARM_EABI
3348 if(!(align & 7))
3349 vpush_global_sym(&func_old_type, TOK_memcpy8);
3350 else if(!(align & 3))
3351 vpush_global_sym(&func_old_type, TOK_memcpy4);
3352 else
3353 #endif
3354 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3355 vpush_global_sym(&func_old_type, TOK_memmove);
3357 vswap();
3358 /* source */
3359 vpushv(vtop - 2);
3360 vtop->type.t = VT_PTR;
3361 gaddrof();
3362 /* type size */
3363 vpushi(size);
3364 gfunc_call(3);
3366 /* leave source on stack */
3367 } else if (ft & VT_BITFIELD) {
3368 /* bitfield store handling */
3370 /* save lvalue as expression result (example: s.b = s.a = n;) */
3371 vdup(), vtop[-1] = vtop[-2];
3373 bit_pos = BIT_POS(ft);
3374 bit_size = BIT_SIZE(ft);
3375 /* remove bit field info to avoid loops */
3376 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3378 if ((ft & VT_BTYPE) == VT_BOOL) {
3379 gen_cast(&vtop[-1].type);
3380 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3383 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3384 if (r == VT_STRUCT) {
3385 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3386 store_packed_bf(bit_pos, bit_size);
3387 } else {
3388 unsigned long long mask = (1ULL << bit_size) - 1;
3389 if ((ft & VT_BTYPE) != VT_BOOL) {
3390 /* mask source */
3391 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3392 vpushll(mask);
3393 else
3394 vpushi((unsigned)mask);
3395 gen_op('&');
3397 /* shift source */
3398 vpushi(bit_pos);
3399 gen_op(TOK_SHL);
3400 vswap();
3401 /* duplicate destination */
3402 vdup();
3403 vrott(3);
3404 /* load destination, mask and or with source */
3405 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3406 vpushll(~(mask << bit_pos));
3407 else
3408 vpushi(~((unsigned)mask << bit_pos));
3409 gen_op('&');
3410 gen_op('|');
3411 /* store result */
3412 vstore();
3413 /* ... and discard */
3414 vpop();
3416 } else if (dbt == VT_VOID) {
3417 --vtop;
3418 } else {
3419 #ifdef CONFIG_TCC_BCHECK
3420 /* bound check case */
3421 if (vtop[-1].r & VT_MUSTBOUND) {
3422 vswap();
3423 gbound();
3424 vswap();
3426 #endif
3427 rc = RC_INT;
3428 if (is_float(ft)) {
3429 rc = RC_FLOAT;
3430 #ifdef TCC_TARGET_X86_64
3431 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3432 rc = RC_ST0;
3433 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3434 rc = RC_FRET;
3436 #elif defined TCC_TARGET_RISCV64
3437 if (dbt == VT_LDOUBLE)
3438 rc = RC_INT;
3439 #endif
3441 r = gv(rc); /* generate value */
3442 /* if lvalue was saved on stack, must read it */
3443 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3444 SValue sv;
3445 t = get_reg(RC_INT);
3446 #if PTR_SIZE == 8
3447 sv.type.t = VT_PTR;
3448 #else
3449 sv.type.t = VT_INT;
3450 #endif
3451 sv.r = VT_LOCAL | VT_LVAL;
3452 sv.c.i = vtop[-1].c.i;
3453 load(t, &sv);
3454 vtop[-1].r = t | VT_LVAL;
3456 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3457 #ifdef TCC_TARGET_RISCV64
3458 if (dbt == VT_QLONG || dbt == VT_LDOUBLE) {
3459 int addr_type = VT_LLONG, load_size = 8, load_type = VT_LLONG;
3460 #elif PTR_SIZE == 8
3461 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3462 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3463 #else
3464 if ((ft & VT_BTYPE) == VT_LLONG) {
3465 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3466 #endif
3467 vtop[-1].type.t = load_type;
3468 store(r, vtop - 1);
3469 vswap();
3470 /* convert to int to increment easily */
3471 vtop->type.t = addr_type;
3472 gaddrof();
3473 vpushi(load_size);
3474 gen_op('+');
3475 vtop->r |= VT_LVAL;
3476 vswap();
3477 vtop[-1].type.t = load_type;
3478 /* XXX: it works because r2 is spilled last ! */
3479 store(vtop->r2, vtop - 1);
3480 } else {
3481 store(r, vtop - 1);
3484 vswap();
3485 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3486 vtop->r |= delayed_cast;
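/* Illustration of the bit-field path above: for a 3-bit field at
   bit_pos the stored word is computed roughly as
       dest = (dest & ~(7 << bit_pos)) | ((src & 7) << bit_pos);
   i.e. mask the source, shift it into place, and read-modify-write the
   containing word. */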
3490 /* handle ++/--; 'post' selects post- vs pre- form, c is the token ++ or -- */
3491 ST_FUNC void inc(int post, int c)
3493 test_lvalue();
3494 vdup(); /* save lvalue */
3495 if (post) {
3496 gv_dup(); /* duplicate value */
3497 vrotb(3);
3498 vrotb(3);
3500 /* add constant */
3501 vpushi(c - TOK_MID);
3502 gen_op('+');
3503 vstore(); /* store value */
3504 if (post)
3505 vpop(); /* if post op, return saved value */
3508 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3510 /* read the string */
3511 if (tok != TOK_STR)
3512 expect(msg);
3513 cstr_new(astr);
3514 while (tok == TOK_STR) {
3515 /* XXX: add \0 handling too ? */
3516 cstr_cat(astr, tokc.str.data, -1);
3517 next();
3519 cstr_ccat(astr, '\0');
3522 /* If i is >= 1 and a power of two, returns log2(i)+1.
3523 If i is 0, returns 0. */
3524 static int exact_log2p1(int i)
3526 int ret;
3527 if (!i)
3528 return 0;
3529 for (ret = 1; i >= 1 << 8; ret += 8)
3530 i >>= 8;
3531 if (i >= 1 << 4)
3532 ret += 4, i >>= 4;
3533 if (i >= 1 << 2)
3534 ret += 2, i >>= 2;
3535 if (i >= 1 << 1)
3536 ret++;
3537 return ret;
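/* e.g. exact_log2p1(1) == 1, exact_log2p1(8) == 4 and
   exact_log2p1(4096) == 13; the attribute code below stores this value
   in a.aligned and recovers the alignment as 1 << (aligned - 1). */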
3540 /* Parse __attribute__((...)) GNUC extension. */
3541 static void parse_attribute(AttributeDef *ad)
3543 int t, n;
3544 CString astr;
3546 redo:
3547 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3548 return;
3549 next();
3550 skip('(');
3551 skip('(');
3552 while (tok != ')') {
3553 if (tok < TOK_IDENT)
3554 expect("attribute name");
3555 t = tok;
3556 next();
3557 switch(t) {
3558 case TOK_CLEANUP1:
3559 case TOK_CLEANUP2:
3561 Sym *s;
3563 skip('(');
3564 s = sym_find(tok);
3565 if (!s) {
3566 tcc_warning("implicit declaration of function '%s'",
3567 get_tok_str(tok, &tokc));
3568 s = external_global_sym(tok, &func_old_type);
3570 ad->cleanup_func = s;
3571 next();
3572 skip(')');
3573 break;
3575 case TOK_SECTION1:
3576 case TOK_SECTION2:
3577 skip('(');
3578 parse_mult_str(&astr, "section name");
3579 ad->section = find_section(tcc_state, (char *)astr.data);
3580 skip(')');
3581 cstr_free(&astr);
3582 break;
3583 case TOK_ALIAS1:
3584 case TOK_ALIAS2:
3585 skip('(');
3586 parse_mult_str(&astr, "alias(\"target\")");
3587 ad->alias_target = /* save string as token, for later */
3588 tok_alloc((char*)astr.data, astr.size-1)->tok;
3589 skip(')');
3590 cstr_free(&astr);
3591 break;
3592 case TOK_VISIBILITY1:
3593 case TOK_VISIBILITY2:
3594 skip('(');
3595 parse_mult_str(&astr,
3596 "visibility(\"default|hidden|internal|protected\")");
3597 if (!strcmp (astr.data, "default"))
3598 ad->a.visibility = STV_DEFAULT;
3599 else if (!strcmp (astr.data, "hidden"))
3600 ad->a.visibility = STV_HIDDEN;
3601 else if (!strcmp (astr.data, "internal"))
3602 ad->a.visibility = STV_INTERNAL;
3603 else if (!strcmp (astr.data, "protected"))
3604 ad->a.visibility = STV_PROTECTED;
3605 else
3606 expect("visibility(\"default|hidden|internal|protected\")");
3607 skip(')');
3608 cstr_free(&astr);
3609 break;
3610 case TOK_ALIGNED1:
3611 case TOK_ALIGNED2:
3612 if (tok == '(') {
3613 next();
3614 n = expr_const();
3615 if (n <= 0 || (n & (n - 1)) != 0)
3616 tcc_error("alignment must be a positive power of two");
3617 skip(')');
3618 } else {
3619 n = MAX_ALIGN;
3621 ad->a.aligned = exact_log2p1(n);
3622 if (n != 1 << (ad->a.aligned - 1))
3623 tcc_error("alignment of %d is larger than implemented", n);
3624 break;
3625 case TOK_PACKED1:
3626 case TOK_PACKED2:
3627 ad->a.packed = 1;
3628 break;
3629 case TOK_WEAK1:
3630 case TOK_WEAK2:
3631 ad->a.weak = 1;
3632 break;
3633 case TOK_UNUSED1:
3634 case TOK_UNUSED2:
3635 /* currently, no need to handle it because tcc does not
3636 track unused objects */
3637 break;
3638 case TOK_NORETURN1:
3639 case TOK_NORETURN2:
3640 ad->f.func_noreturn = 1;
3641 break;
3642 case TOK_CDECL1:
3643 case TOK_CDECL2:
3644 case TOK_CDECL3:
3645 ad->f.func_call = FUNC_CDECL;
3646 break;
3647 case TOK_STDCALL1:
3648 case TOK_STDCALL2:
3649 case TOK_STDCALL3:
3650 ad->f.func_call = FUNC_STDCALL;
3651 break;
3652 #ifdef TCC_TARGET_I386
3653 case TOK_REGPARM1:
3654 case TOK_REGPARM2:
3655 skip('(');
3656 n = expr_const();
3657 if (n > 3)
3658 n = 3;
3659 else if (n < 0)
3660 n = 0;
3661 if (n > 0)
3662 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3663 skip(')');
3664 break;
3665 case TOK_FASTCALL1:
3666 case TOK_FASTCALL2:
3667 case TOK_FASTCALL3:
3668 ad->f.func_call = FUNC_FASTCALLW;
3669 break;
3670 #endif
3671 case TOK_MODE:
3672 skip('(');
3673 switch(tok) {
3674 case TOK_MODE_DI:
3675 ad->attr_mode = VT_LLONG + 1;
3676 break;
3677 case TOK_MODE_QI:
3678 ad->attr_mode = VT_BYTE + 1;
3679 break;
3680 case TOK_MODE_HI:
3681 ad->attr_mode = VT_SHORT + 1;
3682 break;
3683 case TOK_MODE_SI:
3684 case TOK_MODE_word:
3685 ad->attr_mode = VT_INT + 1;
3686 break;
3687 default:
3688 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3689 break;
3691 next();
3692 skip(')');
3693 break;
3694 case TOK_DLLEXPORT:
3695 ad->a.dllexport = 1;
3696 break;
3697 case TOK_NODECORATE:
3698 ad->a.nodecorate = 1;
3699 break;
3700 case TOK_DLLIMPORT:
3701 ad->a.dllimport = 1;
3702 break;
3703 default:
3704 if (tcc_state->warn_unsupported)
3705 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3706 /* skip parameters */
3707 if (tok == '(') {
3708 int parenthesis = 0;
3709 do {
3710 if (tok == '(')
3711 parenthesis++;
3712 else if (tok == ')')
3713 parenthesis--;
3714 next();
3715 } while (parenthesis && tok != -1);
3717 break;
3719 if (tok != ',')
3720 break;
3721 next();
3723 skip(')');
3724 skip(')');
3725 goto redo;
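/* e.g. __attribute__((aligned(8), packed)) ends up as a.aligned == 4
   (exact_log2p1(8)) and a.packed == 1; a bare 'aligned' uses MAX_ALIGN,
   and unrecognized attribute names are skipped together with their
   parenthesized arguments, warning only if warn_unsupported is set. */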
3728 static Sym * find_field (CType *type, int v, int *cumofs)
3730 Sym *s = type->ref;
3731 v |= SYM_FIELD;
3732 while ((s = s->next) != NULL) {
3733 if ((s->v & SYM_FIELD) &&
3734 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3735 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3736 Sym *ret = find_field (&s->type, v, cumofs);
3737 if (ret) {
3738 *cumofs += s->c;
3739 return ret;
3742 if (s->v == v)
3743 break;
3745 return s;
3748 static void struct_layout(CType *type, AttributeDef *ad)
3750 int size, align, maxalign, offset, c, bit_pos, bit_size;
3751 int packed, a, bt, prevbt, prev_bit_size;
3752 int pcc = !tcc_state->ms_bitfields;
3753 int pragma_pack = *tcc_state->pack_stack_ptr;
3754 Sym *f;
3756 maxalign = 1;
3757 offset = 0;
3758 c = 0;
3759 bit_pos = 0;
3760 prevbt = VT_STRUCT; /* make it never match */
3761 prev_bit_size = 0;
3763 //#define BF_DEBUG
3765 for (f = type->ref->next; f; f = f->next) {
3766 if (f->type.t & VT_BITFIELD)
3767 bit_size = BIT_SIZE(f->type.t);
3768 else
3769 bit_size = -1;
3770 size = type_size(&f->type, &align);
3771 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3772 packed = 0;
3774 if (pcc && bit_size == 0) {
3775 /* in pcc mode, packing does not affect zero-width bitfields */
3777 } else {
3778 /* in pcc mode, attribute packed overrides if set. */
3779 if (pcc && (f->a.packed || ad->a.packed))
3780 align = packed = 1;
3782 /* pragma pack overrides align if smaller, and always packs bitfields */
3783 if (pragma_pack) {
3784 packed = 1;
3785 if (pragma_pack < align)
3786 align = pragma_pack;
3787 /* in pcc mode pragma pack also overrides individual align */
3788 if (pcc && pragma_pack < a)
3789 a = 0;
3792 /* some individual align was specified */
3793 if (a)
3794 align = a;
3796 if (type->ref->type.t == VT_UNION) {
3797 if (pcc && bit_size >= 0)
3798 size = (bit_size + 7) >> 3;
3799 offset = 0;
3800 if (size > c)
3801 c = size;
3803 } else if (bit_size < 0) {
3804 if (pcc)
3805 c += (bit_pos + 7) >> 3;
3806 c = (c + align - 1) & -align;
3807 offset = c;
3808 if (size > 0)
3809 c += size;
3810 bit_pos = 0;
3811 prevbt = VT_STRUCT;
3812 prev_bit_size = 0;
3814 } else {
3815 /* A bit-field. Layout is more complicated. There are two
3816 options: PCC (GCC) compatible and MS compatible */
3817 if (pcc) {
3818 /* In PCC layout a bit-field is placed adjacent to the
3819 preceding bit-fields, except if:
3820 - it has zero-width
3821 - an individual alignment was given
3822 - it would overflow its base type container and
3823 there is no packing */
3824 if (bit_size == 0) {
3825 new_field:
3826 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3827 bit_pos = 0;
3828 } else if (f->a.aligned) {
3829 goto new_field;
3830 } else if (!packed) {
3831 int a8 = align * 8;
3832 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3833 if (ofs > size / align)
3834 goto new_field;
3837 /* in pcc mode, long long bitfields have type int if they fit */
3838 if (size == 8 && bit_size <= 32)
3839 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3841 while (bit_pos >= align * 8)
3842 c += align, bit_pos -= align * 8;
3843 offset = c;
3845 /* In PCC layout named bit-fields influence the alignment
3846 of the containing struct using the base type's alignment,
3847 except for packed fields (which here have correct align). */
3848 if (f->v & SYM_FIRST_ANOM
3849 // && bit_size // ??? gcc on ARM/rpi does that
3851 align = 1;
3853 } else {
3854 bt = f->type.t & VT_BTYPE;
3855 if ((bit_pos + bit_size > size * 8)
3856 || (bit_size > 0) == (bt != prevbt)
3858 c = (c + align - 1) & -align;
3859 offset = c;
3860 bit_pos = 0;
3861 /* In MS bitfield mode a bit-field run always uses
3862 at least as many bits as the underlying type.
3863 To start a new run it's also required that this
3864 or the last bit-field had non-zero width. */
3865 if (bit_size || prev_bit_size)
3866 c += size;
3868 /* In MS layout the record's alignment is normally
3869 influenced by the field, except for a zero-width
3870 field at the start of a run (further zero-width
3871 fields do influence it again). */
3872 if (bit_size == 0 && prevbt != bt)
3873 align = 1;
3874 prevbt = bt;
3875 prev_bit_size = bit_size;
3878 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3879 | (bit_pos << VT_STRUCT_SHIFT);
3880 bit_pos += bit_size;
3882 if (align > maxalign)
3883 maxalign = align;
3885 #ifdef BF_DEBUG
3886 printf("set field %s offset %-2d size %-2d align %-2d",
3887 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3888 if (f->type.t & VT_BITFIELD) {
3889 printf(" pos %-2d bits %-2d",
3890 BIT_POS(f->type.t),
3891 BIT_SIZE(f->type.t)
3894 printf("\n");
3895 #endif
3897 f->c = offset;
3898 f->r = 0;
3901 if (pcc)
3902 c += (bit_pos + 7) >> 3;
3904 /* store size and alignment */
3905 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3906 if (a < maxalign)
3907 a = maxalign;
3908 type->ref->r = a;
3909 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3910 /* can happen if individual align for some member was given. In
3911 this case MSVC ignores maxalign when aligning the size */
3912 a = pragma_pack;
3913 if (a < bt)
3914 a = bt;
3916 c = (c + a - 1) & -a;
3917 type->ref->c = c;
3919 #ifdef BF_DEBUG
3920 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3921 #endif
3923 /* check whether we can access bitfields by their type */
3924 for (f = type->ref->next; f; f = f->next) {
3925 int s, px, cx, c0;
3926 CType t;
3928 if (0 == (f->type.t & VT_BITFIELD))
3929 continue;
3930 f->type.ref = f;
3931 f->auxtype = -1;
3932 bit_size = BIT_SIZE(f->type.t);
3933 if (bit_size == 0)
3934 continue;
3935 bit_pos = BIT_POS(f->type.t);
3936 size = type_size(&f->type, &align);
3937 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3938 continue;
3940 /* try to access the field using a different type */
3941 c0 = -1, s = align = 1;
3942 for (;;) {
3943 px = f->c * 8 + bit_pos;
3944 cx = (px >> 3) & -align;
3945 px = px - (cx << 3);
3946 if (c0 == cx)
3947 break;
3948 s = (px + bit_size + 7) >> 3;
3949 if (s > 4) {
3950 t.t = VT_LLONG;
3951 } else if (s > 2) {
3952 t.t = VT_INT;
3953 } else if (s > 1) {
3954 t.t = VT_SHORT;
3955 } else {
3956 t.t = VT_BYTE;
3958 s = type_size(&t, &align);
3959 c0 = cx;
3962 if (px + bit_size <= s * 8 && cx + s <= c) {
3963 /* update offset and bit position */
3964 f->c = cx;
3965 bit_pos = px;
3966 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3967 | (bit_pos << VT_STRUCT_SHIFT);
3968 if (s != size)
3969 f->auxtype = t.t;
3970 #ifdef BF_DEBUG
3971 printf("FIX field %s offset %-2d size %-2d align %-2d "
3972 "pos %-2d bits %-2d\n",
3973 get_tok_str(f->v & ~SYM_FIELD, NULL),
3974 cx, s, align, px, bit_size);
3975 #endif
3976 } else {
3977 /* fall back to load/store single-byte wise */
3978 f->auxtype = VT_STRUCT;
3979 #ifdef BF_DEBUG
3980 printf("FIX field %s : load byte-wise\n",
3981 get_tok_str(f->v & ~SYM_FIELD, NULL));
3982 #endif
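/* For illustration: with "struct { char c; int i : 4; }" the PCC/GCC
   rules above pack the bit-field right after 'c' inside the first
   int-aligned word (size 4), while the MS rules start a fresh int
   container for the bit-field run (size 8). */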
3987 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3988 static void struct_decl(CType *type, int u)
3990 int v, c, size, align, flexible;
3991 int bit_size, bsize, bt;
3992 Sym *s, *ss, **ps;
3993 AttributeDef ad, ad1;
3994 CType type1, btype;
3996 memset(&ad, 0, sizeof ad);
3997 next();
3998 parse_attribute(&ad);
3999 if (tok != '{') {
4000 v = tok;
4001 next();
4002 /* struct already defined ? return it */
4003 if (v < TOK_IDENT)
4004 expect("struct/union/enum name");
4005 s = struct_find(v);
4006 if (s && (s->sym_scope == local_scope || tok != '{')) {
4007 if (u == s->type.t)
4008 goto do_decl;
4009 if (u == VT_ENUM && IS_ENUM(s->type.t))
4010 goto do_decl;
4011 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4013 } else {
4014 v = anon_sym++;
4016 /* Record the original enum/struct/union token. */
4017 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4018 type1.ref = NULL;
4019 /* we put an undefined size for struct/union */
4020 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4021 s->r = 0; /* default alignment is zero as gcc */
4022 do_decl:
4023 type->t = s->type.t;
4024 type->ref = s;
4026 if (tok == '{') {
4027 next();
4028 if (s->c != -1)
4029 tcc_error("struct/union/enum already defined");
4030 s->c = -2;
4031 /* cannot be empty */
4032 /* empty enums are not allowed */
4033 ps = &s->next;
4034 if (u == VT_ENUM) {
4035 long long ll = 0, pl = 0, nl = 0;
4036 CType t;
4037 t.ref = s;
4038 /* enum symbols have static storage */
4039 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4040 for(;;) {
4041 v = tok;
4042 if (v < TOK_UIDENT)
4043 expect("identifier");
4044 ss = sym_find(v);
4045 if (ss && !local_stack)
4046 tcc_error("redefinition of enumerator '%s'",
4047 get_tok_str(v, NULL));
4048 next();
4049 if (tok == '=') {
4050 next();
4051 ll = expr_const64();
4053 ss = sym_push(v, &t, VT_CONST, 0);
4054 ss->enum_val = ll;
4055 *ps = ss, ps = &ss->next;
4056 if (ll < nl)
4057 nl = ll;
4058 if (ll > pl)
4059 pl = ll;
4060 if (tok != ',')
4061 break;
4062 next();
4063 ll++;
4064 /* NOTE: we accept a trailing comma */
4065 if (tok == '}')
4066 break;
4068 skip('}');
4069 /* set integral type of the enum */
4070 t.t = VT_INT;
4071 if (nl >= 0) {
4072 if (pl != (unsigned)pl)
4073 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4074 t.t |= VT_UNSIGNED;
4075 } else if (pl != (int)pl || nl != (int)nl)
4076 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4077 s->type.t = type->t = t.t | VT_ENUM;
4078 s->c = 0;
4079 /* set type for enum members */
4080 for (ss = s->next; ss; ss = ss->next) {
4081 ll = ss->enum_val;
4082 if (ll == (int)ll) /* default is int if it fits */
4083 continue;
4084 if (t.t & VT_UNSIGNED) {
4085 ss->type.t |= VT_UNSIGNED;
4086 if (ll == (unsigned)ll)
4087 continue;
4089 ss->type.t = (ss->type.t & ~VT_BTYPE)
4090 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4092 } else {
4093 c = 0;
4094 flexible = 0;
4095 while (tok != '}') {
4096 if (!parse_btype(&btype, &ad1)) {
4097 skip(';');
4098 continue;
4100 while (1) {
4101 if (flexible)
4102 tcc_error("flexible array member '%s' not at the end of struct",
4103 get_tok_str(v, NULL));
4104 bit_size = -1;
4105 v = 0;
4106 type1 = btype;
4107 if (tok != ':') {
4108 if (tok != ';')
4109 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4110 if (v == 0) {
4111 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4112 expect("identifier");
4113 else {
4114 int v = btype.ref->v;
4115 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4116 if (tcc_state->ms_extensions == 0)
4117 expect("identifier");
4121 if (type_size(&type1, &align) < 0) {
4122 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4123 flexible = 1;
4124 else
4125 tcc_error("field '%s' has incomplete type",
4126 get_tok_str(v, NULL));
4128 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4129 (type1.t & VT_BTYPE) == VT_VOID ||
4130 (type1.t & VT_STORAGE))
4131 tcc_error("invalid type for '%s'",
4132 get_tok_str(v, NULL));
4134 if (tok == ':') {
4135 next();
4136 bit_size = expr_const();
4137 /* XXX: handle v = 0 case for messages */
4138 if (bit_size < 0)
4139 tcc_error("negative width in bit-field '%s'",
4140 get_tok_str(v, NULL));
4141 if (v && bit_size == 0)
4142 tcc_error("zero width for bit-field '%s'",
4143 get_tok_str(v, NULL));
4144 parse_attribute(&ad1);
4146 size = type_size(&type1, &align);
4147 if (bit_size >= 0) {
4148 bt = type1.t & VT_BTYPE;
4149 if (bt != VT_INT &&
4150 bt != VT_BYTE &&
4151 bt != VT_SHORT &&
4152 bt != VT_BOOL &&
4153 bt != VT_LLONG)
4154 tcc_error("bitfields must have scalar type");
4155 bsize = size * 8;
4156 if (bit_size > bsize) {
4157 tcc_error("width of '%s' exceeds its type",
4158 get_tok_str(v, NULL));
4159 } else if (bit_size == bsize
4160 && !ad.a.packed && !ad1.a.packed) {
4161 /* no need for bit fields */
4163 } else if (bit_size == 64) {
4164 tcc_error("field width 64 not implemented");
4165 } else {
4166 type1.t = (type1.t & ~VT_STRUCT_MASK)
4167 | VT_BITFIELD
4168 | (bit_size << (VT_STRUCT_SHIFT + 6));
4171 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4172 /* Remember we've seen a real field to check
4173 for placement of flexible array member. */
4174 c = 1;
4176 /* If member is a struct or bit-field, enforce
4177 placing into the struct (as anonymous). */
4178 if (v == 0 &&
4179 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4180 bit_size >= 0)) {
4181 v = anon_sym++;
4183 if (v) {
4184 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4185 ss->a = ad1.a;
4186 *ps = ss;
4187 ps = &ss->next;
4189 if (tok == ';' || tok == TOK_EOF)
4190 break;
4191 skip(',');
4193 skip(';');
4195 skip('}');
4196 parse_attribute(&ad);
4197 struct_layout(type, &ad);
4202 static void sym_to_attr(AttributeDef *ad, Sym *s)
4204 merge_symattr(&ad->a, &s->a);
4205 merge_funcattr(&ad->f, &s->f);
4208 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4209 are added to the element type, copied because it could be a typedef. */
4210 static void parse_btype_qualify(CType *type, int qualifiers)
4212 while (type->t & VT_ARRAY) {
4213 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4214 type = &type->ref->type;
4216 type->t |= qualifiers;
4219 /* return 0 if no type declaration. otherwise, return the basic type
4220 and skip it.
4222 static int parse_btype(CType *type, AttributeDef *ad)
4224 int t, u, bt, st, type_found, typespec_found, g, n;
4225 Sym *s;
4226 CType type1;
4228 memset(ad, 0, sizeof(AttributeDef));
4229 type_found = 0;
4230 typespec_found = 0;
4231 t = VT_INT;
4232 bt = st = -1;
4233 type->ref = NULL;
4235 while(1) {
4236 switch(tok) {
4237 case TOK_EXTENSION:
4238 /* currently, we really ignore extension */
4239 next();
4240 continue;
4242 /* basic types */
4243 case TOK_CHAR:
4244 u = VT_BYTE;
4245 basic_type:
4246 next();
4247 basic_type1:
4248 if (u == VT_SHORT || u == VT_LONG) {
4249 if (st != -1 || (bt != -1 && bt != VT_INT))
4250 tmbt: tcc_error("too many basic types");
4251 st = u;
4252 } else {
4253 if (bt != -1 || (st != -1 && u != VT_INT))
4254 goto tmbt;
4255 bt = u;
4257 if (u != VT_INT)
4258 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4259 typespec_found = 1;
4260 break;
4261 case TOK_VOID:
4262 u = VT_VOID;
4263 goto basic_type;
4264 case TOK_SHORT:
4265 u = VT_SHORT;
4266 goto basic_type;
4267 case TOK_INT:
4268 u = VT_INT;
4269 goto basic_type;
4270 case TOK_ALIGNAS:
4271 { int n;
4272 AttributeDef ad1;
4273 next();
4274 skip('(');
4275 memset(&ad1, 0, sizeof(AttributeDef));
4276 if (parse_btype(&type1, &ad1)) {
4277 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4278 if (ad1.a.aligned)
4279 n = 1 << (ad1.a.aligned - 1);
4280 else
4281 type_size(&type1, &n);
4282 } else {
4283 n = expr_const();
4284 if (n <= 0 || (n & (n - 1)) != 0)
4285 tcc_error("alignment must be a positive power of two");
4287 skip(')');
4288 ad->a.aligned = exact_log2p1(n);
4290 continue;
4291 case TOK_LONG:
4292 if ((t & VT_BTYPE) == VT_DOUBLE) {
4293 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4294 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4295 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4296 } else {
4297 u = VT_LONG;
4298 goto basic_type;
4300 next();
4301 break;
4302 #ifdef TCC_TARGET_ARM64
4303 case TOK_UINT128:
4304 /* GCC's __uint128_t appears in some Linux header files. Make it a
4305 synonym for long double to get the size and alignment right. */
4306 u = VT_LDOUBLE;
4307 goto basic_type;
4308 #endif
4309 case TOK_BOOL:
4310 u = VT_BOOL;
4311 goto basic_type;
4312 case TOK_FLOAT:
4313 u = VT_FLOAT;
4314 goto basic_type;
4315 case TOK_DOUBLE:
4316 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4317 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4318 } else {
4319 u = VT_DOUBLE;
4320 goto basic_type;
4322 next();
4323 break;
4324 case TOK_ENUM:
4325 struct_decl(&type1, VT_ENUM);
4326 basic_type2:
4327 u = type1.t;
4328 type->ref = type1.ref;
4329 goto basic_type1;
4330 case TOK_STRUCT:
4331 struct_decl(&type1, VT_STRUCT);
4332 goto basic_type2;
4333 case TOK_UNION:
4334 struct_decl(&type1, VT_UNION);
4335 goto basic_type2;
4337 /* type modifiers */
4338 case TOK_CONST1:
4339 case TOK_CONST2:
4340 case TOK_CONST3:
4341 type->t = t;
4342 parse_btype_qualify(type, VT_CONSTANT);
4343 t = type->t;
4344 next();
4345 break;
4346 case TOK_VOLATILE1:
4347 case TOK_VOLATILE2:
4348 case TOK_VOLATILE3:
4349 type->t = t;
4350 parse_btype_qualify(type, VT_VOLATILE);
4351 t = type->t;
4352 next();
4353 break;
4354 case TOK_SIGNED1:
4355 case TOK_SIGNED2:
4356 case TOK_SIGNED3:
4357 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4358 tcc_error("signed and unsigned modifier");
4359 t |= VT_DEFSIGN;
4360 next();
4361 typespec_found = 1;
4362 break;
4363 case TOK_REGISTER:
4364 case TOK_AUTO:
4365 case TOK_RESTRICT1:
4366 case TOK_RESTRICT2:
4367 case TOK_RESTRICT3:
4368 next();
4369 break;
4370 case TOK_UNSIGNED:
4371 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4372 tcc_error("signed and unsigned modifier");
4373 t |= VT_DEFSIGN | VT_UNSIGNED;
4374 next();
4375 typespec_found = 1;
4376 break;
4378 /* storage */
4379 case TOK_EXTERN:
4380 g = VT_EXTERN;
4381 goto storage;
4382 case TOK_STATIC:
4383 g = VT_STATIC;
4384 goto storage;
4385 case TOK_TYPEDEF:
4386 g = VT_TYPEDEF;
4387 goto storage;
4388 storage:
4389 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4390 tcc_error("multiple storage classes");
4391 t |= g;
4392 next();
4393 break;
4394 case TOK_INLINE1:
4395 case TOK_INLINE2:
4396 case TOK_INLINE3:
4397 t |= VT_INLINE;
4398 next();
4399 break;
4400 case TOK_NORETURN3:
4401 /* currently, no need to handle it because tcc does not
4402 track unused objects */
4403 next();
4404 break;
4405 /* GNUC attribute */
4406 case TOK_ATTRIBUTE1:
4407 case TOK_ATTRIBUTE2:
4408 parse_attribute(ad);
4409 if (ad->attr_mode) {
4410 u = ad->attr_mode -1;
4411 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4413 continue;
4414 /* GNUC typeof */
4415 case TOK_TYPEOF1:
4416 case TOK_TYPEOF2:
4417 case TOK_TYPEOF3:
4418 next();
4419 parse_expr_type(&type1);
4420 /* remove all storage modifiers except typedef */
4421 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4422 if (type1.ref)
4423 sym_to_attr(ad, type1.ref);
4424 goto basic_type2;
4425 default:
4426 if (typespec_found)
4427 goto the_end;
4428 s = sym_find(tok);
4429 if (!s || !(s->type.t & VT_TYPEDEF))
4430 goto the_end;
4432 n = tok, next();
4433 if (tok == ':' && !in_generic) {
4434 /* ignore if it's a label */
4435 unget_tok(n);
4436 goto the_end;
4439 t &= ~(VT_BTYPE|VT_LONG);
4440 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4441 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4442 type->ref = s->type.ref;
4443 if (t)
4444 parse_btype_qualify(type, t);
4445 t = type->t;
4446 /* get attributes from typedef */
4447 sym_to_attr(ad, s);
4448 typespec_found = 1;
4449 st = bt = -2;
4450 break;
4452 type_found = 1;
4454 the_end:
4455 if (tcc_state->char_is_unsigned) {
4456 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4457 t |= VT_UNSIGNED;
4459 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4460 bt = t & (VT_BTYPE|VT_LONG);
4461 if (bt == VT_LONG)
4462 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4463 #ifdef TCC_TARGET_PE
4464 if (bt == VT_LDOUBLE)
4465 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4466 #endif
4467 type->t = t;
4468 return type_found;
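/* Illustration (editor's note, not part of tcc): what a few specifier sequences
   leave in type->t after the normalization at the_end, assuming LONG_SIZE == 8
   and a non-PE target:
       "unsigned char"  ->  VT_BYTE  | VT_UNSIGNED | VT_DEFSIGN
       "long"           ->  VT_LLONG | VT_LONG      (VT_LONG is only a modifier)
       "long long"      ->  VT_LLONG
       "long double"    ->  VT_LDOUBLE              (downgraded to VT_DOUBLE on PE)
*/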
4471 /* convert a function parameter type (array to pointer and function to
4472 function pointer) */
4473 static inline void convert_parameter_type(CType *pt)
4475 /* remove const and volatile qualifiers (XXX: const could be used
4476 to indicate a const function parameter) */
4477 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4478 /* array must be transformed to pointer according to ANSI C */
4479 pt->t &= ~VT_ARRAY;
4480 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4481 mk_pointer(pt);
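/* Illustration (editor's note, not part of tcc): the usual C parameter
   adjustments, e.g.
       void f(int a[10], int g(void));
   is treated as if it were
       void f(int *a, int (*g)(void));
   i.e. the array parameter loses VT_ARRAY (leaving the pointer), a function
   parameter gains a pointer level, and top-level const/volatile on the
   parameter are dropped. */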
4485 ST_FUNC void parse_asm_str(CString *astr)
4487 skip('(');
4488 parse_mult_str(astr, "string constant");
4491 /* Parse an asm label and return the token */
4492 static int asm_label_instr(void)
4494 int v;
4495 CString astr;
4497 next();
4498 parse_asm_str(&astr);
4499 skip(')');
4500 #ifdef ASM_DEBUG
4501 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4502 #endif
4503 v = tok_alloc(astr.data, astr.size - 1)->tok;
4504 cstr_free(&astr);
4505 return v;
4508 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4510 int n, l, t1, arg_size, align, unused_align;
4511 Sym **plast, *s, *first;
4512 AttributeDef ad1;
4513 CType pt;
4515 if (tok == '(') {
4516 /* function type, or recursive declarator (return if so) */
4517 next();
4518 if (td && !(td & TYPE_ABSTRACT))
4519 return 0;
4520 if (tok == ')')
4521 l = 0;
4522 else if (parse_btype(&pt, &ad1))
4523 l = FUNC_NEW;
4524 else if (td) {
4525 merge_attr (ad, &ad1);
4526 return 0;
4527 } else
4528 l = FUNC_OLD;
4529 first = NULL;
4530 plast = &first;
4531 arg_size = 0;
4532 if (l) {
4533 for(;;) {
4534 /* read param name and compute offset */
4535 if (l != FUNC_OLD) {
4536 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4537 break;
4538 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4539 if ((pt.t & VT_BTYPE) == VT_VOID)
4540 tcc_error("parameter declared as void");
4541 } else {
4542 n = tok;
4543 if (n < TOK_UIDENT)
4544 expect("identifier");
4545 pt.t = VT_VOID; /* invalid type */
4546 pt.ref = NULL;
4547 next();
4549 convert_parameter_type(&pt);
4550 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4551 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4552 *plast = s;
4553 plast = &s->next;
4554 if (tok == ')')
4555 break;
4556 skip(',');
4557 if (l == FUNC_NEW && tok == TOK_DOTS) {
4558 l = FUNC_ELLIPSIS;
4559 next();
4560 break;
4562 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4563 tcc_error("invalid type");
4565 } else
4566 /* if no parameters, then old type prototype */
4567 l = FUNC_OLD;
4568 skip(')');
4569 /* NOTE: const is ignored in returned type as it has a special
4570 meaning in gcc / C++ */
4571 type->t &= ~VT_CONSTANT;
4572 /* some ancient pre-K&R C allows a function to return an array
4573 and the array brackets to be put after the arguments, such
4574 that "int c()[]" means something like "int[] c()" */
4575 if (tok == '[') {
4576 next();
4577 skip(']'); /* only handle simple "[]" */
4578 mk_pointer(type);
4580 /* we push an anonymous symbol which will contain the function prototype */
4581 ad->f.func_args = arg_size;
4582 ad->f.func_type = l;
4583 s = sym_push(SYM_FIELD, type, 0, 0);
4584 s->a = ad->a;
4585 s->f = ad->f;
4586 s->next = first;
4587 type->t = VT_FUNC;
4588 type->ref = s;
4589 } else if (tok == '[') {
4590 int saved_nocode_wanted = nocode_wanted;
4591 /* array definition */
4592 next();
4593 while (1) {
4594 /* XXX The optional type-quals and static should only be accepted
4595 in parameter decls. The '*' as well, and then even only
4596 in prototypes (not function defs). */
4597 switch (tok) {
4598 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4599 case TOK_CONST1:
4600 case TOK_VOLATILE1:
4601 case TOK_STATIC:
4602 case '*':
4603 next();
4604 continue;
4605 default:
4606 break;
4608 break;
4610 n = -1;
4611 t1 = 0;
4612 if (tok != ']') {
4613 if (!local_stack || (storage & VT_STATIC))
4614 vpushi(expr_const());
4615 else {
4616 /* The length of a VLA (which can only happen with local_stack
4617 && !VT_STATIC) must always be evaluated, even under nocode_wanted,
4618 so that its size slot is initialized (e.g. under sizeof
4619 or typeof). */
4620 nocode_wanted = 0;
4621 gexpr();
4623 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4624 n = vtop->c.i;
4625 if (n < 0)
4626 tcc_error("invalid array size");
4627 } else {
4628 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4629 tcc_error("size of variable length array should be an integer");
4630 n = 0;
4631 t1 = VT_VLA;
4634 skip(']');
4635 /* parse next post type */
4636 post_type(type, ad, storage, 0);
4638 if ((type->t & VT_BTYPE) == VT_FUNC)
4639 tcc_error("declaration of an array of functions");
4640 if ((type->t & VT_BTYPE) == VT_VOID
4641 || type_size(type, &unused_align) < 0)
4642 tcc_error("declaration of an array of incomplete type elements");
4644 t1 |= type->t & VT_VLA;
4646 if (t1 & VT_VLA) {
4647 if (n < 0)
4648 tcc_error("need explicit inner array size in VLAs");
4649 loc -= type_size(&int_type, &align);
4650 loc &= -align;
4651 n = loc;
4653 vla_runtime_type_size(type, &align);
4654 gen_op('*');
4655 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4656 vswap();
4657 vstore();
4659 if (n != -1)
4660 vpop();
4661 nocode_wanted = saved_nocode_wanted;
4663 /* we push an anonymous symbol which will contain the array
4664 element type */
4665 s = sym_push(SYM_FIELD, type, 0, n);
4666 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4667 type->ref = s;
4669 return 1;
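/* Illustration (editor's note, not part of tcc): for a declaration like
       int a[2][3];
   post_type() sees the outer "[2]" first, recurses to handle "[3]", and then
   wraps the result, so the ref chain ends up as
       array[2] -> array[3] -> int
   For a prototype such as "int f(char, long)" it instead pushes one anonymous
   SYM_FIELD symbol per parameter, chained via ->next, with ad->f.func_args
   holding arg_size, the parameter sizes rounded up to pointer-sized words. */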
4672 /* Parse a type declarator (except basic type), and return the type
4673 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4674 expected. 'type' should contain the basic type. 'ad' is the
4675 attribute definition of the basic type. It can be modified by
4676 type_decl(). If this (possibly abstract) declarator is a pointer chain
4677 it returns the innermost pointed to type (equals *type, but is a different
4678 pointer), otherwise returns type itself, that's used for recursive calls. */
4679 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4681 CType *post, *ret;
4682 int qualifiers, storage;
4684 /* recursive type, remove storage bits first, apply them later again */
4685 storage = type->t & VT_STORAGE;
4686 type->t &= ~VT_STORAGE;
4687 post = ret = type;
4689 while (tok == '*') {
4690 qualifiers = 0;
4691 redo:
4692 next();
4693 switch(tok) {
4694 case TOK_CONST1:
4695 case TOK_CONST2:
4696 case TOK_CONST3:
4697 qualifiers |= VT_CONSTANT;
4698 goto redo;
4699 case TOK_VOLATILE1:
4700 case TOK_VOLATILE2:
4701 case TOK_VOLATILE3:
4702 qualifiers |= VT_VOLATILE;
4703 goto redo;
4704 case TOK_RESTRICT1:
4705 case TOK_RESTRICT2:
4706 case TOK_RESTRICT3:
4707 goto redo;
4708 /* XXX: clarify attribute handling */
4709 case TOK_ATTRIBUTE1:
4710 case TOK_ATTRIBUTE2:
4711 parse_attribute(ad);
4712 break;
4714 mk_pointer(type);
4715 type->t |= qualifiers;
4716 if (ret == type)
4717 /* innermost pointed to type is the one for the first derivation */
4718 ret = pointed_type(type);
4721 if (tok == '(') {
4722 /* This is possibly a parameter type list for abstract declarators
4723 ('int ()'), use post_type for testing this. */
4724 if (!post_type(type, ad, 0, td)) {
4725 /* It's not, so it's a nested declarator, and the post operations
4726 apply to the innermost pointed to type (if any). */
4727 /* XXX: it is not correct to modify 'ad' at this point, but
4728 the syntax is not clear */
4729 parse_attribute(ad);
4730 post = type_decl(type, ad, v, td);
4731 skip(')');
4732 } else
4733 goto abstract;
4734 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4735 /* type identifier */
4736 *v = tok;
4737 next();
4738 } else {
4739 abstract:
4740 if (!(td & TYPE_ABSTRACT))
4741 expect("identifier");
4742 *v = 0;
4744 post_type(post, ad, storage, 0);
4745 parse_attribute(ad);
4746 type->t |= storage;
4747 return ret;
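/* Illustration (editor's note, not part of tcc): for "int *(*fp)(void)" parsed
   with TYPE_DIRECT, the leading '*' derives "pointer to int", the nested
   declarator "(*fp)" adds another pointer derivation and names the object,
   and the trailing "(void)" is then applied through 'post' to the innermost
   pointed-to type, so fp ends up as
       pointer to function(void) returning pointer to int. */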
4750 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4751 ST_FUNC int lvalue_type(int t)
4753 int bt, r;
4754 r = VT_LVAL;
4755 bt = t & VT_BTYPE;
4756 if (bt == VT_BYTE || bt == VT_BOOL)
4757 r |= VT_LVAL_BYTE;
4758 else if (bt == VT_SHORT)
4759 r |= VT_LVAL_SHORT;
4760 else
4761 return r;
4762 if (t & VT_UNSIGNED)
4763 r |= VT_LVAL_UNSIGNED;
4764 return r;
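/* Illustration (editor's note, not part of tcc): the returned flags tell the
   load/store code how wide the access is and whether to zero- or sign-extend:
       lvalue_type(VT_BYTE | VT_UNSIGNED) == VT_LVAL | VT_LVAL_BYTE | VT_LVAL_UNSIGNED
       lvalue_type(VT_SHORT)              == VT_LVAL | VT_LVAL_SHORT
       lvalue_type(VT_INT)                == VT_LVAL   (full word, no extension)
*/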
4767 /* indirection with full error checking and bound check */
4768 ST_FUNC void indir(void)
4770 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4771 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4772 return;
4773 expect("pointer");
4775 if (vtop->r & VT_LVAL)
4776 gv(RC_INT);
4777 vtop->type = *pointed_type(&vtop->type);
4778 /* Arrays and functions are never lvalues */
4779 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4780 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4781 vtop->r |= lvalue_type(vtop->type.t);
4782 /* if bound checking, the referenced pointer must be checked */
4783 #ifdef CONFIG_TCC_BCHECK
4784 if (tcc_state->do_bounds_check)
4785 vtop->r |= VT_MUSTBOUND;
4786 #endif
4790 /* pass a parameter to a function and do type checking and casting */
4791 static void gfunc_param_typed(Sym *func, Sym *arg)
4793 int func_type;
4794 CType type;
4796 func_type = func->f.func_type;
4797 if (func_type == FUNC_OLD ||
4798 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4799 /* default casting : only need to convert float to double */
4800 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4801 gen_cast_s(VT_DOUBLE);
4802 } else if (vtop->type.t & VT_BITFIELD) {
4803 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4804 type.ref = vtop->type.ref;
4805 gen_cast(&type);
4807 } else if (arg == NULL) {
4808 tcc_error("too many arguments to function");
4809 } else {
4810 type = arg->type;
4811 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4812 gen_assign_cast(&type);
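/* Illustration (editor's note, not part of tcc): for an unprototyped callee or
   for arguments past the declared parameters of a variadic one, only the
   default promotion that matters here is applied, e.g.
       int printf(const char *, ...);
       printf("%f", 1.0f);      // the float argument is cast to double
   whereas a prototyped parameter gets a full gen_assign_cast() against the
   declared parameter type (with const stripped to avoid a false warning). */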
4816 /* parse an expression and return its type without any side effect. */
4817 static void expr_type(CType *type, void (*expr_fn)(void))
4819 nocode_wanted++;
4820 expr_fn();
4821 *type = vtop->type;
4822 vpop();
4823 nocode_wanted--;
4826 /* parse an expression of the form '(type)' or '(expr)' and return its
4827 type */
4828 static void parse_expr_type(CType *type)
4830 int n;
4831 AttributeDef ad;
4833 skip('(');
4834 if (parse_btype(type, &ad)) {
4835 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4836 } else {
4837 expr_type(type, gexpr);
4839 skip(')');
4842 static void parse_type(CType *type)
4844 AttributeDef ad;
4845 int n;
4847 if (!parse_btype(type, &ad)) {
4848 expect("type");
4850 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4853 static void parse_builtin_params(int nc, const char *args)
4855 char c, sep = '(';
4856 CType t;
4857 if (nc)
4858 nocode_wanted++;
4859 next();
4860 while ((c = *args++)) {
4861 skip(sep);
4862 sep = ',';
4863 switch (c) {
4864 case 'e': expr_eq(); continue;
4865 case 't': parse_type(&t); vpush(&t); continue;
4866 default: tcc_error("internal error"); break;
4869 skip(')');
4870 if (nc)
4871 nocode_wanted--;
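/* Illustration (editor's note, not part of tcc): the 'args' string drives the
   parse, 'e' for an assignment expression and 't' for a type name, so e.g.
       parse_builtin_params(0, "et");
   accepts "(expr, type)" and leaves the expression value and then the pushed
   type on the value stack, while nc != 0 wraps the whole parse in
   nocode_wanted (as used by __builtin_constant_p). */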
4874 ST_FUNC void unary(void)
4876 int n, t, align, size, r, sizeof_caller;
4877 CType type;
4878 Sym *s;
4879 AttributeDef ad;
4881 sizeof_caller = in_sizeof;
4882 in_sizeof = 0;
4883 type.ref = NULL;
4884 /* XXX: GCC 2.95.3 does not generate a table although it would be
4885 better here */
4886 tok_next:
4887 switch(tok) {
4888 case TOK_EXTENSION:
4889 next();
4890 goto tok_next;
4891 case TOK_LCHAR:
4892 #ifdef TCC_TARGET_PE
4893 t = VT_SHORT|VT_UNSIGNED;
4894 goto push_tokc;
4895 #endif
4896 case TOK_CINT:
4897 case TOK_CCHAR:
4898 t = VT_INT;
4899 push_tokc:
4900 type.t = t;
4901 vsetc(&type, VT_CONST, &tokc);
4902 next();
4903 break;
4904 case TOK_CUINT:
4905 t = VT_INT | VT_UNSIGNED;
4906 goto push_tokc;
4907 case TOK_CLLONG:
4908 t = VT_LLONG;
4909 goto push_tokc;
4910 case TOK_CULLONG:
4911 t = VT_LLONG | VT_UNSIGNED;
4912 goto push_tokc;
4913 case TOK_CFLOAT:
4914 t = VT_FLOAT;
4915 goto push_tokc;
4916 case TOK_CDOUBLE:
4917 t = VT_DOUBLE;
4918 goto push_tokc;
4919 case TOK_CLDOUBLE:
4920 t = VT_LDOUBLE;
4921 goto push_tokc;
4922 case TOK_CLONG:
4923 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4924 goto push_tokc;
4925 case TOK_CULONG:
4926 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4927 goto push_tokc;
4928 case TOK___FUNCTION__:
4929 if (!gnu_ext)
4930 goto tok_identifier;
4931 /* fall thru */
4932 case TOK___FUNC__:
4934 void *ptr;
4935 int len;
4936 /* special function name identifier */
4937 len = strlen(funcname) + 1;
4938 /* generate char[len] type */
4939 type.t = VT_BYTE;
4940 mk_pointer(&type);
4941 type.t |= VT_ARRAY;
4942 type.ref->c = len;
4943 vpush_ref(&type, data_section, data_section->data_offset, len);
4944 if (!NODATA_WANTED) {
4945 ptr = section_ptr_add(data_section, len);
4946 memcpy(ptr, funcname, len);
4948 next();
4950 break;
4951 case TOK_LSTR:
4952 #ifdef TCC_TARGET_PE
4953 t = VT_SHORT | VT_UNSIGNED;
4954 #else
4955 t = VT_INT;
4956 #endif
4957 goto str_init;
4958 case TOK_STR:
4959 /* string parsing */
4960 t = VT_BYTE;
4961 if (tcc_state->char_is_unsigned)
4962 t = VT_BYTE | VT_UNSIGNED;
4963 str_init:
4964 if (tcc_state->warn_write_strings)
4965 t |= VT_CONSTANT;
4966 type.t = t;
4967 mk_pointer(&type);
4968 type.t |= VT_ARRAY;
4969 memset(&ad, 0, sizeof(AttributeDef));
4970 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4971 break;
4972 case '(':
4973 next();
4974 /* cast ? */
4975 if (parse_btype(&type, &ad)) {
4976 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4977 skip(')');
4978 /* check ISOC99 compound literal */
4979 if (tok == '{') {
4980 /* data is allocated locally by default */
4981 if (global_expr)
4982 r = VT_CONST;
4983 else
4984 r = VT_LOCAL;
4985 /* all except arrays are lvalues */
4986 if (!(type.t & VT_ARRAY))
4987 r |= lvalue_type(type.t);
4988 memset(&ad, 0, sizeof(AttributeDef));
4989 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4990 } else {
4991 if (sizeof_caller) {
4992 vpush(&type);
4993 return;
4995 unary();
4996 gen_cast(&type);
4998 } else if (tok == '{') {
4999 int saved_nocode_wanted = nocode_wanted;
5000 if (const_wanted)
5001 tcc_error("expected constant");
5002 /* save all registers */
5003 save_regs(0);
5004 /* statement expression : we do not accept break/continue
5005 inside as GCC does. We do retain the nocode_wanted state,
5006 as statement expressions can't ever be entered from the
5007 outside, so any reactivation of code emission (from labels
5008 or loop heads) can be disabled again after the end of it. */
5009 block(1);
5010 nocode_wanted = saved_nocode_wanted;
5011 skip(')');
5012 } else {
5013 gexpr();
5014 skip(')');
5016 break;
5017 case '*':
5018 next();
5019 unary();
5020 indir();
5021 break;
5022 case '&':
5023 next();
5024 unary();
5025 /* function names must be treated as function pointers,
5026 except for unary '&' and sizeof. Since we consider that
5027 functions are not lvalues, we only have to handle it
5028 there and in function calls. */
5029 /* arrays can also be used although they are not lvalues */
5030 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5031 !(vtop->type.t & VT_ARRAY))
5032 test_lvalue();
5033 mk_pointer(&vtop->type);
5034 gaddrof();
5035 break;
5036 case '!':
5037 next();
5038 unary();
5039 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5040 gen_cast_s(VT_BOOL);
5041 vtop->c.i = !vtop->c.i;
5042 } else if (vtop->r == VT_CMP) {
5043 vtop->cmp_op ^= 1;
5044 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5045 } else {
5046 vpushi(0);
5047 gen_op(TOK_EQ);
5049 break;
5050 case '~':
5051 next();
5052 unary();
5053 vpushi(-1);
5054 gen_op('^');
5055 break;
5056 case '+':
5057 next();
5058 unary();
5059 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5060 tcc_error("pointer not accepted for unary plus");
5061 /* In order to force a cast, we add zero, except for floating point
5062 where we really need a noop (otherwise -0.0 would be transformed
5063 into +0.0). */
5064 if (!is_float(vtop->type.t)) {
5065 vpushi(0);
5066 gen_op('+');
5068 break;
5069 case TOK_SIZEOF:
5070 case TOK_ALIGNOF1:
5071 case TOK_ALIGNOF2:
5072 case TOK_ALIGNOF3:
5073 t = tok;
5074 next();
5075 in_sizeof++;
5076 expr_type(&type, unary); /* performs in_sizeof = 0; */
5077 s = NULL;
5078 if (vtop[1].r & VT_SYM)
5079 s = vtop[1].sym; /* hack: accessing previous vtop */
5080 size = type_size(&type, &align);
5081 if (s && s->a.aligned)
5082 align = 1 << (s->a.aligned - 1);
5083 if (t == TOK_SIZEOF) {
5084 if (!(type.t & VT_VLA)) {
5085 if (size < 0)
5086 tcc_error("sizeof applied to an incomplete type");
5087 vpushs(size);
5088 } else {
5089 vla_runtime_type_size(&type, &align);
5091 } else {
5092 vpushs(align);
5094 vtop->type.t |= VT_UNSIGNED;
5095 break;
5097 case TOK_builtin_expect:
5098 /* __builtin_expect is a no-op for now */
5099 parse_builtin_params(0, "ee");
5100 vpop();
5101 break;
5102 case TOK_builtin_types_compatible_p:
5103 parse_builtin_params(0, "tt");
5104 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5105 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5106 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5107 vtop -= 2;
5108 vpushi(n);
5109 break;
5110 case TOK_builtin_choose_expr:
5112 int64_t c;
5113 next();
5114 skip('(');
5115 c = expr_const64();
5116 skip(',');
5117 if (!c) {
5118 nocode_wanted++;
5120 expr_eq();
5121 if (!c) {
5122 vpop();
5123 nocode_wanted--;
5125 skip(',');
5126 if (c) {
5127 nocode_wanted++;
5129 expr_eq();
5130 if (c) {
5131 vpop();
5132 nocode_wanted--;
5134 skip(')');
5136 break;
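/* Illustration (editor's note, not part of tcc): both branches of
   __builtin_choose_expr are parsed, but the one not selected by the constant
   condition is parsed under nocode_wanted and its value popped, so e.g.
       __builtin_choose_expr(sizeof(long) == 8, expr64, expr32)
   leaves only the chosen expression's value (and type) on the stack. */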
5137 case TOK_builtin_constant_p:
5138 parse_builtin_params(1, "e");
5139 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5140 vtop--;
5141 vpushi(n);
5142 break;
5143 case TOK_builtin_frame_address:
5144 case TOK_builtin_return_address:
5146 int tok1 = tok;
5147 int level;
5148 next();
5149 skip('(');
5150 if (tok != TOK_CINT) {
5151 tcc_error("%s only takes positive integers",
5152 tok1 == TOK_builtin_return_address ?
5153 "__builtin_return_address" :
5154 "__builtin_frame_address");
5156 level = (uint32_t)tokc.i;
5157 next();
5158 skip(')');
5159 type.t = VT_VOID;
5160 mk_pointer(&type);
5161 vset(&type, VT_LOCAL, 0); /* local frame */
5162 while (level--) {
5163 mk_pointer(&vtop->type);
5164 indir(); /* -> parent frame */
5166 if (tok1 == TOK_builtin_return_address) {
5167 // assume return address is just above frame pointer on stack
5168 vpushi(PTR_SIZE);
5169 gen_op('+');
5170 mk_pointer(&vtop->type);
5171 indir();
5174 break;
5175 #ifdef TCC_TARGET_RISCV64
5176 case TOK_builtin_va_start:
5177 parse_builtin_params(0, "ee");
5178 r = vtop->r & VT_VALMASK;
5179 if (r == VT_LLOCAL)
5180 r = VT_LOCAL;
5181 if (r != VT_LOCAL)
5182 tcc_error("__builtin_va_start expects a local variable");
5183 gen_va_start();
5184 vstore();
5185 break;
5186 #endif
5187 #ifdef TCC_TARGET_X86_64
5188 #ifdef TCC_TARGET_PE
5189 case TOK_builtin_va_start:
5190 parse_builtin_params(0, "ee");
5191 r = vtop->r & VT_VALMASK;
5192 if (r == VT_LLOCAL)
5193 r = VT_LOCAL;
5194 if (r != VT_LOCAL)
5195 tcc_error("__builtin_va_start expects a local variable");
5196 vtop->r = r;
5197 vtop->type = char_pointer_type;
5198 vtop->c.i += 8;
5199 vstore();
5200 break;
5201 #else
5202 case TOK_builtin_va_arg_types:
5203 parse_builtin_params(0, "t");
5204 vpushi(classify_x86_64_va_arg(&vtop->type));
5205 vswap();
5206 vpop();
5207 break;
5208 #endif
5209 #endif
5211 #ifdef TCC_TARGET_ARM64
5212 case TOK___va_start: {
5213 parse_builtin_params(0, "ee");
5214 //xx check types
5215 gen_va_start();
5216 vpushi(0);
5217 vtop->type.t = VT_VOID;
5218 break;
5220 case TOK___va_arg: {
5221 parse_builtin_params(0, "et");
5222 type = vtop->type;
5223 vpop();
5224 //xx check types
5225 gen_va_arg(&type);
5226 vtop->type = type;
5227 break;
5229 case TOK___arm64_clear_cache: {
5230 parse_builtin_params(0, "ee");
5231 gen_clear_cache();
5232 vpushi(0);
5233 vtop->type.t = VT_VOID;
5234 break;
5236 #endif
5237 /* pre operations */
5238 case TOK_INC:
5239 case TOK_DEC:
5240 t = tok;
5241 next();
5242 unary();
5243 inc(0, t);
5244 break;
5245 case '-':
5246 next();
5247 unary();
5248 t = vtop->type.t & VT_BTYPE;
5249 if (is_float(t)) {
5250 /* In IEEE negate(x) isn't subtract(0,x), but rather
5251 subtract(-0, x). */
5252 vpush(&vtop->type);
5253 if (t == VT_FLOAT)
5254 vtop->c.f = -1.0 * 0.0;
5255 else if (t == VT_DOUBLE)
5256 vtop->c.d = -1.0 * 0.0;
5257 else
5258 vtop->c.ld = -1.0 * 0.0;
5259 } else
5260 vpushi(0);
5261 vswap();
5262 gen_op('-');
5263 break;
5264 case TOK_LAND:
5265 if (!gnu_ext)
5266 goto tok_identifier;
5267 next();
5268 /* allow taking the address of a label */
5269 if (tok < TOK_UIDENT)
5270 expect("label identifier");
5271 s = label_find(tok);
5272 if (!s) {
5273 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5274 } else {
5275 if (s->r == LABEL_DECLARED)
5276 s->r = LABEL_FORWARD;
5278 if (!s->type.t) {
5279 s->type.t = VT_VOID;
5280 mk_pointer(&s->type);
5281 s->type.t |= VT_STATIC;
5283 vpushsym(&s->type, s);
5284 next();
5285 break;
5287 case TOK_GENERIC:
5289 CType controlling_type;
5290 int has_default = 0;
5291 int has_match = 0;
5292 int learn = 0;
5293 TokenString *str = NULL;
5294 int saved_const_wanted = const_wanted;
5296 next();
5297 skip('(');
5298 const_wanted = 0;
5299 expr_type(&controlling_type, expr_eq);
5300 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5301 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5302 mk_pointer(&controlling_type);
5303 const_wanted = saved_const_wanted;
5304 for (;;) {
5305 learn = 0;
5306 skip(',');
5307 if (tok == TOK_DEFAULT) {
5308 if (has_default)
5309 tcc_error("too many 'default'");
5310 has_default = 1;
5311 if (!has_match)
5312 learn = 1;
5313 next();
5314 } else {
5315 AttributeDef ad_tmp;
5316 int itmp;
5317 CType cur_type;
5319 in_generic++;
5320 parse_btype(&cur_type, &ad_tmp);
5321 in_generic--;
5323 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5324 if (compare_types(&controlling_type, &cur_type, 0)) {
5325 if (has_match) {
5326 tcc_error("type match twice");
5328 has_match = 1;
5329 learn = 1;
5332 skip(':');
5333 if (learn) {
5334 if (str)
5335 tok_str_free(str);
5336 skip_or_save_block(&str);
5337 } else {
5338 skip_or_save_block(NULL);
5340 if (tok == ')')
5341 break;
5343 if (!str) {
5344 char buf[60];
5345 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5346 tcc_error("type '%s' does not match any association", buf);
5348 begin_macro(str, 1);
5349 next();
5350 expr_eq();
5351 if (tok != TOK_EOF)
5352 expect(",");
5353 end_macro();
5354 next();
5355 break;
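/* Illustration (editor's note, not part of tcc): for _Generic the controlling
   expression is only typed (expr_type(), so no code is generated for it), each
   association's type is compared against it, and the matching (or default)
   branch is saved as a token string and re-parsed via begin_macro()/expr_eq(),
   e.g.
       #define tname(x) _Generic((x), int: "int", float: "float", default: "other")
       tname(1.0f)   // yields "float"
*/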
5357 // special qNaN, sNaN and infinity values
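/* Editor's note (added): these are IEEE-754 binary32 bit patterns pushed as an
   int constant and then retyped to VT_FLOAT: 0x7fc00000 is the canonical quiet
   NaN, 0x7f800001 has the quiet bit clear and a nonzero payload (a signalling
   NaN), and 0x7f800000 is +infinity. */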
5358 case TOK___NAN__:
5359 n = 0x7fc00000;
5360 special_math_val:
5361 vpushi(n);
5362 vtop->type.t = VT_FLOAT;
5363 next();
5364 break;
5365 case TOK___SNAN__:
5366 n = 0x7f800001;
5367 goto special_math_val;
5368 case TOK___INF__:
5369 n = 0x7f800000;
5370 goto special_math_val;
5372 default:
5373 tok_identifier:
5374 t = tok;
5375 next();
5376 if (t < TOK_UIDENT)
5377 expect("identifier");
5378 s = sym_find(t);
5379 if (!s || IS_ASM_SYM(s)) {
5380 const char *name = get_tok_str(t, NULL);
5381 if (tok != '(')
5382 tcc_error("'%s' undeclared", name);
5383 /* for simple function calls, we tolerate an undeclared
5384 external reference to an int() function */
5385 if (tcc_state->warn_implicit_function_declaration
5386 #ifdef TCC_TARGET_PE
5387 /* people must be warned about using undeclared WINAPI functions
5388 (which usually start with an uppercase letter) */
5389 || (name[0] >= 'A' && name[0] <= 'Z')
5390 #endif
5392 tcc_warning("implicit declaration of function '%s'", name);
5393 s = external_global_sym(t, &func_old_type);
5396 r = s->r;
5397 /* A symbol that has a register is a local register variable,
5398 which starts out as VT_LOCAL value. */
5399 if ((r & VT_VALMASK) < VT_CONST)
5400 r = (r & ~VT_VALMASK) | VT_LOCAL;
5402 vset(&s->type, r, s->c);
5403 /* Point to s as backpointer (even without r&VT_SYM).
5404 Will be used by at least the x86 inline asm parser for
5405 regvars. */
5406 vtop->sym = s;
5408 if (r & VT_SYM) {
5409 vtop->c.i = 0;
5410 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5411 vtop->c.i = s->enum_val;
5413 break;
5416 /* post operations */
5417 while (1) {
5418 if (tok == TOK_INC || tok == TOK_DEC) {
5419 inc(1, tok);
5420 next();
5421 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5422 int qualifiers, cumofs = 0;
5423 /* field */
5424 if (tok == TOK_ARROW)
5425 indir();
5426 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5427 test_lvalue();
5428 gaddrof();
5429 /* expect pointer on structure */
5430 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5431 expect("struct or union");
5432 if (tok == TOK_CDOUBLE)
5433 expect("field name");
5434 next();
5435 if (tok == TOK_CINT || tok == TOK_CUINT)
5436 expect("field name");
5437 s = find_field(&vtop->type, tok, &cumofs);
5438 if (!s)
5439 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5440 /* add field offset to pointer */
5441 vtop->type = char_pointer_type; /* change type to 'char *' */
5442 vpushi(cumofs + s->c);
5443 gen_op('+');
5444 /* change type to field type, and set to lvalue */
5445 vtop->type = s->type;
5446 vtop->type.t |= qualifiers;
5447 /* an array is never an lvalue */
5448 if (!(vtop->type.t & VT_ARRAY)) {
5449 vtop->r |= lvalue_type(vtop->type.t);
5450 #ifdef CONFIG_TCC_BCHECK
5451 /* if bound checking, the referenced pointer must be checked */
5452 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5453 vtop->r |= VT_MUSTBOUND;
5454 #endif
5456 next();
5457 } else if (tok == '[') {
5458 next();
5459 gexpr();
5460 gen_op('+');
5461 indir();
5462 skip(']');
5463 } else if (tok == '(') {
5464 SValue ret;
5465 Sym *sa;
5466 int nb_args, ret_nregs, ret_align, regsize, variadic;
5468 /* function call */
5469 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5470 /* pointer test (no array accepted) */
5471 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5472 vtop->type = *pointed_type(&vtop->type);
5473 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5474 goto error_func;
5475 } else {
5476 error_func:
5477 expect("function pointer");
5479 } else {
5480 vtop->r &= ~VT_LVAL; /* no lvalue */
5482 /* get return type */
5483 s = vtop->type.ref;
5484 next();
5485 sa = s->next; /* first parameter */
5486 nb_args = regsize = 0;
5487 ret.r2 = VT_CONST;
5488 /* compute first implicit argument if a structure is returned */
5489 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5490 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5491 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5492 &ret_align, &regsize);
5493 if (ret_nregs <= 0) {
5494 /* get some space for the returned structure */
5495 size = type_size(&s->type, &align);
5496 #ifdef TCC_TARGET_ARM64
5497 /* On arm64, a small struct is returned in registers.
5498 It is much easier to write it to memory if we know
5499 that we are allowed to write some extra bytes, so
5500 round the allocated space up to a power of 2: */
5501 if (size < 16)
5502 while (size & (size - 1))
5503 size = (size | (size - 1)) + 1;
5504 #endif
5505 loc = (loc - size) & -align;
5506 ret.type = s->type;
5507 ret.r = VT_LOCAL | VT_LVAL;
5508 /* pass it as 'int' to avoid structure arg passing
5509 problems */
5510 vseti(VT_LOCAL, loc);
5511 ret.c = vtop->c;
5512 if (ret_nregs < 0)
5513 vtop--;
5514 else
5515 nb_args++;
5517 } else {
5518 ret_nregs = 1;
5519 ret.type = s->type;
5522 if (ret_nregs > 0) {
5523 /* return in register */
5524 if (is_float(ret.type.t)) {
5525 ret.r = reg_fret(ret.type.t);
5526 #ifdef TCC_TARGET_X86_64
5527 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5528 ret.r2 = REG_QRET;
5529 #elif defined TCC_TARGET_RISCV64
5530 if ((ret.type.t & VT_BTYPE) == VT_LDOUBLE)
5531 ret.r2 = ret.r + 1;
5532 #endif
5533 } else {
5534 #ifndef TCC_TARGET_ARM64
5535 #ifndef TCC_TARGET_RISCV64
5536 #ifdef TCC_TARGET_X86_64
5537 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5538 #else
5539 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5540 #endif
5541 ret.r2 = REG_LRET;
5542 #endif
5543 #endif
5544 ret.r = REG_IRET;
5546 ret.c.i = 0;
5548 if (tok != ')') {
5549 for(;;) {
5550 expr_eq();
5551 gfunc_param_typed(s, sa);
5552 nb_args++;
5553 if (sa)
5554 sa = sa->next;
5555 if (tok == ')')
5556 break;
5557 skip(',');
5560 if (sa)
5561 tcc_error("too few arguments to function");
5562 skip(')');
5563 gfunc_call(nb_args);
5565 if (ret_nregs < 0) {
5566 vsetc(&ret.type, ret.r, &ret.c);
5567 #ifdef TCC_TARGET_RISCV64
5568 arch_transfer_ret_regs(1);
5569 #endif
5570 } else {
5571 /* return value */
5572 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5573 vsetc(&ret.type, r, &ret.c);
5574 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5577 /* handle packed struct return */
5578 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5579 int addr, offset;
5581 size = type_size(&s->type, &align);
5582 /* We're writing whole regs often, make sure there's enough
5583 space. Assume register size is power of 2. */
5584 if (regsize > align)
5585 align = regsize;
5586 loc = (loc - size) & -align;
5587 addr = loc;
5588 offset = 0;
5589 for (;;) {
5590 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5591 vswap();
5592 vstore();
5593 vtop--;
5594 if (--ret_nregs == 0)
5595 break;
5596 offset += regsize;
5598 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5601 if (s->f.func_noreturn)
5602 CODE_OFF();
5603 } else {
5604 break;
5609 ST_FUNC void expr_prod(void)
5611 int t;
5613 unary();
5614 while (tok == '*' || tok == '/' || tok == '%') {
5615 t = tok;
5616 next();
5617 unary();
5618 gen_op(t);
5622 ST_FUNC void expr_sum(void)
5624 int t;
5626 expr_prod();
5627 while (tok == '+' || tok == '-') {
5628 t = tok;
5629 next();
5630 expr_prod();
5631 gen_op(t);
5635 static void expr_shift(void)
5637 int t;
5639 expr_sum();
5640 while (tok == TOK_SHL || tok == TOK_SAR) {
5641 t = tok;
5642 next();
5643 expr_sum();
5644 gen_op(t);
5648 static void expr_cmp(void)
5650 int t;
5652 expr_shift();
5653 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5654 tok == TOK_ULT || tok == TOK_UGE) {
5655 t = tok;
5656 next();
5657 expr_shift();
5658 gen_op(t);
5662 static void expr_cmpeq(void)
5664 int t;
5666 expr_cmp();
5667 while (tok == TOK_EQ || tok == TOK_NE) {
5668 t = tok;
5669 next();
5670 expr_cmp();
5671 gen_op(t);
5675 static void expr_and(void)
5677 expr_cmpeq();
5678 while (tok == '&') {
5679 next();
5680 expr_cmpeq();
5681 gen_op('&');
5685 static void expr_xor(void)
5687 expr_and();
5688 while (tok == '^') {
5689 next();
5690 expr_and();
5691 gen_op('^');
5695 static void expr_or(void)
5697 expr_xor();
5698 while (tok == '|') {
5699 next();
5700 expr_xor();
5701 gen_op('|');
5705 static int condition_3way(void);
5707 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5709 int t = 0, cc = 1, f = 0, c;
5710 for(;;) {
5711 c = f ? i : condition_3way();
5712 if (c < 0) {
5713 save_regs(1), cc = 0;
5714 } else if (c != i) {
5715 nocode_wanted++, f = 1;
5717 if (tok != e_op) {
5718 if (cc || f) {
5719 vpop();
5720 vpushi(i ^ f);
5721 gsym(t);
5722 nocode_wanted -= f;
5723 } else {
5724 gvtst_set(i, t);
5726 break;
5728 if (c < 0)
5729 t = gvtst(i, t);
5730 else
5731 vpop();
5732 next();
5733 e_fn();
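/* Illustration (editor's note, not part of tcc): expr_landor() handles a whole
   chain of one operator at once; for "a && b && c" (i == 1) it evaluates each
   operand and emits a conditional jump past the rest of the chain when an
   operand is false, and when an operand is a compile-time constant equal to !i
   it switches to nocode_wanted and just pushes the known result, so e.g.
       if (0 && f()) ...
   never generates the call to f(). */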
5737 static void expr_land(void)
5739 expr_or();
5740 if (tok == TOK_LAND)
5741 expr_landor(expr_or, TOK_LAND, 1);
5744 static void expr_lor(void)
5746 expr_land();
5747 if (tok == TOK_LOR)
5748 expr_landor(expr_land, TOK_LOR, 0);
5751 /* Assuming vtop is a value used in a conditional context
5752 (i.e. compared with zero) return 0 if it's false, 1 if
5753 true and -1 if it can't be statically determined. */
5754 static int condition_3way(void)
5756 int c = -1;
5757 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5758 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5759 vdup();
5760 gen_cast_s(VT_BOOL);
5761 c = vtop->c.i;
5762 vpop();
5764 return c;
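/* Illustration (editor's note, not part of tcc): this lets '&&', '||' and '?:'
   fold statically known conditions, e.g. in
       r = (sizeof(int) == 4) ? f() : g();
   the condition is a constant on the value stack, condition_3way() returns
   0 or 1, and expr_cond() parses the dead branch under nocode_wanted instead
   of emitting a test and a jump. */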
5767 static int is_cond_bool(SValue *sv)
5769 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5770 && (sv->type.t & VT_BTYPE) == VT_INT)
5771 return (unsigned)sv->c.i < 2;
5772 if (sv->r == VT_CMP)
5773 return 1;
5774 return 0;
5777 static void expr_cond(void)
5779 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5780 SValue sv;
5781 CType type, type1, type2;
5782 int ncw_prev;
5784 expr_lor();
5785 if (tok == '?') {
5786 next();
5787 c = condition_3way();
5788 g = (tok == ':' && gnu_ext);
5789 tt = 0;
5790 if (!g) {
5791 if (c < 0) {
5792 save_regs(1);
5793 tt = gvtst(1, 0);
5794 } else {
5795 vpop();
5797 } else if (c < 0) {
5798 /* needed to avoid having different registers saved in
5799 each branch */
5800 rc = RC_INT;
5801 if (is_float(vtop->type.t)) {
5802 rc = RC_FLOAT;
5803 #ifdef TCC_TARGET_X86_64
5804 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5805 rc = RC_ST0;
5807 #elif defined TCC_TARGET_RISCV64
5808 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)
5809 rc = RC_INT;
5810 #endif
5812 gv(rc);
5813 save_regs(1);
5814 gv_dup();
5815 tt = gvtst(0, 0);
5818 ncw_prev = nocode_wanted;
5819 if (1) {
5820 if (c == 0)
5821 nocode_wanted++;
5822 if (!g)
5823 gexpr();
5825 if (c < 0 && vtop->r == VT_CMP) {
5826 t1 = gvtst(0, 0);
5827 vpushi(0);
5828 gvtst_set(0, t1);
5831 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5832 mk_pointer(&vtop->type);
5833 type1 = vtop->type;
5834 sv = *vtop; /* save value to handle it later */
5835 vtop--; /* no vpop so that FP stack is not flushed */
5837 if (g) {
5838 u = tt;
5839 } else if (c < 0) {
5840 u = gjmp(0);
5841 gsym(tt);
5842 } else
5843 u = 0;
5845 nocode_wanted = ncw_prev;
5846 if (c == 1)
5847 nocode_wanted++;
5848 skip(':');
5849 expr_cond();
5851 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5852 if (sv.r == VT_CMP) {
5853 t1 = sv.jtrue;
5854 t2 = u;
5855 } else {
5856 t1 = gvtst(0, 0);
5857 t2 = gjmp(0);
5858 gsym(u);
5859 vpushv(&sv);
5861 gvtst_set(0, t1);
5862 gvtst_set(1, t2);
5863 nocode_wanted = ncw_prev;
5864 // tcc_warning("two conditions expr_cond");
5865 return;
5868 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5869 mk_pointer(&vtop->type);
5870 type2=vtop->type;
5871 t1 = type1.t;
5872 bt1 = t1 & VT_BTYPE;
5873 t2 = type2.t;
5874 bt2 = t2 & VT_BTYPE;
5875 type.ref = NULL;
5877 /* cast operands to correct type according to ISOC rules */
5878 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5879 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5880 } else if (is_float(bt1) || is_float(bt2)) {
5881 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5882 type.t = VT_LDOUBLE;
5884 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5885 type.t = VT_DOUBLE;
5886 } else {
5887 type.t = VT_FLOAT;
5889 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5890 /* cast to biggest op */
5891 type.t = VT_LLONG | VT_LONG;
5892 if (bt1 == VT_LLONG)
5893 type.t &= t1;
5894 if (bt2 == VT_LLONG)
5895 type.t &= t2;
5896 /* convert to unsigned if it does not fit in a long long */
5897 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5898 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5899 type.t |= VT_UNSIGNED;
5900 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5901 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5902 /* If one is a null ptr constant the result type
5903 is the other. */
5904 if (is_null_pointer (vtop)) type = type1;
5905 else if (is_null_pointer (&sv)) type = type2;
5906 else if (bt1 != bt2)
5907 tcc_error("incompatible types in conditional expressions");
5908 else {
5909 CType *pt1 = pointed_type(&type1);
5910 CType *pt2 = pointed_type(&type2);
5911 int pbt1 = pt1->t & VT_BTYPE;
5912 int pbt2 = pt2->t & VT_BTYPE;
5913 int newquals, copied = 0;
5914 /* pointers to void are preferred, otherwise the
5915 pointed-to types minus qualifiers should be compatible */
5916 type = (pbt1 == VT_VOID) ? type1 : type2;
5917 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5918 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5919 tcc_warning("pointer type mismatch in conditional expression\n");
5921 /* combine qualifs */
5922 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5923 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5924 & newquals)
5926 /* copy the pointer target symbol */
5927 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5928 0, type.ref->c);
5929 copied = 1;
5930 pointed_type(&type)->t |= newquals;
5932 /* pointers to incomplete arrays get converted to
5933 pointers to completed ones if possible */
5934 if (pt1->t & VT_ARRAY
5935 && pt2->t & VT_ARRAY
5936 && pointed_type(&type)->ref->c < 0
5937 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5939 if (!copied)
5940 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5941 0, type.ref->c);
5942 pointed_type(&type)->ref =
5943 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5944 0, pointed_type(&type)->ref->c);
5945 pointed_type(&type)->ref->c =
5946 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5949 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5950 /* XXX: test structure compatibility */
5951 type = bt1 == VT_STRUCT ? type1 : type2;
5952 } else {
5953 /* integer operations */
5954 type.t = VT_INT | (VT_LONG & (t1 | t2));
5955 /* convert to unsigned if it does not fit in an integer */
5956 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5957 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5958 type.t |= VT_UNSIGNED;
5960 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5961 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5962 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5964 /* now we convert second operand */
5965 if (c != 1) {
5966 gen_cast(&type);
5967 if (islv) {
5968 mk_pointer(&vtop->type);
5969 gaddrof();
5970 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5971 gaddrof();
5974 rc = RC_INT;
5975 if (is_float(type.t)) {
5976 rc = RC_FLOAT;
5977 #ifdef TCC_TARGET_X86_64
5978 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5979 rc = RC_ST0;
5981 #elif defined TCC_TARGET_RISCV64
5982 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE)
5983 rc = RC_INT;
5984 #endif
5985 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5986 /* for long longs, we use fixed registers to avoid having
5987 to handle a complicated move */
5988 rc = RC_IRET;
5991 tt = r2 = 0;
5992 if (c < 0) {
5993 r2 = gv(rc);
5994 tt = gjmp(0);
5996 gsym(u);
5997 nocode_wanted = ncw_prev;
5999 /* this is horrible, but we must also convert first
6000 operand */
6001 if (c != 0) {
6002 *vtop = sv;
6003 gen_cast(&type);
6004 if (islv) {
6005 mk_pointer(&vtop->type);
6006 gaddrof();
6007 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6008 gaddrof();
6011 if (c < 0) {
6012 r1 = gv(rc);
6013 move_reg(r2, r1, islv ? VT_PTR : type.t);
6014 vtop->r = r2;
6015 gsym(tt);
6018 if (islv)
6019 indir();
6024 static void expr_eq(void)
6026 int t;
6028 expr_cond();
6029 if (tok == '=' ||
6030 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
6031 tok == TOK_A_XOR || tok == TOK_A_OR ||
6032 tok == TOK_A_SHL || tok == TOK_A_SAR) {
6033 test_lvalue();
6034 t = tok;
6035 next();
6036 if (t == '=') {
6037 expr_eq();
6038 } else {
6039 vdup();
6040 expr_eq();
6041 gen_op(t & 0x7f);
6043 vstore();
6047 ST_FUNC void gexpr(void)
6049 while (1) {
6050 expr_eq();
6051 if (tok != ',')
6052 break;
6053 vpop();
6054 next();
6058 /* parse a constant expression and return value in vtop. */
6059 static void expr_const1(void)
6061 const_wanted++;
6062 nocode_wanted++;
6063 expr_cond();
6064 nocode_wanted--;
6065 const_wanted--;
6068 /* parse an integer constant and return its value. */
6069 static inline int64_t expr_const64(void)
6071 int64_t c;
6072 expr_const1();
6073 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6074 expect("constant expression");
6075 c = vtop->c.i;
6076 vpop();
6077 return c;
6080 /* parse an integer constant and return its value.
6081 Complain if it doesn't fit 32bit (signed or unsigned). */
6082 ST_FUNC int expr_const(void)
6084 int c;
6085 int64_t wc = expr_const64();
6086 c = wc;
6087 if (c != wc && (unsigned)c != wc)
6088 tcc_error("constant exceeds 32 bit");
6089 return c;
6092 /* ------------------------------------------------------------------------- */
6093 /* return from function */
6095 #ifndef TCC_TARGET_ARM64
6096 static void gfunc_return(CType *func_type)
6098 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6099 CType type, ret_type;
6100 int ret_align, ret_nregs, regsize;
6101 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6102 &ret_align, &regsize);
6103 if (ret_nregs < 0) {
6104 #ifdef TCC_TARGET_RISCV64
6105 arch_transfer_ret_regs(0);
6106 #endif
6107 } else if (0 == ret_nregs) {
6108 /* if returning a structure, it must be copied to the
6109 implicit first pointer arg location */
6110 type = *func_type;
6111 mk_pointer(&type);
6112 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6113 indir();
6114 vswap();
6115 /* copy structure value to pointer */
6116 vstore();
6117 } else {
6118 /* returning structure packed into registers */
6119 int r, size, addr, align;
6120 size = type_size(func_type,&align);
6121 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6122 (vtop->c.i & (ret_align-1)))
6123 && (align & (ret_align-1))) {
6124 loc = (loc - size) & -ret_align;
6125 addr = loc;
6126 type = *func_type;
6127 vset(&type, VT_LOCAL | VT_LVAL, addr);
6128 vswap();
6129 vstore();
6130 vpop();
6131 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6133 vtop->type = ret_type;
6134 if (is_float(ret_type.t))
6135 r = rc_fret(ret_type.t);
6136 else
6137 r = RC_IRET;
6139 if (ret_nregs == 1)
6140 gv(r);
6141 else {
6142 for (;;) {
6143 vdup();
6144 gv(r);
6145 vpop();
6146 if (--ret_nregs == 0)
6147 break;
6148 /* We assume that when a structure is returned in multiple
6149 registers, their classes are consecutive values of the
6150 sequence s(n) = 2^n */
6151 r <<= 1;
6152 vtop->c.i += regsize;
6156 } else if (is_float(func_type->t)) {
6157 gv(rc_fret(func_type->t));
6158 } else {
6159 gv(RC_IRET);
6161 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6163 #endif
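/* Illustration (editor's note, not part of tcc): the target's gfunc_sret()
   decides how a struct comes back: ret_nregs == 0 means "returned in memory",
   so the value is copied through the hidden pointer argument whose address is
   in func_vc; ret_nregs > 0 means it is packed into that many registers of
   regsize bytes each; and a negative value is the RISC-V special case handled
   by arch_transfer_ret_regs(). */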
6165 static void check_func_return(void)
6167 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6168 return;
6169 if (!strcmp (funcname, "main")
6170 && (func_vt.t & VT_BTYPE) == VT_INT) {
6171 /* main returns 0 by default */
6172 vpushi(0);
6173 gen_assign_cast(&func_vt);
6174 gfunc_return(&func_vt);
6175 } else {
6176 tcc_warning("function might return no value: '%s'", funcname);
6180 /* ------------------------------------------------------------------------- */
6181 /* switch/case */
6183 static int case_cmp(const void *pa, const void *pb)
6185 int64_t a = (*(struct case_t**) pa)->v1;
6186 int64_t b = (*(struct case_t**) pb)->v1;
6187 return a < b ? -1 : a > b;
6190 static void gtst_addr(int t, int a)
6192 gsym_addr(gvtst(0, t), a);
6195 static void gcase(struct case_t **base, int len, int *bsym)
6197 struct case_t *p;
6198 int e;
6199 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6200 while (len > 8) {
6201 /* binary search */
6202 p = base[len/2];
6203 vdup();
6204 if (ll)
6205 vpushll(p->v2);
6206 else
6207 vpushi(p->v2);
6208 gen_op(TOK_LE);
6209 e = gvtst(1, 0);
6210 vdup();
6211 if (ll)
6212 vpushll(p->v1);
6213 else
6214 vpushi(p->v1);
6215 gen_op(TOK_GE);
6216 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6217 /* x < v1 */
6218 gcase(base, len/2, bsym);
6219 /* x > v2 */
6220 gsym(e);
6221 e = len/2 + 1;
6222 base += e; len -= e;
6224 /* linear scan */
6225 while (len--) {
6226 p = *base++;
6227 vdup();
6228 if (ll)
6229 vpushll(p->v2);
6230 else
6231 vpushi(p->v2);
6232 if (p->v1 == p->v2) {
6233 gen_op(TOK_EQ);
6234 gtst_addr(0, p->sym);
6235 } else {
6236 gen_op(TOK_LE);
6237 e = gvtst(1, 0);
6238 vdup();
6239 if (ll)
6240 vpushll(p->v1);
6241 else
6242 vpushi(p->v1);
6243 gen_op(TOK_GE);
6244 gtst_addr(0, p->sym);
6245 gsym(e);
6248 *bsym = gjmp(*bsym);
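/* Illustration (editor's note, not part of tcc): the sorted case list is
   dispatched with a binary search down to 8 entries and a linear scan below
   that; a GNU case range like
       case 10 ... 19:
   is stored as v1 = 10, v2 = 19 and tested with two comparisons
   (x <= v2 && x >= v1), while a plain case has v1 == v2 and uses a single
   TOK_EQ test. */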
6251 /* ------------------------------------------------------------------------- */
6252 /* __attribute__((cleanup(fn))) */
6254 static void try_call_scope_cleanup(Sym *stop)
6256 Sym *cls = cur_scope->cl.s;
6258 for (; cls != stop; cls = cls->ncl) {
6259 Sym *fs = cls->next;
6260 Sym *vs = cls->prev_tok;
6262 vpushsym(&fs->type, fs);
6263 vset(&vs->type, vs->r, vs->c);
6264 vtop->sym = vs;
6265 mk_pointer(&vtop->type);
6266 gaddrof();
6267 gfunc_call(1);
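/* Illustration (editor's note, not part of tcc): for
       void done(int *p);
       { __attribute__((cleanup(done))) int x; ... }
   each registered variable sits on cur_scope->cl.s; leaving the scope walks
   that chain and emits a call of the cleanup function with &x as the single
   argument. */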
6271 static void try_call_cleanup_goto(Sym *cleanupstate)
6273 Sym *oc, *cc;
6274 int ocd, ccd;
6276 if (!cur_scope->cl.s)
6277 return;
6279 /* search the nearest common ancestor (NCA) of both cleanup chains, given parents and initial depth */
6280 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6281 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6283 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6285 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6288 try_call_scope_cleanup(cc);
6291 /* call 'func' for each __attribute__((cleanup(func))) */
6292 static void block_cleanup(struct scope *o)
6294 int jmp = 0;
6295 Sym *g, **pg;
6296 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6297 if (g->prev_tok->r & LABEL_FORWARD) {
6298 Sym *pcl = g->next;
6299 if (!jmp)
6300 jmp = gjmp(0);
6301 gsym(pcl->jnext);
6302 try_call_scope_cleanup(o->cl.s);
6303 pcl->jnext = gjmp(0);
6304 if (!o->cl.n)
6305 goto remove_pending;
6306 g->c = o->cl.n;
6307 pg = &g->prev;
6308 } else {
6309 remove_pending:
6310 *pg = g->prev;
6311 sym_free(g);
6314 gsym(jmp);
6315 try_call_scope_cleanup(o->cl.s);
6318 /* ------------------------------------------------------------------------- */
6319 /* VLA */
6321 static void vla_restore(int loc)
6323 if (loc)
6324 gen_vla_sp_restore(loc);
6327 static void vla_leave(struct scope *o)
6329 if (o->vla.num < cur_scope->vla.num)
6330 vla_restore(o->vla.loc);
6333 /* ------------------------------------------------------------------------- */
6334 /* local scopes */
6336 void new_scope(struct scope *o)
6338 /* copy and link previous scope */
6339 *o = *cur_scope;
6340 o->prev = cur_scope;
6341 cur_scope = o;
6343 /* record local declaration stack position */
6344 o->lstk = local_stack;
6345 o->llstk = local_label_stack;
6347 ++local_scope;
6350 void prev_scope(struct scope *o, int is_expr)
6352 vla_leave(o->prev);
6354 if (o->cl.s != o->prev->cl.s)
6355 block_cleanup(o->prev);
6357 /* pop locally defined labels */
6358 label_pop(&local_label_stack, o->llstk, is_expr);
6360 /* In the is_expr case (a statement expression is finished here),
6361 vtop might refer to symbols on the local_stack. Either via the
6362 type or via vtop->sym. We can't pop those nor any that in turn
6363 might be referred to. To make it easier we don't roll back
6364 any symbols in that case; some upper level call to block() will
6365 do that. We do have to remove such symbols from the lookup
6366 tables, though. sym_pop will do that. */
6368 /* pop locally defined symbols */
6369 sym_pop(&local_stack, o->lstk, is_expr);
6371 cur_scope = o->prev;
6372 --local_scope;
6375 /* leave a scope via break/continue(/goto) */
6376 void leave_scope(struct scope *o)
6378 if (!o)
6379 return;
6380 try_call_scope_cleanup(o->cl.s);
6381 vla_leave(o);
6384 /* ------------------------------------------------------------------------- */
6385 /* call block from 'for do while' loops */
6387 static void lblock(int *bsym, int *csym)
6389 struct scope *lo = loop_scope, *co = cur_scope;
6390 int *b = co->bsym, *c = co->csym;
6391 if (csym) {
6392 co->csym = csym;
6393 loop_scope = co;
6395 co->bsym = bsym;
6396 block(0);
6397 co->bsym = b;
6398 if (csym) {
6399 co->csym = c;
6400 loop_scope = lo;
6404 static void block(int is_expr)
6406 int a, b, c, d, e, t;
6407 Sym *s;
6409 if (is_expr) {
6410 /* default return value is (void) */
6411 vpushi(0);
6412 vtop->type.t = VT_VOID;
6415 again:
6416 t = tok, next();
6418 if (t == TOK_IF) {
6419 skip('(');
6420 gexpr();
6421 skip(')');
6422 a = gvtst(1, 0);
6423 block(0);
6424 if (tok == TOK_ELSE) {
6425 d = gjmp(0);
6426 gsym(a);
6427 next();
6428 block(0);
6429 gsym(d); /* patch else jmp */
6430 } else {
6431 gsym(a);
6434 } else if (t == TOK_WHILE) {
6435 d = gind();
6436 skip('(');
6437 gexpr();
6438 skip(')');
6439 a = gvtst(1, 0);
6440 b = 0;
6441 lblock(&a, &b);
6442 gjmp_addr(d);
6443 gsym_addr(b, d);
6444 gsym(a);
6446 } else if (t == '{') {
6447 struct scope o;
6448 new_scope(&o);
6450 /* handle local labels declarations */
6451 while (tok == TOK_LABEL) {
6452 do {
6453 next();
6454 if (tok < TOK_UIDENT)
6455 expect("label identifier");
6456 label_push(&local_label_stack, tok, LABEL_DECLARED);
6457 next();
6458 } while (tok == ',');
6459 skip(';');
6462 while (tok != '}') {
6463 decl(VT_LOCAL);
6464 if (tok != '}') {
6465 if (is_expr)
6466 vpop();
6467 block(is_expr);
6471 prev_scope(&o, is_expr);
6473 if (0 == local_scope && !nocode_wanted)
6474 check_func_return();
6475 next();
6477 } else if (t == TOK_RETURN) {
6478 a = tok != ';';
6479 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6480 if (a)
6481 gexpr(), gen_assign_cast(&func_vt);
6482 leave_scope(root_scope);
6483 if (a && b)
6484 gfunc_return(&func_vt);
6485 else if (a)
6486 vtop--;
6487 else if (b)
6488 tcc_warning("'return' with no value.");
6489 skip(';');
6490 /* jump unless last stmt in top-level block */
6491 if (tok != '}' || local_scope != 1)
6492 rsym = gjmp(rsym);
6493 CODE_OFF();
6495 } else if (t == TOK_BREAK) {
6496 /* compute jump */
6497 if (!cur_scope->bsym)
6498 tcc_error("cannot break");
6499 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6500 leave_scope(loop_scope);
6501 else
6502 leave_scope(cur_switch->scope);
6503 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6504 skip(';');
6506 } else if (t == TOK_CONTINUE) {
6507 /* compute jump */
6508 if (!cur_scope->csym)
6509 tcc_error("cannot continue");
6510 leave_scope(loop_scope);
6511 *cur_scope->csym = gjmp(*cur_scope->csym);
6512 skip(';');
6514 } else if (t == TOK_FOR) {
6515 struct scope o;
6516 new_scope(&o);
6518 skip('(');
6519 if (tok != ';') {
6520 /* c99 for-loop init decl? */
6521 if (!decl0(VT_LOCAL, 1, NULL)) {
6522 /* no, regular for-loop init expr */
6523 gexpr();
6524 vpop();
6527 skip(';');
6528 a = b = 0;
6529 c = d = gind();
6530 if (tok != ';') {
6531 gexpr();
6532 a = gvtst(1, 0);
6534 skip(';');
6535 if (tok != ')') {
6536 e = gjmp(0);
6537 d = gind();
6538 gexpr();
6539 vpop();
6540 gjmp_addr(c);
6541 gsym(e);
6543 skip(')');
6544 lblock(&a, &b);
6545 gjmp_addr(d);
6546 gsym_addr(b, d);
6547 gsym(a);
6548 prev_scope(&o, 0);
6550 } else if (t == TOK_DO) {
6551 a = b = 0;
6552 d = gind();
6553 lblock(&a, &b);
6554 gsym(b);
6555 skip(TOK_WHILE);
6556 skip('(');
6557 gexpr();
6558 skip(')');
6559 skip(';');
6560 c = gvtst(0, 0);
6561 gsym_addr(c, d);
6562 gsym(a);
6564 } else if (t == TOK_SWITCH) {
6565 struct switch_t *saved, sw;
6566 SValue switchval;
6568 sw.p = NULL;
6569 sw.n = 0;
6570 sw.def_sym = 0;
6571 sw.bsym = &a;
6572 sw.scope = cur_scope;
6574 saved = cur_switch;
6575 cur_switch = &sw;
6577 skip('(');
6578 gexpr();
6579 skip(')');
6580 switchval = *vtop--;
6582 a = 0;
6583 b = gjmp(0); /* jump to first case */
6584 lblock(&a, NULL);
6585 a = gjmp(a); /* add implicit break */
6586 /* case lookup */
6587 gsym(b);
6589 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6590 for (b = 1; b < sw.n; b++)
6591 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6592 tcc_error("duplicate case value");
6594 /* Our switch table sorting is signed, so the compared
6595 value needs to be as well when it's 64bit. */
6596 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6597 switchval.type.t &= ~VT_UNSIGNED;
6598 vpushv(&switchval);
6599 gv(RC_INT);
6600 d = 0, gcase(sw.p, sw.n, &d);
6601 vpop();
6602 if (sw.def_sym)
6603 gsym_addr(d, sw.def_sym);
6604 else
6605 gsym(d);
6606 /* break label */
6607 gsym(a);
6609 dynarray_reset(&sw.p, &sw.n);
6610 cur_switch = saved;
6612 } else if (t == TOK_CASE) {
6613 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6614 if (!cur_switch)
6615 expect("switch");
6616 cr->v1 = cr->v2 = expr_const64();
6617 if (gnu_ext && tok == TOK_DOTS) {
6618 next();
6619 cr->v2 = expr_const64();
6620 if (cr->v2 < cr->v1)
6621 tcc_warning("empty case range");
6623 cr->sym = gind();
6624 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6625 skip(':');
6626 is_expr = 0;
6627 goto block_after_label;
6629 } else if (t == TOK_DEFAULT) {
6630 if (!cur_switch)
6631 expect("switch");
6632 if (cur_switch->def_sym)
6633 tcc_error("too many 'default'");
6634 cur_switch->def_sym = gind();
6635 skip(':');
6636 is_expr = 0;
6637 goto block_after_label;
6639 } else if (t == TOK_GOTO) {
6640 vla_restore(root_scope->vla.loc);
6641 if (tok == '*' && gnu_ext) {
6642 /* computed goto */
6643 next();
6644 gexpr();
6645 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6646 expect("pointer");
6647 ggoto();
6649 } else if (tok >= TOK_UIDENT) {
6650 s = label_find(tok);
6651 /* put forward definition if needed */
6652 if (!s)
6653 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6654 else if (s->r == LABEL_DECLARED)
6655 s->r = LABEL_FORWARD;
6657 if (s->r & LABEL_FORWARD) {
6658 /* start new goto chain for cleanups, linked via label->next */
6659 if (cur_scope->cl.s && !nocode_wanted) {
6660 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6661 pending_gotos->prev_tok = s;
6662 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6663 pending_gotos->next = s;
6665 s->jnext = gjmp(s->jnext);
6666 } else {
6667 try_call_cleanup_goto(s->cleanupstate);
6668 gjmp_addr(s->jnext);
6670 next();
6672 } else {
6673 expect("label identifier");
6675 skip(';');
6677 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6678 asm_instr();
6680 } else {
6681 if (tok == ':' && t >= TOK_UIDENT) {
6682 /* label case */
6683 next();
6684 s = label_find(t);
6685 if (s) {
6686 if (s->r == LABEL_DEFINED)
6687 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6688 s->r = LABEL_DEFINED;
6689 if (s->next) {
6690 Sym *pcl; /* pending cleanup goto */
6691 for (pcl = s->next; pcl; pcl = pcl->prev)
6692 gsym(pcl->jnext);
6693 sym_pop(&s->next, NULL, 0);
6694 } else
6695 gsym(s->jnext);
6696 } else {
6697 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6699 s->jnext = gind();
6700 s->cleanupstate = cur_scope->cl.s;
6702 block_after_label:
6703 vla_restore(cur_scope->vla.loc);
6704 /* we accept this, but it is a mistake */
6705 if (tok == '}') {
6706 tcc_warning("deprecated use of label at end of compound statement");
6707 } else {
6708 goto again;
6711 } else {
6712 /* expression case */
6713 if (t != ';') {
6714 unget_tok(t);
6715 if (is_expr) {
6716 vpop();
6717 gexpr();
6718 } else {
6719 gexpr();
6720 vpop();
6722 skip(';');
6728 /* This skips over a stream of tokens containing balanced {} and ()
6729 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6730 with a '{'). If STR is non-NULL, allocates and stores the skipped tokens
6731 in *STR. This doesn't check if () and {} are nested correctly,
6732 i.e. "({)}" is accepted. */
6733 static void skip_or_save_block(TokenString **str)
6735 int braces = tok == '{';
6736 int level = 0;
6737 if (str)
6738 *str = tok_str_alloc();
6740 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6741 int t;
6742 if (tok == TOK_EOF) {
6743 if (str || level > 0)
6744 tcc_error("unexpected end of file");
6745 else
6746 break;
6748 if (str)
6749 tok_str_add_tok(*str);
6750 t = tok;
6751 next();
6752 if (t == '{' || t == '(') {
6753 level++;
6754 } else if (t == '}' || t == ')') {
6755 level--;
6756 if (level == 0 && braces && t == '}')
6757 break;
6760 if (str) {
6761 tok_str_add(*str, -1);
6762 tok_str_add(*str, 0);
6766 #define EXPR_CONST 1
6767 #define EXPR_ANY 2
6769 static void parse_init_elem(int expr_type)
6771 int saved_global_expr;
6772 switch(expr_type) {
6773 case EXPR_CONST:
6774 /* compound literals must be allocated globally in this case */
6775 saved_global_expr = global_expr;
6776 global_expr = 1;
6777 expr_const1();
6778 global_expr = saved_global_expr;
6779 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6780 (compound literals). */
6781 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6782 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6783 || vtop->sym->v < SYM_FIRST_ANOM))
6784 #ifdef TCC_TARGET_PE
6785 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6786 #endif
6788 tcc_error("initializer element is not constant");
6789 break;
6790 case EXPR_ANY:
6791 expr_eq();
6792 break;
6796 /* put zeros for variable based init */
6797 static void init_putz(Section *sec, unsigned long c, int size)
6799 if (sec) {
6800 /* nothing to do because globals are already set to zero */
6801 } else {
6802 vpush_global_sym(&func_old_type, TOK_memset);
6803 vseti(VT_LOCAL, c);
6804 #ifdef TCC_TARGET_ARM
6805 vpushs(size);
6806 vpushi(0);
6807 #else
6808 vpushi(0);
6809 vpushs(size);
6810 #endif
6811 gfunc_call(3);
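/* i.e. for automatic storage this emits a call roughly equivalent to
   memset(<local at offset c>, 0, size); globals need nothing because
   static data is already zero.  (The ARM #ifdef above only changes
   the push order of the arguments.) */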
6815 #define DIF_FIRST 1
6816 #define DIF_SIZE_ONLY 2
6817 #define DIF_HAVE_ELEM 4
6819 /* t is the array or struct type. c is the array or struct
6820 address. cur_field is the pointer to the current
6821 field, for arrays the 'c' member contains the current start
6822 index. 'flags' is as in decl_initializer.
6823 'al' contains the already initialized length of the
6824 current container (starting at c). This returns the new length of that. */
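/* Designators handled here look like, e.g.:
       struct point p = { .y = 2, .x = 1 };
       int a[8] = { [2] = 5, [4 ... 6] = 7 };   (range form is a GNU extension)
   Without a designator the next field/index is taken from *cur_field. */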
6825 static int decl_designator(CType *type, Section *sec, unsigned long c,
6826 Sym **cur_field, int flags, int al)
6828 Sym *s, *f;
6829 int index, index_last, align, l, nb_elems, elem_size;
6830 unsigned long corig = c;
6832 elem_size = 0;
6833 nb_elems = 1;
6835 if (flags & DIF_HAVE_ELEM)
6836 goto no_designator;
6838 if (gnu_ext && tok >= TOK_UIDENT) {
6839 l = tok, next();
6840 if (tok == ':')
6841 goto struct_field;
6842 unget_tok(l);
6845 /* NOTE: we only support ranges for last designator */
6846 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6847 if (tok == '[') {
6848 if (!(type->t & VT_ARRAY))
6849 expect("array type");
6850 next();
6851 index = index_last = expr_const();
6852 if (tok == TOK_DOTS && gnu_ext) {
6853 next();
6854 index_last = expr_const();
6856 skip(']');
6857 s = type->ref;
6858 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6859 index_last < index)
6860 tcc_error("invalid index");
6861 if (cur_field)
6862 (*cur_field)->c = index_last;
6863 type = pointed_type(type);
6864 elem_size = type_size(type, &align);
6865 c += index * elem_size;
6866 nb_elems = index_last - index + 1;
6867 } else {
6868 int cumofs;
6869 next();
6870 l = tok;
6871 struct_field:
6872 next();
6873 if ((type->t & VT_BTYPE) != VT_STRUCT)
6874 expect("struct/union type");
6875 cumofs = 0;
6876 f = find_field(type, l, &cumofs);
6877 if (!f)
6878 expect("field");
6879 if (cur_field)
6880 *cur_field = f;
6881 type = &f->type;
6882 c += cumofs + f->c;
6884 cur_field = NULL;
6886 if (!cur_field) {
6887 if (tok == '=') {
6888 next();
6889 } else if (!gnu_ext) {
6890 expect("=");
6892 } else {
6893 no_designator:
6894 if (type->t & VT_ARRAY) {
6895 index = (*cur_field)->c;
6896 if (type->ref->c >= 0 && index >= type->ref->c)
6897 tcc_error("index too large");
6898 type = pointed_type(type);
6899 c += index * type_size(type, &align);
6900 } else {
6901 f = *cur_field;
6902 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6903 *cur_field = f = f->next;
6904 if (!f)
6905 tcc_error("too many field init");
6906 type = &f->type;
6907 c += f->c;
6910 /* must put zero in holes (note that doing it that way
6911 ensures that it even works with designators) */
6912 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6913 init_putz(sec, corig + al, c - corig - al);
6914 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6916 /* XXX: make it more general */
6917 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6918 unsigned long c_end;
6919 uint8_t *src, *dst;
6920 int i;
6922 if (!sec) {
6923 vset(type, VT_LOCAL|VT_LVAL, c);
6924 for (i = 1; i < nb_elems; i++) {
6925 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6926 vswap();
6927 vstore();
6929 vpop();
6930 } else if (!NODATA_WANTED) {
6931 c_end = c + nb_elems * elem_size;
6932 if (c_end > sec->data_allocated)
6933 section_realloc(sec, c_end);
6934 src = sec->data + c;
6935 dst = src;
6936 for(i = 1; i < nb_elems; i++) {
6937 dst += elem_size;
6938 memcpy(dst, src, elem_size);
6942 c += nb_elems * type_size(type, &align);
6943 if (c - corig > al)
6944 al = c - corig;
6945 return al;
6948 /* store a value or an expression directly in global data or in local array */
6949 static void init_putv(CType *type, Section *sec, unsigned long c)
6951 int bt;
6952 void *ptr;
6953 CType dtype;
6955 dtype = *type;
6956 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6958 if (sec) {
6959 int size, align;
6960 /* XXX: not portable */
6961 /* XXX: generate error if incorrect relocation */
6962 gen_assign_cast(&dtype);
6963 bt = type->t & VT_BTYPE;
6965 if ((vtop->r & VT_SYM)
6966 && bt != VT_PTR
6967 && bt != VT_FUNC
6968 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6969 || (type->t & VT_BITFIELD))
6970 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6972 tcc_error("initializer element is not computable at load time");
6974 if (NODATA_WANTED) {
6975 vtop--;
6976 return;
6979 size = type_size(type, &align);
6980 section_reserve(sec, c + size);
6981 ptr = sec->data + c;
6983 /* XXX: make code faster ? */
6984 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6985 vtop->sym->v >= SYM_FIRST_ANOM &&
6986 /* XXX This rejects compound literals like
6987 '(void *){ptr}'. The problem is that '&sym' is
6988 represented the same way, which would be ruled out
6989 by the SYM_FIRST_ANOM check above, but also '"string"'
6990 in 'char *p = "string"' is represented the same
6991 with the type being VT_PTR and the symbol being an
6992 anonymous one. That is, there's no difference in vtop
6993 between '(void *){x}' and '&(void *){x}'. Ignore
6994 pointer typed entities here. Hopefully no real code
6995 will ever use compound literals with scalar type. */
6996 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6997 /* These come from compound literals, memcpy stuff over. */
6998 Section *ssec;
6999 ElfSym *esym;
7000 ElfW_Rel *rel;
7001 esym = elfsym(vtop->sym);
7002 ssec = tcc_state->sections[esym->st_shndx];
7003 memmove (ptr, ssec->data + esym->st_value, size);
7004 if (ssec->reloc) {
7005 /* We need to copy over all memory contents, and that
7006 includes relocations. Use the fact that relocs are
7007 created in order, so look from the end of relocs
7008 until we hit one before the copied region. */
7009 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7010 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7011 while (num_relocs--) {
7012 rel--;
7013 if (rel->r_offset >= esym->st_value + size)
7014 continue;
7015 if (rel->r_offset < esym->st_value)
7016 break;
7017 /* Note: if the same fields are initialized multiple
7018 times (possible with designators) then we possibly
7019 add multiple relocations for the same offset here.
7020 That would lead to wrong code; the last reloc needs
7021 to win. We clean this up later after the whole
7022 initializer is parsed. */
7023 put_elf_reloca(symtab_section, sec,
7024 c + rel->r_offset - esym->st_value,
7025 ELFW(R_TYPE)(rel->r_info),
7026 ELFW(R_SYM)(rel->r_info),
7027 #if PTR_SIZE == 8
7028 rel->r_addend
7029 #else
7030 0
7031 #endif
7032 );
7033 }
7034 }
7035 } else {
7036 if (type->t & VT_BITFIELD) {
7037 int bit_pos, bit_size, bits, n;
7038 unsigned char *p, v, m;
7039 bit_pos = BIT_POS(vtop->type.t);
7040 bit_size = BIT_SIZE(vtop->type.t);
7041 p = (unsigned char*)ptr + (bit_pos >> 3);
7042 bit_pos &= 7, bits = 0;
7043 while (bit_size) {
7044 n = 8 - bit_pos;
7045 if (n > bit_size)
7046 n = bit_size;
7047 v = vtop->c.i >> bits << bit_pos;
7048 m = ((1 << n) - 1) << bit_pos;
7049 *p = (*p & ~m) | (v & m);
7050 bits += n, bit_size -= n, bit_pos = 0, ++p;
7052 } else
7053 switch(bt) {
7054 /* XXX: when cross-compiling we assume that each type has the
7055 same representation on host and target, which is likely to
7056 be wrong in the case of long double */
7057 case VT_BOOL:
7058 vtop->c.i = vtop->c.i != 0; /* fall through: a bool is stored like a byte */
7059 case VT_BYTE:
7060 *(char *)ptr |= vtop->c.i;
7061 break;
7062 case VT_SHORT:
7063 *(short *)ptr |= vtop->c.i;
7064 break;
7065 case VT_FLOAT:
7066 *(float*)ptr = vtop->c.f;
7067 break;
7068 case VT_DOUBLE:
7069 *(double *)ptr = vtop->c.d;
7070 break;
7071 case VT_LDOUBLE:
7072 #if defined TCC_IS_NATIVE_387
7073 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7074 memcpy(ptr, &vtop->c.ld, 10);
7075 #ifdef __TINYC__
7076 else if (sizeof (long double) == sizeof (double))
7077 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7078 #endif
7079 else if (vtop->c.ld == 0.0)
7080 ; /* zero: nothing to write */
7081 else
7082 #endif
7083 if (sizeof(long double) == LDOUBLE_SIZE)
7084 *(long double*)ptr = vtop->c.ld;
7085 else if (sizeof(double) == LDOUBLE_SIZE)
7086 *(double *)ptr = (double)vtop->c.ld;
7087 else
7088 tcc_error("can't cross compile long double constants");
7089 break;
7090 #if PTR_SIZE != 8
7091 case VT_LLONG:
7092 *(long long *)ptr |= vtop->c.i;
7093 break;
7094 #else
7095 case VT_LLONG:
7096 #endif
7097 case VT_PTR:
7098 {
7099 addr_t val = vtop->c.i;
7100 #if PTR_SIZE == 8
7101 if (vtop->r & VT_SYM)
7102 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7103 else
7104 *(addr_t *)ptr |= val;
7105 #else
7106 if (vtop->r & VT_SYM)
7107 greloc(sec, vtop->sym, c, R_DATA_PTR);
7108 *(addr_t *)ptr |= val;
7109 #endif
7110 break;
7111 }
7112 default:
7113 {
7114 int val = vtop->c.i;
7115 #if PTR_SIZE == 8
7116 if (vtop->r & VT_SYM)
7117 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7118 else
7119 *(int *)ptr |= val;
7120 #else
7121 if (vtop->r & VT_SYM)
7122 greloc(sec, vtop->sym, c, R_DATA_PTR);
7123 *(int *)ptr |= val;
7124 #endif
7125 break;
7126 }
7127 }
7129 vtop--;
7130 } else {
7131 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7132 vswap();
7133 vstore();
7134 vpop();
7138 /* 't' contains the type and storage info. 'c' is the offset of the
7139 object in section 'sec'. If 'sec' is NULL, it means stack based
7140 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7141 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7142 size only evaluation is wanted (only for arrays). */
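/* The DIF_SIZE_ONLY pass is used for declarations of unknown size,
   e.g. 'int a[] = {1, 2, 3};': the saved initializer tokens are first
   replayed only to compute the element count (patching type->ref->c),
   then parsed a second time to actually store the values. */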
7143 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7144 int flags)
7146 int len, n, no_oblock, nb, i;
7147 int size1, align1;
7148 Sym *s, *f;
7149 Sym indexsym;
7150 CType *t1;
7152 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7153 /* In case of strings we have special handling for arrays, so
7154 don't consume them as initializer value (which would commit them
7155 to some anonymous symbol). */
7156 tok != TOK_LSTR && tok != TOK_STR &&
7157 !(flags & DIF_SIZE_ONLY)) {
7158 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7159 flags |= DIF_HAVE_ELEM;
7162 if ((flags & DIF_HAVE_ELEM) &&
7163 !(type->t & VT_ARRAY) &&
7164 /* Use i_c_parameter_t to strip toplevel qualifiers.
7165 The source type might have VT_CONSTANT set, which is
7166 of course assignable to non-const elements. */
7167 is_compatible_unqualified_types(type, &vtop->type)) {
7168 init_putv(type, sec, c);
7169 } else if (type->t & VT_ARRAY) {
7170 s = type->ref;
7171 n = s->c;
7172 t1 = pointed_type(type);
7173 size1 = type_size(t1, &align1);
7175 no_oblock = 1;
7176 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7177 tok == '{') {
7178 if (tok != '{')
7179 tcc_error("character array initializer must be a literal,"
7180 " optionally enclosed in braces");
7181 skip('{');
7182 no_oblock = 0;
7185 /* only parse strings here if the type is correct (otherwise:
7186 handle them as ((w)char *) expressions) */
7187 if ((tok == TOK_LSTR &&
7188 #ifdef TCC_TARGET_PE
7189 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7190 #else
7191 (t1->t & VT_BTYPE) == VT_INT
7192 #endif
7193 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7194 len = 0;
7195 while (tok == TOK_STR || tok == TOK_LSTR) {
7196 int cstr_len, ch;
7198 /* compute maximum number of chars wanted */
7199 if (tok == TOK_STR)
7200 cstr_len = tokc.str.size;
7201 else
7202 cstr_len = tokc.str.size / sizeof(nwchar_t);
7203 cstr_len--;
7204 nb = cstr_len;
7205 if (n >= 0 && nb > (n - len))
7206 nb = n - len;
7207 if (!(flags & DIF_SIZE_ONLY)) {
7208 if (cstr_len > nb)
7209 tcc_warning("initializer-string for array is too long");
7210 /* in order to go faster in the common case (a char
7211 string in a global variable), we handle it
7212 specifically */
7213 if (sec && tok == TOK_STR && size1 == 1) {
7214 if (!NODATA_WANTED)
7215 memcpy(sec->data + c + len, tokc.str.data, nb);
7216 } else {
7217 for(i=0;i<nb;i++) {
7218 if (tok == TOK_STR)
7219 ch = ((unsigned char *)tokc.str.data)[i];
7220 else
7221 ch = ((nwchar_t *)tokc.str.data)[i];
7222 vpushi(ch);
7223 init_putv(t1, sec, c + (len + i) * size1);
7227 len += nb;
7228 next();
7230 /* only add trailing zero if enough storage (no
7231 warning in this case since it is standard) */
7232 if (n < 0 || len < n) {
7233 if (!(flags & DIF_SIZE_ONLY)) {
7234 vpushi(0);
7235 init_putv(t1, sec, c + (len * size1));
7237 len++;
7239 len *= size1;
7240 } else {
7241 indexsym.c = 0;
7242 f = &indexsym;
7244 do_init_list:
7245 len = 0;
7246 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7247 len = decl_designator(type, sec, c, &f, flags, len);
7248 flags &= ~DIF_HAVE_ELEM;
7249 if (type->t & VT_ARRAY) {
7250 ++indexsym.c;
7251 /* special test for multi dimensional arrays (may not
7252 be strictly correct if designators are used at the
7253 same time) */
7254 if (no_oblock && len >= n*size1)
7255 break;
7256 } else {
7257 if (s->type.t == VT_UNION)
7258 f = NULL;
7259 else
7260 f = f->next;
7261 if (no_oblock && f == NULL)
7262 break;
7265 if (tok == '}')
7266 break;
7267 skip(',');
7270 /* put zeros at the end */
7271 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7272 init_putz(sec, c + len, n*size1 - len);
7273 if (!no_oblock)
7274 skip('}');
7275 /* patch type size if needed, which happens only for array types */
7276 if (n < 0)
7277 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7278 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7279 size1 = 1;
7280 no_oblock = 1;
7281 if ((flags & DIF_FIRST) || tok == '{') {
7282 skip('{');
7283 no_oblock = 0;
7285 s = type->ref;
7286 f = s->next;
7287 n = s->c;
7288 goto do_init_list;
7289 } else if (tok == '{') {
7290 if (flags & DIF_HAVE_ELEM)
7291 skip(';');
7292 next();
7293 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7294 skip('}');
7295 } else if ((flags & DIF_SIZE_ONLY)) {
7296 /* If we supported only ISO C we wouldn't have to accept calling
7297 this on anything other than an array if DIF_SIZE_ONLY (and even then
7298 only on the outermost level, so no recursion would be needed),
7299 because initializing a flex array member isn't supported.
7300 But GNU C supports it, so we need to recurse even into
7301 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7302 /* just skip expression */
7303 skip_or_save_block(NULL);
7304 } else {
7305 if (!(flags & DIF_HAVE_ELEM)) {
7306 /* This should happen only when we haven't parsed
7307 the init element above for fear of committing a
7308 string constant to memory too early. */
7309 if (tok != TOK_STR && tok != TOK_LSTR)
7310 expect("string constant");
7311 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7313 init_putv(type, sec, c);
7317 /* parse an initializer for type 't' if 'has_init' is non zero, and
7318 allocate space in local or global data space ('r' is either
7319 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7320 variable 'v' of scope 'scope' is declared before initializers
7321 are parsed. If 'v' is zero, then a reference to the new object
7322 is put in the value stack. If 'has_init' is 2, a special parsing
7323 is done to handle string constants. */
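/* In the 'has_init == 2' mode only adjacent string tokens are
   collected, e.g. for a concatenated literal such as "ab" "cd"
   that is being given its own (anonymous) storage. */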
7324 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7325 int has_init, int v, int scope)
7327 int size, align, addr;
7328 TokenString *init_str = NULL;
7330 Section *sec;
7331 Sym *flexible_array;
7332 Sym *sym = NULL;
7333 int saved_nocode_wanted = nocode_wanted;
7334 #ifdef CONFIG_TCC_BCHECK
7335 int bcheck;
7336 #endif
7338 /* Always allocate static or global variables */
7339 if (v && (r & VT_VALMASK) == VT_CONST)
7340 nocode_wanted |= 0x80000000;
7342 #ifdef CONFIG_TCC_BCHECK
7343 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7344 #endif
7346 flexible_array = NULL;
7347 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7348 Sym *field = type->ref->next;
7349 if (field) {
7350 while (field->next)
7351 field = field->next;
7352 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7353 flexible_array = field;
7357 size = type_size(type, &align);
7358 /* If unknown size, we must evaluate it before
7359 evaluating initializers because
7360 initializers can generate global data too
7361 (e.g. string pointers or ISOC99 compound
7362 literals). It also simplifies the handling
7363 of local initializers */
7364 if (size < 0 || (flexible_array && has_init)) {
7365 if (!has_init)
7366 tcc_error("unknown type size");
7367 /* get all init string */
7368 if (has_init == 2) {
7369 init_str = tok_str_alloc();
7370 /* only get strings */
7371 while (tok == TOK_STR || tok == TOK_LSTR) {
7372 tok_str_add_tok(init_str);
7373 next();
7375 tok_str_add(init_str, -1);
7376 tok_str_add(init_str, 0);
7377 } else {
7378 skip_or_save_block(&init_str);
7380 unget_tok(0);
7382 /* compute size */
7383 begin_macro(init_str, 1);
7384 next();
7385 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7386 /* prepare second initializer parsing */
7387 macro_ptr = init_str->str;
7388 next();
7390 /* if still unknown size, error */
7391 size = type_size(type, &align);
7392 if (size < 0)
7393 tcc_error("unknown type size");
7395 /* If there's a flex member and it was used in the initializer,
7396 adjust the size. */
7397 if (flexible_array &&
7398 flexible_array->type.ref->c > 0)
7399 size += flexible_array->type.ref->c
7400 * pointed_size(&flexible_array->type);
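/* e.g. with 'struct s { int n; char d[]; } v = { 3, "ab" };' the
   flex member's element count is filled in by the size-only pass
   above, and the extra bytes are added to 'size' here. */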
7401 /* take into account specified alignment if bigger */
7402 if (ad->a.aligned) {
7403 int speca = 1 << (ad->a.aligned - 1);
7404 if (speca > align)
7405 align = speca;
7406 } else if (ad->a.packed) {
7407 align = 1;
7410 if (!v && NODATA_WANTED)
7411 size = 0, align = 1;
7413 if ((r & VT_VALMASK) == VT_LOCAL) {
7414 sec = NULL;
7415 #ifdef CONFIG_TCC_BCHECK
7416 if (bcheck && (type->t & VT_ARRAY)) {
7417 loc--;
7419 #endif
7420 loc = (loc - size) & -align;
7421 addr = loc;
7422 #ifdef CONFIG_TCC_BCHECK
7423 /* handles bounds */
7424 /* XXX: currently, since we do only one pass, we cannot track
7425 '&' operators, so we add only arrays */
7426 if (bcheck && (type->t & VT_ARRAY)) {
7427 addr_t *bounds_ptr;
7428 /* add padding between regions */
7429 loc--;
7430 /* then add local bound info */
7431 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7432 bounds_ptr[0] = addr;
7433 bounds_ptr[1] = size;
7435 #endif
7436 if (v) {
7437 /* local variable */
7438 #ifdef CONFIG_TCC_ASM
7439 if (ad->asm_label) {
7440 int reg = asm_parse_regvar(ad->asm_label);
7441 if (reg >= 0)
7442 r = (r & ~VT_VALMASK) | reg;
7444 #endif
7445 sym = sym_push(v, type, r, addr);
7446 if (ad->cleanup_func) {
7447 Sym *cls = sym_push2(&all_cleanups,
7448 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7449 cls->prev_tok = sym;
7450 cls->next = ad->cleanup_func;
7451 cls->ncl = cur_scope->cl.s;
7452 cur_scope->cl.s = cls;
7455 sym->a = ad->a;
7456 } else {
7457 /* push local reference */
7458 vset(type, r, addr);
7460 } else {
7461 if (v && scope == VT_CONST) {
7462 /* see if the symbol was already defined */
7463 sym = sym_find(v);
7464 if (sym) {
7465 patch_storage(sym, ad, type);
7466 /* we accept several definitions of the same global variable. */
7467 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7468 goto no_alloc;
7472 /* allocate symbol in corresponding section */
7473 sec = ad->section;
7474 if (!sec) {
7475 if (has_init)
7476 sec = data_section;
7477 else if (tcc_state->nocommon)
7478 sec = bss_section;
7481 if (sec) {
7482 addr = section_add(sec, size, align);
7483 #ifdef CONFIG_TCC_BCHECK
7484 /* add padding if bound check */
7485 if (bcheck)
7486 section_add(sec, 1, 1);
7487 #endif
7488 } else {
7489 addr = align; /* SHN_COMMON is special, symbol value is align */
7490 sec = common_section;
7493 if (v) {
7494 if (!sym) {
7495 sym = sym_push(v, type, r | VT_SYM, 0);
7496 patch_storage(sym, ad, NULL);
7498 /* update symbol definition */
7499 put_extern_sym(sym, sec, addr, size);
7500 } else {
7501 /* push global reference */
7502 vpush_ref(type, sec, addr, size);
7503 sym = vtop->sym;
7504 vtop->r |= r;
7507 #ifdef CONFIG_TCC_BCHECK
7508 /* handles bounds now because the symbol must be defined
7509 before the relocation */
7510 if (bcheck) {
7511 addr_t *bounds_ptr;
7513 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7514 /* then add global bound info */
7515 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7516 bounds_ptr[0] = 0; /* relocated */
7517 bounds_ptr[1] = size;
7519 #endif
7522 if (type->t & VT_VLA) {
7523 int a;
7525 if (NODATA_WANTED)
7526 goto no_alloc;
7528 /* save current stack pointer */
7529 if (root_scope->vla.loc == 0) {
7530 struct scope *v = cur_scope;
7531 gen_vla_sp_save(loc -= PTR_SIZE);
7532 do v->vla.loc = loc; while ((v = v->prev));
7535 vla_runtime_type_size(type, &a);
7536 gen_vla_alloc(type, a);
7537 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7538 /* on _WIN64, because of the function args scratch area, the
7539 result of alloca differs from RSP and is returned in RAX. */
7540 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7541 #endif
7542 gen_vla_sp_save(addr);
7543 cur_scope->vla.loc = addr;
7544 cur_scope->vla.num++;
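/* Sketch of the VLA case, e.g. 'int a[n];': the entry stack pointer
   is saved once per function (root_scope->vla.loc), the runtime size
   is computed via vla_runtime_type_size(), gen_vla_alloc() adjusts
   the stack, and the resulting pointer is remembered so that a later
   vla_restore() (goto/break out of the scope) can unwind it. */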
7546 } else if (has_init) {
7547 size_t oldreloc_offset = 0;
7548 if (sec && sec->reloc)
7549 oldreloc_offset = sec->reloc->data_offset;
7550 decl_initializer(type, sec, addr, DIF_FIRST);
7551 if (sec && sec->reloc)
7552 squeeze_multi_relocs(sec, oldreloc_offset);
7553 /* patch flexible array member size back to -1, */
7554 /* for possible subsequent similar declarations */
7555 if (flexible_array)
7556 flexible_array->type.ref->c = -1;
7559 no_alloc:
7560 /* restore parse state if needed */
7561 if (init_str) {
7562 end_macro();
7563 next();
7566 nocode_wanted = saved_nocode_wanted;
7569 /* parse a function defined by symbol 'sym' and generate its code in
7570 'cur_text_section' */
7571 static void gen_function(Sym *sym)
7573 /* Initialize VLA state */
7574 struct scope f = { 0 };
7575 cur_scope = root_scope = &f;
7577 nocode_wanted = 0;
7578 ind = cur_text_section->data_offset;
7579 if (sym->a.aligned) {
7580 size_t newoff = section_add(cur_text_section, 0,
7581 1 << (sym->a.aligned - 1));
7582 gen_fill_nops(newoff - ind);
7584 /* NOTE: we patch the symbol size later */
7585 put_extern_sym(sym, cur_text_section, ind, 0);
7587 funcname = get_tok_str(sym->v, NULL);
7588 func_ind = ind;
7590 /* put debug symbol */
7591 tcc_debug_funcstart(tcc_state, sym);
7592 /* push a dummy symbol to enable local sym storage */
7593 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7594 local_scope = 1; /* for function parameters */
7595 gfunc_prolog(&sym->type);
7596 local_scope = 0;
7597 rsym = 0;
7598 clear_temp_local_var_list();
7599 block(0);
7600 gsym(rsym);
7601 nocode_wanted = 0;
7602 gfunc_epilog();
7603 cur_text_section->data_offset = ind;
7604 /* reset local stack */
7605 sym_pop(&local_stack, NULL, 0);
7606 local_scope = 0;
7607 label_pop(&global_label_stack, NULL, 0);
7608 sym_pop(&all_cleanups, NULL, 0);
7609 /* patch symbol size */
7610 elfsym(sym)->st_size = ind - func_ind;
7611 /* end of function */
7612 tcc_debug_funcend(tcc_state, ind - func_ind);
7613 /* It's better to crash than to generate wrong code */
7614 cur_text_section = NULL;
7615 funcname = ""; /* for safety */
7616 func_vt.t = VT_VOID; /* for safety */
7617 func_var = 0; /* for safety */
7618 ind = 0; /* for safety */
7619 nocode_wanted = 0x80000000;
7620 check_vstack();
7623 static void gen_inline_functions(TCCState *s)
7625 Sym *sym;
7626 int inline_generated, i;
7627 struct InlineFunc *fn;
7629 tcc_open_bf(s, ":inline:", 0);
7630 /* iterate while inline functions are referenced */
7631 do {
7632 inline_generated = 0;
7633 for (i = 0; i < s->nb_inline_fns; ++i) {
7634 fn = s->inline_fns[i];
7635 sym = fn->sym;
7636 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7637 /* the function was used or forced (and then not internal):
7638 generate its code and convert it to a normal function */
7639 fn->sym = NULL;
7640 if (file)
7641 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7642 begin_macro(fn->func_str, 1);
7643 next();
7644 cur_text_section = text_section;
7645 gen_function(sym);
7646 end_macro();
7648 inline_generated = 1;
7651 } while (inline_generated);
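/* The loop is a fixpoint: emitting one inline function can itself
   reference another not-yet-emitted inline function, which then
   shows up as used on the next pass. */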
7652 tcc_close();
7655 ST_FUNC void free_inline_functions(TCCState *s)
7657 int i;
7658 /* free tokens of unused inline functions */
7659 for (i = 0; i < s->nb_inline_fns; ++i) {
7660 struct InlineFunc *fn = s->inline_fns[i];
7661 if (fn->sym)
7662 tok_str_free(fn->func_str);
7664 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7667 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7668 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7669 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7671 int v, has_init, r;
7672 CType type, btype;
7673 Sym *sym;
7674 AttributeDef ad, adbase;
7676 while (1) {
7677 if (tok == TOK_STATIC_ASSERT) {
7678 int c;
7680 next();
7681 skip('(');
7682 c = expr_const();
7683 skip(',');
7684 if (c == 0)
7685 tcc_error("%s", get_tok_str(tok, &tokc));
7686 next();
7687 skip(')');
7688 skip(';');
7689 continue;
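/* e.g. '_Static_assert(sizeof(int) == 4, "unexpected int size");'
   -- only the constant condition is evaluated here; the message
   token is reported via get_tok_str() when the assertion fails. */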
7691 if (!parse_btype(&btype, &adbase)) {
7692 if (is_for_loop_init)
7693 return 0;
7694 /* skip redundant ';' if not in old parameter decl scope */
7695 if (tok == ';' && l != VT_CMP) {
7696 next();
7697 continue;
7699 if (l != VT_CONST)
7700 break;
7701 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7702 /* global asm block */
7703 asm_global_instr();
7704 continue;
7706 if (tok >= TOK_UIDENT) {
7707 /* special test for old K&R protos without explicit int
7708 type. Only accepted when defining global data */
7709 btype.t = VT_INT;
7710 } else {
7711 if (tok != TOK_EOF)
7712 expect("declaration");
7713 break;
7716 if (tok == ';') {
7717 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7718 int v = btype.ref->v;
7719 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7720 tcc_warning("unnamed struct/union that defines no instances");
7721 next();
7722 continue;
7724 if (IS_ENUM(btype.t)) {
7725 next();
7726 continue;
7729 while (1) { /* iterate through each declaration */
7730 type = btype;
7731 /* If the base type itself was an array type of unspecified
7732 size (like in 'typedef int arr[]; arr x = {1};') then
7733 we will overwrite the unknown size by the real one for
7734 this decl. We need to unshare the ref symbol holding
7735 that size. */
7736 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7737 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7739 ad = adbase;
7740 type_decl(&type, &ad, &v, TYPE_DIRECT);
7741 #if 0
7743 char buf[500];
7744 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7745 printf("type = '%s'\n", buf);
7747 #endif
7748 if ((type.t & VT_BTYPE) == VT_FUNC) {
7749 /* if old style function prototype, we accept a
7750 declaration list */
7751 sym = type.ref;
7752 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7753 decl0(VT_CMP, 0, sym);
7754 /* always compile 'extern inline' */
7755 if (type.t & VT_EXTERN)
7756 type.t &= ~VT_INLINE;
7759 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7760 ad.asm_label = asm_label_instr();
7761 /* parse one last attribute list, after asm label */
7762 parse_attribute(&ad);
7763 #if 0
7764 /* gcc does not allow __asm__("label") with function definition,
7765 but why not ... */
7766 if (tok == '{')
7767 expect(";");
7768 #endif
7771 #ifdef TCC_TARGET_PE
7772 if (ad.a.dllimport || ad.a.dllexport) {
7773 if (type.t & VT_STATIC)
7774 tcc_error("cannot have dll linkage with static");
7775 if (type.t & VT_TYPEDEF) {
7776 tcc_warning("'%s' attribute ignored for typedef",
7777 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7778 (ad.a.dllexport = 0, "dllexport"));
7779 } else if (ad.a.dllimport) {
7780 if ((type.t & VT_BTYPE) == VT_FUNC)
7781 ad.a.dllimport = 0;
7782 else
7783 type.t |= VT_EXTERN;
7786 #endif
7787 if (tok == '{') {
7788 if (l != VT_CONST)
7789 tcc_error("cannot use local functions");
7790 if ((type.t & VT_BTYPE) != VT_FUNC)
7791 expect("function definition");
7793 /* reject abstract declarators in function definitions;
7794 make old style params without decl have int type */
7795 sym = type.ref;
7796 while ((sym = sym->next) != NULL) {
7797 if (!(sym->v & ~SYM_FIELD))
7798 expect("identifier");
7799 if (sym->type.t == VT_VOID)
7800 sym->type = int_type;
7803 /* put function symbol */
7804 type.t &= ~VT_EXTERN;
7805 sym = external_sym(v, &type, 0, &ad);
7806 /* static inline functions are just recorded as a kind
7807 of macro. Their code will be emitted at the end of
7808 the compilation unit only if they are used */
7809 if (sym->type.t & VT_INLINE) {
7810 struct InlineFunc *fn;
7811 const char *filename;
7813 filename = file ? file->filename : "";
7814 fn = tcc_malloc(sizeof *fn + strlen(filename));
7815 strcpy(fn->filename, filename);
7816 fn->sym = sym;
7817 skip_or_save_block(&fn->func_str);
7818 dynarray_add(&tcc_state->inline_fns,
7819 &tcc_state->nb_inline_fns, fn);
7820 } else {
7821 /* compute text section */
7822 cur_text_section = ad.section;
7823 if (!cur_text_section)
7824 cur_text_section = text_section;
7825 gen_function(sym);
7827 break;
7828 } else {
7829 if (l == VT_CMP) {
7830 /* find parameter in function parameter list */
7831 for (sym = func_sym->next; sym; sym = sym->next)
7832 if ((sym->v & ~SYM_FIELD) == v)
7833 goto found;
7834 tcc_error("declaration for parameter '%s' but no such parameter",
7835 get_tok_str(v, NULL));
7836 found:
7837 if (type.t & VT_STORAGE) /* 'register' is okay */
7838 tcc_error("storage class specified for '%s'",
7839 get_tok_str(v, NULL));
7840 if (sym->type.t != VT_VOID)
7841 tcc_error("redefinition of parameter '%s'",
7842 get_tok_str(v, NULL));
7843 convert_parameter_type(&type);
7844 sym->type = type;
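/* This branch (l == VT_CMP) handles old-style definitions such as
       int f(a, p) int a; char *p; { ... }
   where each declaration names a parameter from the identifier list. */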
7845 } else if (type.t & VT_TYPEDEF) {
7846 /* save typedefed type */
7847 /* XXX: test storage specifiers ? */
7848 sym = sym_find(v);
7849 if (sym && sym->sym_scope == local_scope) {
7850 if (!is_compatible_types(&sym->type, &type)
7851 || !(sym->type.t & VT_TYPEDEF))
7852 tcc_error("incompatible redefinition of '%s'",
7853 get_tok_str(v, NULL));
7854 sym->type = type;
7855 } else {
7856 sym = sym_push(v, &type, 0, 0);
7858 sym->a = ad.a;
7859 sym->f = ad.f;
7860 } else if ((type.t & VT_BTYPE) == VT_VOID
7861 && !(type.t & VT_EXTERN)) {
7862 tcc_error("declaration of void object");
7863 } else {
7864 r = 0;
7865 if ((type.t & VT_BTYPE) == VT_FUNC) {
7866 /* external function definition */
7867 /* specific case for func_call attribute */
7868 type.ref->f = ad.f;
7869 } else if (!(type.t & VT_ARRAY)) {
7870 /* not lvalue if array */
7871 r |= lvalue_type(type.t);
7873 has_init = (tok == '=');
7874 if (has_init && (type.t & VT_VLA))
7875 tcc_error("variable length array cannot be initialized");
7876 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7877 || (type.t & VT_BTYPE) == VT_FUNC
7878 /* as with GCC, uninitialized global arrays with no size
7879 are considered extern: */
7880 || ((type.t & VT_ARRAY) && !has_init
7881 && l == VT_CONST && type.ref->c < 0)
7883 /* external variable or function */
7884 type.t |= VT_EXTERN;
7885 sym = external_sym(v, &type, r, &ad);
7886 if (ad.alias_target) {
7887 ElfSym *esym;
7888 Sym *alias_target;
7889 alias_target = sym_find(ad.alias_target);
7890 esym = elfsym(alias_target);
7891 if (!esym)
7892 tcc_error("unsupported forward __alias__ attribute");
7893 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
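/* e.g. 'void real(void); void fake(void) __attribute__((alias("real")));'
   -- the alias target must already be known at this point, otherwise
   the "unsupported forward __alias__ attribute" error above is raised. */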
7895 } else {
7896 if (type.t & VT_STATIC)
7897 r |= VT_CONST;
7898 else
7899 r |= l;
7900 if (has_init)
7901 next();
7902 else if (l == VT_CONST)
7903 /* uninitialized global variables may be overridden */
7904 type.t |= VT_EXTERN;
7905 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7908 if (tok != ',') {
7909 if (is_for_loop_init)
7910 return 1;
7911 skip(';');
7912 break;
7914 next();
7918 return 0;
7921 static void decl(int l)
7923 decl0(l, 0, NULL);
7926 /* ------------------------------------------------------------------------- */
7927 #undef gjmp_addr
7928 #undef gjmp
7929 /* ------------------------------------------------------------------------- */