[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack location of the variable that holds the saved stack pointer whenever it is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void unary_type(CType *type);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static void expr_type(CType *type);
90 static inline int64_t expr_const64(void);
91 ST_FUNC void vpush64(int ty, unsigned long long v);
92 ST_FUNC void vpush(CType *type);
93 ST_FUNC int gvtst(int inv, int t);
94 ST_FUNC int is_btype_size(int bt);
95 static void gen_inline_functions(TCCState *s);
97 ST_INLN int is_float(int t)
99 int bt;
100 bt = t & VT_BTYPE;
101 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
104 /* we use our own 'finite' function to avoid potential problems with
105 non-standard math libs */
106 /* XXX: endianness dependent */
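/* How the check below works (this is the endianness dependence noted
   above: p[1] must hold the high 32 bits, i.e. little-endian layout).
   OR-ing the high word with 0x800fffff sets every bit except the 11
   exponent bits, so adding 1 wraps to 0 only when the exponent field is
   all ones (Inf/NaN); otherwise bit 31 stays set and the shift yields 1
   for finite values. */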
107 ST_FUNC int ieee_finite(double d)
109 int p[4];
110 memcpy(p, &d, sizeof(double));
111 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
114 ST_FUNC void test_lvalue(void)
116 if (!(vtop->r & VT_LVAL))
117 expect("lvalue");
120 ST_FUNC void check_vstack(void)
122 if (pvtop != vtop)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
129 #if 0
130 void pv (const char *lbl, int a, int b)
132 int i;
133 for (i = a; i < a + b; ++i) {
134 SValue *p = &vtop[-i];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
139 #endif
141 /* ------------------------------------------------------------------------- */
142 /* start of translation unit info */
143 ST_FUNC void tcc_debug_start(TCCState *s1)
145 if (s1->do_debug) {
146 char buf[512];
148 /* file info: full path + filename */
149 section_sym = put_elf_sym(symtab_section, 0, 0,
150 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
151 text_section->sh_num, NULL);
152 getcwd(buf, sizeof(buf));
153 #ifdef _WIN32
154 normalize_slashes(buf);
155 #endif
156 pstrcat(buf, sizeof(buf), "/");
157 put_stabs_r(buf, N_SO, 0, 0,
158 text_section->data_offset, text_section, section_sym);
159 put_stabs_r(file->filename, N_SO, 0, 0,
160 text_section->data_offset, text_section, section_sym);
161 last_ind = 0;
162 last_line_num = 0;
165 /* an ELF symbol of type STT_FILE must be emitted so that STB_LOCAL
166 symbols can be safely used */
167 put_elf_sym(symtab_section, 0, 0,
168 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
169 SHN_ABS, file->filename);
172 /* put end of translation unit info */
173 ST_FUNC void tcc_debug_end(TCCState *s1)
175 if (!s1->do_debug)
176 return;
177 put_stabs_r(NULL, N_SO, 0, 0,
178 text_section->data_offset, text_section, section_sym);
182 /* generate line number info */
183 ST_FUNC void tcc_debug_line(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 if ((last_line_num != file->line_num || last_ind != ind)) {
188 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
189 last_ind = ind;
190 last_line_num = file->line_num;
194 /* put function symbol */
195 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
197 char buf[512];
199 if (!s1->do_debug)
200 return;
202 /* stabs info */
203 /* XXX: we put here a dummy type */
204 snprintf(buf, sizeof(buf), "%s:%c1",
205 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
206 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
207 cur_text_section, sym->c);
208 /* //gr gdb wants a line at the function */
209 put_stabn(N_SLINE, 0, file->line_num, 0);
211 last_ind = 0;
212 last_line_num = 0;
215 /* put function size */
216 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
218 if (!s1->do_debug)
219 return;
220 put_stabn(N_FUN, 0, 0, size);
223 /* ------------------------------------------------------------------------- */
224 ST_FUNC void tccgen_start(TCCState *s1)
226 cur_text_section = NULL;
227 funcname = "";
228 anon_sym = SYM_FIRST_ANOM;
229 section_sym = 0;
230 const_wanted = 0;
231 nocode_wanted = 1;
233 /* define some often used types */
234 int_type.t = VT_INT;
235 char_pointer_type.t = VT_BYTE;
236 mk_pointer(&char_pointer_type);
237 #if PTR_SIZE == 4
238 size_type.t = VT_INT;
239 #else
240 size_type.t = VT_LLONG;
241 #endif
242 func_old_type.t = VT_FUNC;
243 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
245 tcc_debug_start(s1);
247 #ifdef TCC_TARGET_ARM
248 arm_init(s1);
249 #endif
252 ST_FUNC void tccgen_end(TCCState *s1)
254 gen_inline_functions(s1);
255 check_vstack();
256 /* end of translation unit info */
257 tcc_debug_end(s1);
260 /* ------------------------------------------------------------------------- */
261 /* apply storage attributes to ELF symbol */
263 static void update_storage(Sym *sym)
265 int t;
266 ElfW(Sym) *esym;
268 if (0 == sym->c)
269 return;
271 t = sym->type.t;
272 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
274 if (t & VT_VIS_MASK)
275 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
276 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
278 if (t & VT_WEAK)
279 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
281 #ifdef TCC_TARGET_PE
282 if (t & VT_EXPORT)
283 esym->st_other |= ST_PE_EXPORT;
284 #endif
287 /* ------------------------------------------------------------------------- */
288 /* update sym->c so that it points to an external symbol in section
289 'section' with value 'value' */
291 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
292 addr_t value, unsigned long size,
293 int can_add_underscore)
295 int sym_type, sym_bind, sh_num, info, other, t;
296 ElfW(Sym) *esym;
297 const char *name;
298 char buf1[256];
299 #ifdef CONFIG_TCC_BCHECK
300 char buf[32];
301 #endif
303 if (section == NULL)
304 sh_num = SHN_UNDEF;
305 else if (section == SECTION_ABS)
306 sh_num = SHN_ABS;
307 else if (section == SECTION_COMMON)
308 sh_num = SHN_COMMON;
309 else
310 sh_num = section->sh_num;
312 if (!sym->c) {
313 name = get_tok_str(sym->v, NULL);
314 #ifdef CONFIG_TCC_BCHECK
315 if (tcc_state->do_bounds_check) {
316 /* XXX: avoid doing that for statics ? */
317 /* if bound checking is activated, we change some function
318 names by adding the "__bound" prefix */
319 switch(sym->v) {
320 #ifdef TCC_TARGET_PE
321 /* XXX: we rely only on malloc hooks */
322 case TOK_malloc:
323 case TOK_free:
324 case TOK_realloc:
325 case TOK_memalign:
326 case TOK_calloc:
327 #endif
328 case TOK_memcpy:
329 case TOK_memmove:
330 case TOK_memset:
331 case TOK_strlen:
332 case TOK_strcpy:
333 case TOK_alloca:
334 strcpy(buf, "__bound_");
335 strcat(buf, name);
336 name = buf;
337 break;
340 #endif
341 t = sym->type.t;
342 if ((t & VT_BTYPE) == VT_FUNC) {
343 sym_type = STT_FUNC;
344 } else if ((t & VT_BTYPE) == VT_VOID) {
345 sym_type = STT_NOTYPE;
346 } else {
347 sym_type = STT_OBJECT;
349 if (t & VT_STATIC)
350 sym_bind = STB_LOCAL;
351 else
352 sym_bind = STB_GLOBAL;
353 other = 0;
354 #ifdef TCC_TARGET_PE
355 if (sym_type == STT_FUNC && sym->type.ref) {
356 Sym *ref = sym->type.ref;
357 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
358 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
359 name = buf1;
360 other |= ST_PE_STDCALL;
361 can_add_underscore = 0;
364 if (t & VT_IMPORT)
365 other |= ST_PE_IMPORT;
366 #endif
367 if (tcc_state->leading_underscore && can_add_underscore) {
368 buf1[0] = '_';
369 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
370 name = buf1;
372 if (sym->asm_label)
373 name = get_tok_str(sym->asm_label, NULL);
374 info = ELFW(ST_INFO)(sym_bind, sym_type);
375 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
376 } else {
377 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
378 esym->st_value = value;
379 esym->st_size = size;
380 esym->st_shndx = sh_num;
382 update_storage(sym);
385 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
386 addr_t value, unsigned long size)
388 put_extern_sym2(sym, section, value, size, 1);
391 /* add a new relocation entry to symbol 'sym' in section 's' */
392 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
393 addr_t addend)
395 int c = 0;
397 if (nocode_wanted && s == cur_text_section)
398 return;
400 if (sym) {
401 if (0 == sym->c)
402 put_extern_sym(sym, NULL, 0, 0);
403 c = sym->c;
406 /* now we can add ELF relocation info */
407 put_elf_reloca(symtab_section, s, offset, type, c, addend);
410 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
412 greloca(s, sym, offset, type, 0);
415 /* ------------------------------------------------------------------------- */
416 /* symbol allocator */
417 static Sym *__sym_malloc(void)
419 Sym *sym_pool, *sym, *last_sym;
420 int i;
422 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
423 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
425 last_sym = sym_free_first;
426 sym = sym_pool;
427 for(i = 0; i < SYM_POOL_NB; i++) {
428 sym->next = last_sym;
429 last_sym = sym;
430 sym++;
432 sym_free_first = last_sym;
433 return last_sym;
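/* Note: the pool is threaded into a LIFO free list headed by
   sym_free_first; sym_malloc() below pops entries from it and sym_free()
   pushes them back, so (unless SYM_DEBUG is defined) individual symbols
   never go through tcc_malloc/tcc_free. */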
436 static inline Sym *sym_malloc(void)
438 Sym *sym;
439 #ifndef SYM_DEBUG
440 sym = sym_free_first;
441 if (!sym)
442 sym = __sym_malloc();
443 sym_free_first = sym->next;
444 return sym;
445 #else
446 sym = tcc_malloc(sizeof(Sym));
447 return sym;
448 #endif
451 ST_INLN void sym_free(Sym *sym)
453 #ifndef SYM_DEBUG
454 sym->next = sym_free_first;
455 sym_free_first = sym;
456 #else
457 tcc_free(sym);
458 #endif
461 /* push, without hashing */
462 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
464 Sym *s;
466 s = sym_malloc();
467 s->scope = 0;
468 s->v = v;
469 s->type.t = t;
470 s->type.ref = NULL;
471 #ifdef _WIN64
472 s->d = NULL;
473 #endif
474 s->c = c;
475 s->next = NULL;
476 /* add in stack */
477 s->prev = *ps;
478 *ps = s;
479 return s;
482 /* find a symbol and return its associated structure. 's' is the top
483 of the symbol stack */
484 ST_FUNC Sym *sym_find2(Sym *s, int v)
486 while (s) {
487 if (s->v == v)
488 return s;
489 else if (s->v == -1)
490 return NULL;
491 s = s->prev;
493 return NULL;
496 /* structure lookup */
497 ST_INLN Sym *struct_find(int v)
499 v -= TOK_IDENT;
500 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
501 return NULL;
502 return table_ident[v]->sym_struct;
505 /* find an identifier */
506 ST_INLN Sym *sym_find(int v)
508 v -= TOK_IDENT;
509 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
510 return NULL;
511 return table_ident[v]->sym_identifier;
514 /* push a given symbol on the symbol stack */
515 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
517 Sym *s, **ps;
518 TokenSym *ts;
520 if (local_stack)
521 ps = &local_stack;
522 else
523 ps = &global_stack;
524 s = sym_push2(ps, v, type->t, c);
525 s->type.ref = type->ref;
526 s->r = r;
527 /* don't record fields or anonymous symbols */
528 /* XXX: simplify */
529 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
530 /* record symbol in token array */
531 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
532 if (v & SYM_STRUCT)
533 ps = &ts->sym_struct;
534 else
535 ps = &ts->sym_identifier;
536 s->prev_tok = *ps;
537 *ps = s;
538 s->scope = local_scope;
539 if (s->prev_tok && s->prev_tok->scope == s->scope)
540 tcc_error("redeclaration of '%s'",
541 get_tok_str(v & ~SYM_STRUCT, NULL));
543 return s;
546 /* push a global identifier */
547 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
549 Sym *s, **ps;
550 s = sym_push2(&global_stack, v, t, c);
551 /* don't record anonymous symbol */
552 if (v < SYM_FIRST_ANOM) {
553 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
554 /* modify the top most local identifier, so that
555 sym_identifier will point to 's' when popped */
556 while (*ps != NULL)
557 ps = &(*ps)->prev_tok;
558 s->prev_tok = NULL;
559 *ps = s;
561 return s;
564 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
565 pop them yet from the list, but do remove them from the token array. */
566 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
568 Sym *s, *ss, **ps;
569 TokenSym *ts;
570 int v;
572 s = *ptop;
573 while(s != b) {
574 ss = s->prev;
575 v = s->v;
576 /* remove symbol in token array */
577 /* XXX: simplify */
578 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
579 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
580 if (v & SYM_STRUCT)
581 ps = &ts->sym_struct;
582 else
583 ps = &ts->sym_identifier;
584 *ps = s->prev_tok;
586 if (!keep)
587 sym_free(s);
588 s = ss;
590 if (!keep)
591 *ptop = b;
594 /* ------------------------------------------------------------------------- */
596 static void vsetc(CType *type, int r, CValue *vc)
598 int v;
600 if (vtop >= vstack + (VSTACK_SIZE - 1))
601 tcc_error("memory full (vstack)");
602 /* cannot leave cpu flags set if other instructions are generated. Also
603 avoid leaving VT_JMP anywhere except on the top of the stack
604 because it would complicate the code generator.
606 Don't do this when nocode_wanted. vtop might come from
607 !nocode_wanted regions (see 88_codeopt.c) and transforming
608 it to a register without actually generating code is wrong
609 as their value might still be used for real. All values
610 we push under nocode_wanted will eventually be popped
611 again, so that the VT_CMP/VT_JMP value will be in vtop
612 when code is unsuppressed again.
614 Same logic below in vswap(); */
615 if (vtop >= vstack && !nocode_wanted) {
616 v = vtop->r & VT_VALMASK;
617 if (v == VT_CMP || (v & ~1) == VT_JMP)
618 gv(RC_INT);
621 vtop++;
622 vtop->type = *type;
623 vtop->r = r;
624 vtop->r2 = VT_CONST;
625 vtop->c = *vc;
626 vtop->sym = NULL;
629 ST_FUNC void vswap(void)
631 SValue tmp;
632 /* cannot vswap cpu flags. See comment at vsetc() above */
633 if (vtop >= vstack && !nocode_wanted) {
634 int v = vtop->r & VT_VALMASK;
635 if (v == VT_CMP || (v & ~1) == VT_JMP)
636 gv(RC_INT);
638 tmp = vtop[0];
639 vtop[0] = vtop[-1];
640 vtop[-1] = tmp;
643 /* pop stack value */
644 ST_FUNC void vpop(void)
646 int v;
647 v = vtop->r & VT_VALMASK;
648 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
649 /* for x86, we need to pop the FP stack */
650 if (v == TREG_ST0) {
651 o(0xd8dd); /* fstp %st(0) */
652 } else
653 #endif
654 if (v == VT_JMP || v == VT_JMPI) {
655 /* need to put correct jump if && or || without test */
656 gsym(vtop->c.i);
658 vtop--;
661 /* push constant of type "type" with useless value */
662 ST_FUNC void vpush(CType *type)
664 CValue cval;
665 vsetc(type, VT_CONST, &cval);
668 /* push integer constant */
669 ST_FUNC void vpushi(int v)
671 CValue cval;
672 cval.i = v;
673 vsetc(&int_type, VT_CONST, &cval);
676 /* push a pointer sized constant */
677 static void vpushs(addr_t v)
679 CValue cval;
680 cval.i = v;
681 vsetc(&size_type, VT_CONST, &cval);
684 /* push arbitrary 64bit constant */
685 ST_FUNC void vpush64(int ty, unsigned long long v)
687 CValue cval;
688 CType ctype;
689 ctype.t = ty;
690 ctype.ref = NULL;
691 cval.i = v;
692 vsetc(&ctype, VT_CONST, &cval);
695 /* push long long constant */
696 static inline void vpushll(long long v)
698 vpush64(VT_LLONG, v);
701 ST_FUNC void vset(CType *type, int r, long v)
703 CValue cval;
705 cval.i = v;
706 vsetc(type, r, &cval);
709 static void vseti(int r, int v)
711 CType type;
712 type.t = VT_INT;
713 type.ref = 0;
714 vset(&type, r, v);
717 ST_FUNC void vpushv(SValue *v)
719 if (vtop >= vstack + (VSTACK_SIZE - 1))
720 tcc_error("memory full (vstack)");
721 vtop++;
722 *vtop = *v;
725 static void vdup(void)
727 vpushv(vtop);
730 /* rotate n first stack elements to the bottom
731 I1 ... In -> I2 ... In I1 [top is right] */
733 ST_FUNC void vrotb(int n)
735 int i;
736 SValue tmp;
738 tmp = vtop[-n + 1];
739 for(i=-n+1;i!=0;i++)
740 vtop[i] = vtop[i+1];
741 vtop[0] = tmp;
744 /* rotate the n elements before entry e towards the top
745 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
747 ST_FUNC void vrote(SValue *e, int n)
749 int i;
750 SValue tmp;
752 tmp = *e;
753 for(i = 0;i < n - 1; i++)
754 e[-i] = e[-i - 1];
755 e[-n + 1] = tmp;
758 /* rotate n first stack elements to the top
759 I1 ... In -> In I1 ... I(n-1) [top is right] */
761 ST_FUNC void vrott(int n)
763 vrote(vtop, n);
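/* Illustrative note: with three entries A B C on the vstack (written
   bottom-to-top, C on top), vrotb(3) yields B C A (the old bottom element
   A becomes the top) while vrott(3) yields C A B; vrott() is simply
   vrote(vtop, n). */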
766 /* push a symbol value of TYPE */
767 static inline void vpushsym(CType *type, Sym *sym)
769 CValue cval;
770 cval.i = 0;
771 vsetc(type, VT_CONST | VT_SYM, &cval);
772 vtop->sym = sym;
775 /* Return a static symbol pointing to a section */
776 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
778 int v;
779 Sym *sym;
781 v = anon_sym++;
782 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
783 sym->type.ref = type->ref;
784 sym->r = VT_CONST | VT_SYM;
785 put_extern_sym(sym, sec, offset, size);
786 return sym;
789 /* push a reference to a section offset by adding a dummy symbol */
790 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
792 vpushsym(type, get_sym_ref(type, sec, offset, size));
795 /* define a new external reference to a symbol 'v' of type 'type' */
796 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
798 Sym *s;
800 s = sym_find(v);
801 if (!s) {
802 /* push forward reference */
803 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
804 s->type.ref = type->ref;
805 s->r = r | VT_CONST | VT_SYM;
807 return s;
810 /* Merge some storage attributes. */
811 static void patch_storage(Sym *sym, CType *type)
813 int t;
814 if (!is_compatible_types(&sym->type, type))
815 tcc_error("incompatible types for redefinition of '%s'",
816 get_tok_str(sym->v, NULL));
817 t = type->t;
818 #ifdef TCC_TARGET_PE
819 if ((sym->type.t ^ t) & VT_IMPORT)
820 tcc_error("incompatible dll linkage for redefinition of '%s'",
821 get_tok_str(sym->v, NULL));
822 #endif
823 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
824 if (t & VT_VIS_MASK) {
825 int vis = sym->type.t & VT_VIS_MASK;
826 int vis2 = t & VT_VIS_MASK;
827 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
828 vis = vis2;
829 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
830 vis = (vis < vis2) ? vis : vis2;
831 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
835 /* define a new external reference to a symbol 'v' */
836 static Sym *external_sym(int v, CType *type, int r)
838 Sym *s;
839 s = sym_find(v);
840 if (!s) {
841 /* push forward reference */
842 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
843 s->type.t |= VT_EXTERN;
844 } else {
845 if (s->type.ref == func_old_type.ref) {
846 s->type.ref = type->ref;
847 s->r = r | VT_CONST | VT_SYM;
848 s->type.t |= VT_EXTERN;
850 patch_storage(s, type);
851 update_storage(s);
853 return s;
856 /* push a reference to global symbol v */
857 ST_FUNC void vpush_global_sym(CType *type, int v)
859 vpushsym(type, external_global_sym(v, type, 0));
862 /* save registers up to (vtop - n) stack entry */
863 ST_FUNC void save_regs(int n)
865 SValue *p, *p1;
866 for(p = vstack, p1 = vtop - n; p <= p1; p++)
867 save_reg(p->r);
870 /* save r to the memory stack, and mark it as being free */
871 ST_FUNC void save_reg(int r)
873 save_reg_upstack(r, 0);
876 /* save r to the memory stack, and mark it as being free,
877 if seen up to (vtop - n) stack entry */
878 ST_FUNC void save_reg_upstack(int r, int n)
880 int l, saved, size, align;
881 SValue *p, *p1, sv;
882 CType *type;
884 if ((r &= VT_VALMASK) >= VT_CONST)
885 return;
886 if (nocode_wanted)
887 return;
889 /* modify all stack values */
890 saved = 0;
891 l = 0;
892 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
893 if ((p->r & VT_VALMASK) == r ||
894 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
895 /* must save value on stack if not already done */
896 if (!saved) {
897 /* NOTE: must reload 'r' because r might be equal to r2 */
898 r = p->r & VT_VALMASK;
899 /* store register in the stack */
900 type = &p->type;
901 if ((p->r & VT_LVAL) ||
902 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
903 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
904 type = &char_pointer_type;
905 #else
906 type = &int_type;
907 #endif
908 size = type_size(type, &align);
909 loc = (loc - size) & -align;
910 sv.type.t = type->t;
911 sv.r = VT_LOCAL | VT_LVAL;
912 sv.c.i = loc;
913 store(r, &sv);
914 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
915 /* x86 specific: need to pop fp register ST0 if saved */
916 if (r == TREG_ST0) {
917 o(0xd8dd); /* fstp %st(0) */
919 #endif
920 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
921 /* special long long case */
922 if ((type->t & VT_BTYPE) == VT_LLONG) {
923 sv.c.i += 4;
924 store(p->r2, &sv);
926 #endif
927 l = loc;
928 saved = 1;
930 /* mark that stack entry as being saved on the stack */
931 if (p->r & VT_LVAL) {
932 /* also clear the bounded flag because the
933 relocation address of the function was stored in
934 p->c.i */
935 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
936 } else {
937 p->r = lvalue_type(p->type.t) | VT_LOCAL;
939 p->r2 = VT_CONST;
940 p->c.i = l;
945 #ifdef TCC_TARGET_ARM
946 /* find a register of class 'rc2' with at most one reference on stack.
947 * If none, call get_reg(rc) */
948 ST_FUNC int get_reg_ex(int rc, int rc2)
950 int r;
951 SValue *p;
953 for(r=0;r<NB_REGS;r++) {
954 if (reg_classes[r] & rc2) {
955 int n;
956 n=0;
957 for(p = vstack; p <= vtop; p++) {
958 if ((p->r & VT_VALMASK) == r ||
959 (p->r2 & VT_VALMASK) == r)
960 n++;
962 if (n <= 1)
963 return r;
966 return get_reg(rc);
968 #endif
970 /* find a free register of class 'rc'. If none, save one register */
971 ST_FUNC int get_reg(int rc)
973 int r;
974 SValue *p;
976 /* find a free register */
977 for(r=0;r<NB_REGS;r++) {
978 if (reg_classes[r] & rc) {
979 if (nocode_wanted)
980 return r;
981 for(p=vstack;p<=vtop;p++) {
982 if ((p->r & VT_VALMASK) == r ||
983 (p->r2 & VT_VALMASK) == r)
984 goto notfound;
986 return r;
988 notfound: ;
991 /* no register left : free the first one on the stack (VERY
992 IMPORTANT to start from the bottom to ensure that we don't
993 spill registers used in gen_opi()) */
994 for(p=vstack;p<=vtop;p++) {
995 /* look at second register (if long long) */
996 r = p->r2 & VT_VALMASK;
997 if (r < VT_CONST && (reg_classes[r] & rc))
998 goto save_found;
999 r = p->r & VT_VALMASK;
1000 if (r < VT_CONST && (reg_classes[r] & rc)) {
1001 save_found:
1002 save_reg(r);
1003 return r;
1006 /* Should never come here */
1007 return -1;
1010 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1011 if needed */
1012 static void move_reg(int r, int s, int t)
1014 SValue sv;
1016 if (r != s) {
1017 save_reg(r);
1018 sv.type.t = t;
1019 sv.type.ref = NULL;
1020 sv.r = s;
1021 sv.c.i = 0;
1022 load(r, &sv);
1026 /* get address of vtop (vtop MUST BE an lvalue) */
1027 ST_FUNC void gaddrof(void)
1029 vtop->r &= ~VT_LVAL;
1030 /* tricky: if saved lvalue, then we can go back to lvalue */
1031 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1032 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1037 #ifdef CONFIG_TCC_BCHECK
1038 /* generate lvalue bound code */
1039 static void gbound(void)
1041 int lval_type;
1042 CType type1;
1044 vtop->r &= ~VT_MUSTBOUND;
1045 /* if lvalue, then use checking code before dereferencing */
1046 if (vtop->r & VT_LVAL) {
1047 /* if not VT_BOUNDED value, then make one */
1048 if (!(vtop->r & VT_BOUNDED)) {
1049 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1050 /* must save type because we must set it to int to get pointer */
1051 type1 = vtop->type;
1052 vtop->type.t = VT_PTR;
1053 gaddrof();
1054 vpushi(0);
1055 gen_bounded_ptr_add();
1056 vtop->r |= lval_type;
1057 vtop->type = type1;
1059 /* then check for dereferencing */
1060 gen_bounded_ptr_deref();
1063 #endif
1065 /* store vtop in a register belonging to class 'rc'. lvalues are
1066 converted to values. Cannot be used if the value cannot be converted
1067 to a register value (such as structures). */
1068 ST_FUNC int gv(int rc)
1070 int r, bit_pos, bit_size, size, align, i;
1071 int rc2;
1073 /* NOTE: get_reg can modify vstack[] */
1074 if (vtop->type.t & VT_BITFIELD) {
1075 CType type;
1076 int bits = 32;
1077 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1078 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1079 /* remove bit field info to avoid loops */
1080 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1081 /* cast to int to propagate signedness in following ops */
1082 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1083 type.t = VT_LLONG;
1084 bits = 64;
1085 } else
1086 type.t = VT_INT;
1087 if((vtop->type.t & VT_UNSIGNED) ||
1088 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1089 type.t |= VT_UNSIGNED;
1090 gen_cast(&type);
1091 /* generate shifts */
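/* e.g. for a 32-bit word with bit_pos = 3 and bit_size = 5 the field is
   moved to the top bits with a shift left by 24 and brought back down,
   sign- or zero-extended, with a shift right by 27 */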
1092 vpushi(bits - (bit_pos + bit_size));
1093 gen_op(TOK_SHL);
1094 vpushi(bits - bit_size);
1095 /* NOTE: transformed to SHR if unsigned */
1096 gen_op(TOK_SAR);
1097 r = gv(rc);
1098 } else {
1099 if (is_float(vtop->type.t) &&
1100 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1101 Sym *sym;
1102 int *ptr;
1103 unsigned long offset;
1104 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1105 CValue check;
1106 #endif
1108 /* XXX: unify with initializers handling ? */
1109 /* CPUs usually cannot use float constants, so we store them
1110 generically in data segment */
1111 size = type_size(&vtop->type, &align);
1112 offset = (data_section->data_offset + align - 1) & -align;
1113 data_section->data_offset = offset;
1114 /* XXX: not portable yet */
1115 #if defined(__i386__) || defined(__x86_64__)
1116 /* Zero pad x87 tenbyte long doubles */
1117 if (size == LDOUBLE_SIZE) {
1118 vtop->c.tab[2] &= 0xffff;
1119 #if LDOUBLE_SIZE == 16
1120 vtop->c.tab[3] = 0;
1121 #endif
1123 #endif
1124 ptr = section_ptr_add(data_section, size);
1125 size = size >> 2;
1126 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1127 check.d = 1;
1128 if(check.tab[0])
1129 for(i=0;i<size;i++)
1130 ptr[i] = vtop->c.tab[size-1-i];
1131 else
1132 #endif
1133 for(i=0;i<size;i++)
1134 ptr[i] = vtop->c.tab[i];
1135 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1136 vtop->r |= VT_LVAL | VT_SYM;
1137 vtop->sym = sym;
1138 vtop->c.i = 0;
1140 #ifdef CONFIG_TCC_BCHECK
1141 if (vtop->r & VT_MUSTBOUND)
1142 gbound();
1143 #endif
1145 r = vtop->r & VT_VALMASK;
1146 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1147 #ifndef TCC_TARGET_ARM64
1148 if (rc == RC_IRET)
1149 rc2 = RC_LRET;
1150 #ifdef TCC_TARGET_X86_64
1151 else if (rc == RC_FRET)
1152 rc2 = RC_QRET;
1153 #endif
1154 #endif
1155 /* need to reload if:
1156 - constant
1157 - lvalue (need to dereference pointer)
1158 - already a register, but not in the right class */
1159 if (r >= VT_CONST
1160 || (vtop->r & VT_LVAL)
1161 || !(reg_classes[r] & rc)
1162 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1163 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1164 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1165 #else
1166 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1167 #endif
1170 r = get_reg(rc);
1171 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1172 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1173 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1174 #else
1175 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1176 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1177 unsigned long long ll;
1178 #endif
1179 int r2, original_type;
1180 original_type = vtop->type.t;
1181 /* two register type load : expand to two words
1182 temporarily */
1183 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1184 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1185 /* load constant */
1186 ll = vtop->c.i;
1187 vtop->c.i = ll; /* first word */
1188 load(r, vtop);
1189 vtop->r = r; /* save register value */
1190 vpushi(ll >> 32); /* second word */
1191 } else
1192 #endif
1193 if (vtop->r & VT_LVAL) {
1194 /* We do not want to modify the long long
1195 pointer here, so the safest (and least
1196 efficient) is to save all the other registers
1197 in the stack. XXX: totally inefficient. */
1198 #if 0
1199 save_regs(1);
1200 #else
1201 /* lvalue_save: save only if used further down the stack */
1202 save_reg_upstack(vtop->r, 1);
1203 #endif
1204 /* load from memory */
1205 vtop->type.t = load_type;
1206 load(r, vtop);
1207 vdup();
1208 vtop[-1].r = r; /* save register value */
1209 /* increment pointer to get second word */
1210 vtop->type.t = addr_type;
1211 gaddrof();
1212 vpushi(load_size);
1213 gen_op('+');
1214 vtop->r |= VT_LVAL;
1215 vtop->type.t = load_type;
1216 } else {
1217 /* move registers */
1218 load(r, vtop);
1219 vdup();
1220 vtop[-1].r = r; /* save register value */
1221 vtop->r = vtop[-1].r2;
1223 /* Allocate second register. Here we rely on the fact that
1224 get_reg() tries first to free r2 of an SValue. */
1225 r2 = get_reg(rc2);
1226 load(r2, vtop);
1227 vpop();
1228 /* write second register */
1229 vtop->r2 = r2;
1230 vtop->type.t = original_type;
1231 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1232 int t1, t;
1233 /* lvalue of scalar type : need to use lvalue type
1234 because of possible cast */
1235 t = vtop->type.t;
1236 t1 = t;
1237 /* compute memory access type */
1238 if (vtop->r & VT_LVAL_BYTE)
1239 t = VT_BYTE;
1240 else if (vtop->r & VT_LVAL_SHORT)
1241 t = VT_SHORT;
1242 if (vtop->r & VT_LVAL_UNSIGNED)
1243 t |= VT_UNSIGNED;
1244 vtop->type.t = t;
1245 load(r, vtop);
1246 /* restore wanted type */
1247 vtop->type.t = t1;
1248 } else {
1249 /* one register type load */
1250 load(r, vtop);
1253 vtop->r = r;
1254 #ifdef TCC_TARGET_C67
1255 /* uses register pairs for doubles */
1256 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1257 vtop->r2 = r+1;
1258 #endif
1260 return r;
1263 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1264 ST_FUNC void gv2(int rc1, int rc2)
1266 int v;
1268 /* generate more generic register first. But VT_JMP or VT_CMP
1269 values must be generated first in all cases to avoid possible
1270 reload errors */
1271 v = vtop[0].r & VT_VALMASK;
1272 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1273 vswap();
1274 gv(rc1);
1275 vswap();
1276 gv(rc2);
1277 /* test if reload is needed for first register */
1278 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1279 vswap();
1280 gv(rc1);
1281 vswap();
1283 } else {
1284 gv(rc2);
1285 vswap();
1286 gv(rc1);
1287 vswap();
1288 /* test if reload is needed for first register */
1289 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1290 gv(rc2);
1295 #ifndef TCC_TARGET_ARM64
1296 /* wrapper around RC_FRET to return a register by type */
1297 static int rc_fret(int t)
1299 #ifdef TCC_TARGET_X86_64
1300 if (t == VT_LDOUBLE) {
1301 return RC_ST0;
1303 #endif
1304 return RC_FRET;
1306 #endif
1308 /* wrapper around REG_FRET to return a register by type */
1309 static int reg_fret(int t)
1311 #ifdef TCC_TARGET_X86_64
1312 if (t == VT_LDOUBLE) {
1313 return TREG_ST0;
1315 #endif
1316 return REG_FRET;
1319 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1320 /* expand a 64bit value on the stack into two ints */
1321 static void lexpand(void)
1323 int u, v;
1324 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1325 v = vtop->r & (VT_VALMASK | VT_LVAL);
1326 if (v == VT_CONST) {
1327 vdup();
1328 vtop[0].c.i >>= 32;
1329 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1330 vdup();
1331 vtop[0].c.i += 4;
1332 } else {
1333 gv(RC_INT);
1334 vdup();
1335 vtop[0].r = vtop[-1].r2;
1336 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1338 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
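/* After lexpand() the low 32-bit word sits at vtop[-1] and the high word
   at vtop[0], both typed VT_INT; the lvalue branch above relies on the
   little-endian layout (offset +4 reaches the high word). */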
1340 #endif
1342 #ifdef TCC_TARGET_ARM
1343 /* expand long long on stack */
1344 ST_FUNC void lexpand_nr(void)
1346 int u,v;
1348 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1349 vdup();
1350 vtop->r2 = VT_CONST;
1351 vtop->type.t = VT_INT | u;
1352 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1353 if (v == VT_CONST) {
1354 vtop[-1].c.i = vtop->c.i;
1355 vtop->c.i = vtop->c.i >> 32;
1356 vtop->r = VT_CONST;
1357 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1358 vtop->c.i += 4;
1359 vtop->r = vtop[-1].r;
1360 } else if (v > VT_CONST) {
1361 vtop--;
1362 lexpand();
1363 } else
1364 vtop->r = vtop[-1].r2;
1365 vtop[-1].r2 = VT_CONST;
1366 vtop[-1].type.t = VT_INT | u;
1368 #endif
1370 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1371 /* build a long long from two ints */
1372 static void lbuild(int t)
1374 gv2(RC_INT, RC_INT);
1375 vtop[-1].r2 = vtop[0].r;
1376 vtop[-1].type.t = t;
1377 vpop();
1379 #endif
1381 /* convert stack entry to register and duplicate its value in another
1382 register */
1383 static void gv_dup(void)
1385 int rc, t, r, r1;
1386 SValue sv;
1388 t = vtop->type.t;
1389 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1390 if ((t & VT_BTYPE) == VT_LLONG) {
1391 lexpand();
1392 gv_dup();
1393 vswap();
1394 vrotb(3);
1395 gv_dup();
1396 vrotb(4);
1397 /* stack: H L L1 H1 */
1398 lbuild(t);
1399 vrotb(3);
1400 vrotb(3);
1401 vswap();
1402 lbuild(t);
1403 vswap();
1404 } else
1405 #endif
1407 /* duplicate value */
1408 rc = RC_INT;
1409 sv.type.t = VT_INT;
1410 if (is_float(t)) {
1411 rc = RC_FLOAT;
1412 #ifdef TCC_TARGET_X86_64
1413 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1414 rc = RC_ST0;
1416 #endif
1417 sv.type.t = t;
1419 r = gv(rc);
1420 r1 = get_reg(rc);
1421 sv.r = r;
1422 sv.c.i = 0;
1423 load(r1, &sv); /* move r to r1 */
1424 vdup();
1425 /* duplicates value */
1426 if (r != r1)
1427 vtop->r = r1;
1431 /* Generate value test
1433 * Generate a test for any value (jump, comparison and integers) */
1434 ST_FUNC int gvtst(int inv, int t)
1436 int v = vtop->r & VT_VALMASK;
1437 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1438 vpushi(0);
1439 gen_op(TOK_NE);
1441 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1442 /* constant jmp optimization */
1443 if ((vtop->c.i != 0) != inv)
1444 t = gjmp(t);
1445 vtop--;
1446 return t;
1448 return gtst(inv, t);
1451 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1452 /* generate CPU independent (unsigned) long long operations */
1453 static void gen_opl(int op)
1455 int t, a, b, op1, c, i;
1456 int func;
1457 unsigned short reg_iret = REG_IRET;
1458 unsigned short reg_lret = REG_LRET;
1459 SValue tmp;
1461 switch(op) {
1462 case '/':
1463 case TOK_PDIV:
1464 func = TOK___divdi3;
1465 goto gen_func;
1466 case TOK_UDIV:
1467 func = TOK___udivdi3;
1468 goto gen_func;
1469 case '%':
1470 func = TOK___moddi3;
1471 goto gen_mod_func;
1472 case TOK_UMOD:
1473 func = TOK___umoddi3;
1474 gen_mod_func:
1475 #ifdef TCC_ARM_EABI
1476 reg_iret = TREG_R2;
1477 reg_lret = TREG_R3;
1478 #endif
1479 gen_func:
1480 /* call generic long long function */
1481 vpush_global_sym(&func_old_type, func);
1482 vrott(3);
1483 gfunc_call(2);
1484 vpushi(0);
1485 vtop->r = reg_iret;
1486 vtop->r2 = reg_lret;
1487 break;
1488 case '^':
1489 case '&':
1490 case '|':
1491 case '*':
1492 case '+':
1493 case '-':
1494 //pv("gen_opl A",0,2);
1495 t = vtop->type.t;
1496 vswap();
1497 lexpand();
1498 vrotb(3);
1499 lexpand();
1500 /* stack: L1 H1 L2 H2 */
1501 tmp = vtop[0];
1502 vtop[0] = vtop[-3];
1503 vtop[-3] = tmp;
1504 tmp = vtop[-2];
1505 vtop[-2] = vtop[-3];
1506 vtop[-3] = tmp;
1507 vswap();
1508 /* stack: H1 H2 L1 L2 */
1509 //pv("gen_opl B",0,4);
1510 if (op == '*') {
1511 vpushv(vtop - 1);
1512 vpushv(vtop - 1);
1513 gen_op(TOK_UMULL);
1514 lexpand();
1515 /* stack: H1 H2 L1 L2 ML MH */
1516 for(i=0;i<4;i++)
1517 vrotb(6);
1518 /* stack: ML MH H1 H2 L1 L2 */
1519 tmp = vtop[0];
1520 vtop[0] = vtop[-2];
1521 vtop[-2] = tmp;
1522 /* stack: ML MH H1 L2 H2 L1 */
1523 gen_op('*');
1524 vrotb(3);
1525 vrotb(3);
1526 gen_op('*');
1527 /* stack: ML MH M1 M2 */
1528 gen_op('+');
1529 gen_op('+');
1530 } else if (op == '+' || op == '-') {
1531 /* XXX: add non carry method too (for MIPS or alpha) */
1532 if (op == '+')
1533 op1 = TOK_ADDC1;
1534 else
1535 op1 = TOK_SUBC1;
1536 gen_op(op1);
1537 /* stack: H1 H2 (L1 op L2) */
1538 vrotb(3);
1539 vrotb(3);
1540 gen_op(op1 + 1); /* TOK_xxxC2 */
1541 } else {
1542 gen_op(op);
1543 /* stack: H1 H2 (L1 op L2) */
1544 vrotb(3);
1545 vrotb(3);
1546 /* stack: (L1 op L2) H1 H2 */
1547 gen_op(op);
1548 /* stack: (L1 op L2) (H1 op H2) */
1550 /* stack: L H */
1551 lbuild(t);
1552 break;
1553 case TOK_SAR:
1554 case TOK_SHR:
1555 case TOK_SHL:
1556 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1557 t = vtop[-1].type.t;
1558 vswap();
1559 lexpand();
1560 vrotb(3);
1561 /* stack: L H shift */
1562 c = (int)vtop->c.i;
1563 /* constant: simpler */
1564 /* NOTE: all comments are for SHL. the other cases are
1565 done by swapping words */
1566 vpop();
1567 if (op != TOK_SHL)
1568 vswap();
1569 if (c >= 32) {
1570 /* stack: L H */
1571 vpop();
1572 if (c > 32) {
1573 vpushi(c - 32);
1574 gen_op(op);
1576 if (op != TOK_SAR) {
1577 vpushi(0);
1578 } else {
1579 gv_dup();
1580 vpushi(31);
1581 gen_op(TOK_SAR);
1583 vswap();
1584 } else {
1585 vswap();
1586 gv_dup();
1587 /* stack: H L L */
1588 vpushi(c);
1589 gen_op(op);
1590 vswap();
1591 vpushi(32 - c);
1592 if (op == TOK_SHL)
1593 gen_op(TOK_SHR);
1594 else
1595 gen_op(TOK_SHL);
1596 vrotb(3);
1597 /* stack: L L H */
1598 vpushi(c);
1599 if (op == TOK_SHL)
1600 gen_op(TOK_SHL);
1601 else
1602 gen_op(TOK_SHR);
1603 gen_op('|');
1605 if (op != TOK_SHL)
1606 vswap();
1607 lbuild(t);
1608 } else {
1609 /* XXX: should provide a faster fallback on x86 ? */
1610 switch(op) {
1611 case TOK_SAR:
1612 func = TOK___ashrdi3;
1613 goto gen_func;
1614 case TOK_SHR:
1615 func = TOK___lshrdi3;
1616 goto gen_func;
1617 case TOK_SHL:
1618 func = TOK___ashldi3;
1619 goto gen_func;
1622 break;
1623 default:
1624 /* compare operations */
1625 t = vtop->type.t;
1626 vswap();
1627 lexpand();
1628 vrotb(3);
1629 lexpand();
1630 /* stack: L1 H1 L2 H2 */
1631 tmp = vtop[-1];
1632 vtop[-1] = vtop[-2];
1633 vtop[-2] = tmp;
1634 /* stack: L1 L2 H1 H2 */
1635 /* compare high */
1636 op1 = op;
1637 /* when values are equal, we need to compare low words. since
1638 the jump is inverted, we invert the test too. */
1639 if (op1 == TOK_LT)
1640 op1 = TOK_LE;
1641 else if (op1 == TOK_GT)
1642 op1 = TOK_GE;
1643 else if (op1 == TOK_ULT)
1644 op1 = TOK_ULE;
1645 else if (op1 == TOK_UGT)
1646 op1 = TOK_UGE;
1647 a = 0;
1648 b = 0;
1649 gen_op(op1);
1650 if (op == TOK_NE) {
1651 b = gvtst(0, 0);
1652 } else {
1653 a = gvtst(1, 0);
1654 if (op != TOK_EQ) {
1655 /* generate non equal test */
1656 vpushi(TOK_NE);
1657 vtop->r = VT_CMP;
1658 b = gvtst(0, 0);
1661 /* compare low. Always unsigned */
1662 op1 = op;
1663 if (op1 == TOK_LT)
1664 op1 = TOK_ULT;
1665 else if (op1 == TOK_LE)
1666 op1 = TOK_ULE;
1667 else if (op1 == TOK_GT)
1668 op1 = TOK_UGT;
1669 else if (op1 == TOK_GE)
1670 op1 = TOK_UGE;
1671 gen_op(op1);
1672 a = gvtst(1, a);
1673 gsym(b);
1674 vseti(VT_JMPI, a);
1675 break;
1678 #endif
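/* The two helpers below implement signed division and signed '<' on
   uint64_t operands using only unsigned arithmetic: the sign is handled
   explicitly (resp. by flipping the sign bit before an unsigned compare),
   so constant folding does not depend on the host compiler's
   signed-overflow behaviour. */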
1680 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1682 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1683 return (a ^ b) >> 63 ? -x : x;
1686 static int gen_opic_lt(uint64_t a, uint64_t b)
1688 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1691 /* handle integer constant optimizations and various machine
1692 independent optimizations */
1693 static void gen_opic(int op)
1695 SValue *v1 = vtop - 1;
1696 SValue *v2 = vtop;
1697 int t1 = v1->type.t & VT_BTYPE;
1698 int t2 = v2->type.t & VT_BTYPE;
1699 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1700 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1701 uint64_t l1 = c1 ? v1->c.i : 0;
1702 uint64_t l2 = c2 ? v2->c.i : 0;
1703 int shm = (t1 == VT_LLONG) ? 63 : 31;
1705 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1706 l1 = ((uint32_t)l1 |
1707 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1708 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1709 l2 = ((uint32_t)l2 |
1710 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1712 if (c1 && c2) {
1713 switch(op) {
1714 case '+': l1 += l2; break;
1715 case '-': l1 -= l2; break;
1716 case '&': l1 &= l2; break;
1717 case '^': l1 ^= l2; break;
1718 case '|': l1 |= l2; break;
1719 case '*': l1 *= l2; break;
1721 case TOK_PDIV:
1722 case '/':
1723 case '%':
1724 case TOK_UDIV:
1725 case TOK_UMOD:
1726 /* if division by zero, generate explicit division */
1727 if (l2 == 0) {
1728 if (const_wanted)
1729 tcc_error("division by zero in constant");
1730 goto general_case;
1732 switch(op) {
1733 default: l1 = gen_opic_sdiv(l1, l2); break;
1734 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1735 case TOK_UDIV: l1 = l1 / l2; break;
1736 case TOK_UMOD: l1 = l1 % l2; break;
1738 break;
1739 case TOK_SHL: l1 <<= (l2 & shm); break;
1740 case TOK_SHR: l1 >>= (l2 & shm); break;
1741 case TOK_SAR:
1742 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1743 break;
1744 /* tests */
1745 case TOK_ULT: l1 = l1 < l2; break;
1746 case TOK_UGE: l1 = l1 >= l2; break;
1747 case TOK_EQ: l1 = l1 == l2; break;
1748 case TOK_NE: l1 = l1 != l2; break;
1749 case TOK_ULE: l1 = l1 <= l2; break;
1750 case TOK_UGT: l1 = l1 > l2; break;
1751 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1752 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1753 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1754 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1755 /* logical */
1756 case TOK_LAND: l1 = l1 && l2; break;
1757 case TOK_LOR: l1 = l1 || l2; break;
1758 default:
1759 goto general_case;
1761 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1762 l1 = ((uint32_t)l1 |
1763 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1764 v1->c.i = l1;
1765 vtop--;
1766 } else {
1767 /* if commutative ops, put c2 as constant */
1768 if (c1 && (op == '+' || op == '&' || op == '^' ||
1769 op == '|' || op == '*')) {
1770 vswap();
1771 c2 = c1; //c = c1, c1 = c2, c2 = c;
1772 l2 = l1; //l = l1, l1 = l2, l2 = l;
1774 if (!const_wanted &&
1775 c1 && ((l1 == 0 &&
1776 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1777 (l1 == -1 && op == TOK_SAR))) {
1778 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1779 vtop--;
1780 } else if (!const_wanted &&
1781 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1782 (l2 == -1 && op == '|') ||
1783 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1784 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1785 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1786 if (l2 == 1)
1787 vtop->c.i = 0;
1788 vswap();
1789 vtop--;
1790 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1791 op == TOK_PDIV) &&
1792 l2 == 1) ||
1793 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1794 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1795 l2 == 0) ||
1796 (op == '&' &&
1797 l2 == -1))) {
1798 /* filter out NOP operations like x*1, x-0, x&-1... */
1799 vtop--;
1800 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1801 /* try to use shifts instead of muls or divs */
1802 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1803 int n = -1;
1804 while (l2) {
1805 l2 >>= 1;
1806 n++;
1808 vtop->c.i = n;
1809 if (op == '*')
1810 op = TOK_SHL;
1811 else if (op == TOK_PDIV)
1812 op = TOK_SAR;
1813 else
1814 op = TOK_SHR;
1816 goto general_case;
1817 } else if (c2 && (op == '+' || op == '-') &&
1818 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1819 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1820 /* symbol + constant case */
1821 if (op == '-')
1822 l2 = -l2;
1823 l2 += vtop[-1].c.i;
1824 /* The backends can't always deal with addends to symbols
1825 larger than +-1<<31. Don't construct such. */
1826 if ((int)l2 != l2)
1827 goto general_case;
1828 vtop--;
1829 vtop->c.i = l2;
1830 } else {
1831 general_case:
1832 /* call low level op generator */
1833 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1834 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1835 gen_opl(op);
1836 else
1837 gen_opi(op);
1842 /* generate a floating point operation with constant propagation */
1843 static void gen_opif(int op)
1845 int c1, c2;
1846 SValue *v1, *v2;
1847 long double f1, f2;
1849 v1 = vtop - 1;
1850 v2 = vtop;
1851 /* currently, we cannot do computations with forward symbols */
1852 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1853 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1854 if (c1 && c2) {
1855 if (v1->type.t == VT_FLOAT) {
1856 f1 = v1->c.f;
1857 f2 = v2->c.f;
1858 } else if (v1->type.t == VT_DOUBLE) {
1859 f1 = v1->c.d;
1860 f2 = v2->c.d;
1861 } else {
1862 f1 = v1->c.ld;
1863 f2 = v2->c.ld;
1866 /* NOTE: we only do constant propagation on finite numbers (not
1867 NaN or infinity) (ANSI spec) */
1868 if (!ieee_finite(f1) || !ieee_finite(f2))
1869 goto general_case;
1871 switch(op) {
1872 case '+': f1 += f2; break;
1873 case '-': f1 -= f2; break;
1874 case '*': f1 *= f2; break;
1875 case '/':
1876 if (f2 == 0.0) {
1877 if (const_wanted)
1878 tcc_error("division by zero in constant");
1879 goto general_case;
1881 f1 /= f2;
1882 break;
1883 /* XXX: also handles tests ? */
1884 default:
1885 goto general_case;
1887 /* XXX: overflow test ? */
1888 if (v1->type.t == VT_FLOAT) {
1889 v1->c.f = f1;
1890 } else if (v1->type.t == VT_DOUBLE) {
1891 v1->c.d = f1;
1892 } else {
1893 v1->c.ld = f1;
1895 vtop--;
1896 } else {
1897 general_case:
1898 gen_opf(op);
1902 static int pointed_size(CType *type)
1904 int align;
1905 return type_size(pointed_type(type), &align);
1908 static void vla_runtime_pointed_size(CType *type)
1910 int align;
1911 vla_runtime_type_size(pointed_type(type), &align);
1914 static inline int is_null_pointer(SValue *p)
1916 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1917 return 0;
1918 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1919 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1920 ((p->type.t & VT_BTYPE) == VT_PTR &&
1921 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1924 static inline int is_integer_btype(int bt)
1926 return (bt == VT_BYTE || bt == VT_SHORT ||
1927 bt == VT_INT || bt == VT_LLONG);
1930 /* check types for comparison or subtraction of pointers */
1931 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1933 CType *type1, *type2, tmp_type1, tmp_type2;
1934 int bt1, bt2;
1936 /* null pointers are accepted for all comparisons, as in gcc */
1937 if (is_null_pointer(p1) || is_null_pointer(p2))
1938 return;
1939 type1 = &p1->type;
1940 type2 = &p2->type;
1941 bt1 = type1->t & VT_BTYPE;
1942 bt2 = type2->t & VT_BTYPE;
1943 /* accept comparison between pointer and integer with a warning */
1944 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1945 if (op != TOK_LOR && op != TOK_LAND )
1946 tcc_warning("comparison between pointer and integer");
1947 return;
1950 /* both must be pointers or implicit function pointers */
1951 if (bt1 == VT_PTR) {
1952 type1 = pointed_type(type1);
1953 } else if (bt1 != VT_FUNC)
1954 goto invalid_operands;
1956 if (bt2 == VT_PTR) {
1957 type2 = pointed_type(type2);
1958 } else if (bt2 != VT_FUNC) {
1959 invalid_operands:
1960 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1962 if ((type1->t & VT_BTYPE) == VT_VOID ||
1963 (type2->t & VT_BTYPE) == VT_VOID)
1964 return;
1965 tmp_type1 = *type1;
1966 tmp_type2 = *type2;
1967 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1968 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1969 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1970 /* gcc-like error if '-' is used */
1971 if (op == '-')
1972 goto invalid_operands;
1973 else
1974 tcc_warning("comparison of distinct pointer types lacks a cast");
1978 /* generic gen_op: handles type problems */
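/* Pointer arithmetic below is scaled: for pointer +/- integer the integer
   operand is first multiplied by the size of the pointed-to type, and for
   pointer - pointer the byte difference is divided by that size afterwards
   (TOK_PDIV), so e.g. (int*)p - (int*)q yields an element count rather
   than a byte count. */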
1979 ST_FUNC void gen_op(int op)
1981 int u, t1, t2, bt1, bt2, t;
1982 CType type1;
1984 redo:
1985 t1 = vtop[-1].type.t;
1986 t2 = vtop[0].type.t;
1987 bt1 = t1 & VT_BTYPE;
1988 bt2 = t2 & VT_BTYPE;
1990 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1991 tcc_error("operation on a struct");
1992 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1993 if (bt2 == VT_FUNC) {
1994 mk_pointer(&vtop->type);
1995 gaddrof();
1997 if (bt1 == VT_FUNC) {
1998 vswap();
1999 mk_pointer(&vtop->type);
2000 gaddrof();
2001 vswap();
2003 goto redo;
2004 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2005 /* at least one operand is a pointer */
2006 /* relational op: both operands must be pointers */
2007 if (op >= TOK_ULT && op <= TOK_LOR) {
2008 check_comparison_pointer_types(vtop - 1, vtop, op);
2009 /* pointers are handled as unsigned */
2010 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2011 t = VT_LLONG | VT_UNSIGNED;
2012 #else
2013 t = VT_INT | VT_UNSIGNED;
2014 #endif
2015 goto std_op;
2017 /* if both pointers, then it must be the '-' op */
2018 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2019 if (op != '-')
2020 tcc_error("cannot use pointers here");
2021 check_comparison_pointer_types(vtop - 1, vtop, op);
2022 /* XXX: check that types are compatible */
2023 if (vtop[-1].type.t & VT_VLA) {
2024 vla_runtime_pointed_size(&vtop[-1].type);
2025 } else {
2026 vpushi(pointed_size(&vtop[-1].type));
2028 vrott(3);
2029 gen_opic(op);
2030 /* set to integer type */
2031 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2032 vtop->type.t = VT_LLONG;
2033 #else
2034 vtop->type.t = VT_INT;
2035 #endif
2036 vswap();
2037 gen_op(TOK_PDIV);
2038 } else {
2039 /* exactly one pointer : must be '+' or '-'. */
2040 if (op != '-' && op != '+')
2041 tcc_error("cannot use pointers here");
2042 /* Put pointer as first operand */
2043 if (bt2 == VT_PTR) {
2044 vswap();
2045 t = t1, t1 = t2, t2 = t;
2047 #if PTR_SIZE == 4
2048 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2049 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2050 gen_cast(&int_type);
2051 #endif
2052 type1 = vtop[-1].type;
2053 type1.t &= ~VT_ARRAY;
2054 if (vtop[-1].type.t & VT_VLA)
2055 vla_runtime_pointed_size(&vtop[-1].type);
2056 else {
2057 u = pointed_size(&vtop[-1].type);
2058 if (u < 0)
2059 tcc_error("unknown array element size");
2060 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2061 vpushll(u);
2062 #else
2063 /* XXX: cast to int ? (long long case) */
2064 vpushi(u);
2065 #endif
2067 gen_op('*');
2068 #if 0
2069 /* #ifdef CONFIG_TCC_BCHECK
2070 The main reason for removing this code:
2071 #include <stdio.h>
2072 int main ()
2074 int v[10];
2075 int i = 10;
2076 int j = 9;
2077 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2078 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2080 When this code is on, the output looks like
2081 v+i-j = 0xfffffffe
2082 v+(i-j) = 0xbff84000 */
2084 /* if evaluating constant expression, no code should be
2085 generated, so no bound check */
2086 if (tcc_state->do_bounds_check && !const_wanted) {
2087 /* if bounded pointers, we generate a special code to
2088 test bounds */
2089 if (op == '-') {
2090 vpushi(0);
2091 vswap();
2092 gen_op('-');
2094 gen_bounded_ptr_add();
2095 } else
2096 #endif
2098 gen_opic(op);
2100 /* restore the type in case gen_opic() swapped the operands */
2101 vtop->type = type1;
2103 } else if (is_float(bt1) || is_float(bt2)) {
2104 /* compute bigger type and do implicit casts */
2105 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2106 t = VT_LDOUBLE;
2107 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2108 t = VT_DOUBLE;
2109 } else {
2110 t = VT_FLOAT;
2112 /* floats can only be used for a few operations */
2113 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2114 (op < TOK_ULT || op > TOK_GT))
2115 tcc_error("invalid operands for binary operation");
2116 goto std_op;
2117 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2118 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2119 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2120 t |= VT_UNSIGNED;
2121 goto std_op;
2122 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2123 /* cast to biggest op */
2124 t = VT_LLONG;
2125 /* convert to unsigned if it does not fit in a long long */
2126 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2127 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2128 t |= VT_UNSIGNED;
2129 goto std_op;
2130 } else {
2131 /* integer operations */
2132 t = VT_INT;
2133 /* convert to unsigned if it does not fit in an integer */
2134 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2135 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2136 t |= VT_UNSIGNED;
2137 std_op:
2138 /* XXX: currently, some unsigned operations are explicit, so
2139 we modify them here */
2140 if (t & VT_UNSIGNED) {
2141 if (op == TOK_SAR)
2142 op = TOK_SHR;
2143 else if (op == '/')
2144 op = TOK_UDIV;
2145 else if (op == '%')
2146 op = TOK_UMOD;
2147 else if (op == TOK_LT)
2148 op = TOK_ULT;
2149 else if (op == TOK_GT)
2150 op = TOK_UGT;
2151 else if (op == TOK_LE)
2152 op = TOK_ULE;
2153 else if (op == TOK_GE)
2154 op = TOK_UGE;
2156 vswap();
2157 type1.t = t;
2158 gen_cast(&type1);
2159 vswap();
2160 /* special case for shifts and long long: we keep the shift as
2161 an integer */
2162 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2163 type1.t = VT_INT;
2164 gen_cast(&type1);
2165 if (is_float(t))
2166 gen_opif(op);
2167 else
2168 gen_opic(op);
2169 if (op >= TOK_ULT && op <= TOK_GT) {
2170 /* relational op: the result is an int */
2171 vtop->type.t = VT_INT;
2172 } else {
2173 vtop->type.t = t;
2176 // Make sure that we have converted to an rvalue:
2177 if (vtop->r & VT_LVAL)
2178 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
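/* Illustration (not part of the compiler) of the source-level behaviour the
   scaling and "std_op" conversions above implement:

       int *p, n;
       p + 2;       advances by 2 * sizeof(int) bytes (the gen_op('*') above)
       1u < -1;     both operands become unsigned, so the result is 1
       'a' < 'b';   relational result has type int

   Only the C semantics are sketched here; the actual register handling is
   done by gen_opic()/gen_opif(). */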
2181 #ifndef TCC_TARGET_ARM
2182 /* generic itof for unsigned long long case */
2183 static void gen_cvt_itof1(int t)
2185 #ifdef TCC_TARGET_ARM64
2186 gen_cvt_itof(t);
2187 #else
2188 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2189 (VT_LLONG | VT_UNSIGNED)) {
2191 if (t == VT_FLOAT)
2192 vpush_global_sym(&func_old_type, TOK___floatundisf);
2193 #if LDOUBLE_SIZE != 8
2194 else if (t == VT_LDOUBLE)
2195 vpush_global_sym(&func_old_type, TOK___floatundixf);
2196 #endif
2197 else
2198 vpush_global_sym(&func_old_type, TOK___floatundidf);
2199 vrott(2);
2200 gfunc_call(1);
2201 vpushi(0);
2202 vtop->r = reg_fret(t);
2203 } else {
2204 gen_cvt_itof(t);
2206 #endif
2208 #endif
2210 /* generic ftoi for unsigned long long case */
2211 static void gen_cvt_ftoi1(int t)
2213 #ifdef TCC_TARGET_ARM64
2214 gen_cvt_ftoi(t);
2215 #else
2216 int st;
2218 if (t == (VT_LLONG | VT_UNSIGNED)) {
2219 /* not handled natively */
2220 st = vtop->type.t & VT_BTYPE;
2221 if (st == VT_FLOAT)
2222 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2223 #if LDOUBLE_SIZE != 8
2224 else if (st == VT_LDOUBLE)
2225 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2226 #endif
2227 else
2228 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2229 vrott(2);
2230 gfunc_call(1);
2231 vpushi(0);
2232 vtop->r = REG_IRET;
2233 vtop->r2 = REG_LRET;
2234 } else {
2235 gen_cvt_ftoi(t);
2237 #endif
2240 /* force char or short cast */
2241 static void force_charshort_cast(int t)
2243 int bits, dbt;
2244 dbt = t & VT_BTYPE;
2245 /* XXX: add optimization if lvalue : just change type and offset */
2246 if (dbt == VT_BYTE)
2247 bits = 8;
2248 else
2249 bits = 16;
2250 if (t & VT_UNSIGNED) {
2251 vpushi((1 << bits) - 1);
2252 gen_op('&');
2253 } else {
2254 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2255 bits = 64 - bits;
2256 else
2257 bits = 32 - bits;
2258 vpushi(bits);
2259 gen_op(TOK_SHL);
2260 /* result must be signed, or the SAR would be converted to an SHR.
2261 This was not the case when "t" was a signed short
2262 and the last value on the stack was an unsigned int */
2263 vtop->type.t &= ~VT_UNSIGNED;
2264 vpushi(bits);
2265 gen_op(TOK_SAR);
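/* Example of the delayed char/short cast above, assuming a 32-bit int:

       int x = 0x1ff;
       unsigned char u = (unsigned char)x;    -> x & 0xff        == 0xff
       signed char   s = (signed char)x;      -> (x << 24) >> 24 == -1

   The unsigned case masks; the signed case sign-extends via SHL/SAR. */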
2269 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2270 static void gen_cast(CType *type)
2272 int sbt, dbt, sf, df, c, p;
2274 /* special delayed cast for char/short */
2275 /* XXX: in some cases (multiple cascaded casts), it may still
2276 be incorrect */
2277 if (vtop->r & VT_MUSTCAST) {
2278 vtop->r &= ~VT_MUSTCAST;
2279 force_charshort_cast(vtop->type.t);
2282 /* bitfields first get cast to ints */
2283 if (vtop->type.t & VT_BITFIELD) {
2284 gv(RC_INT);
2287 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2288 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2290 if (sbt != dbt) {
2291 sf = is_float(sbt);
2292 df = is_float(dbt);
2293 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2294 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2295 if (c) {
2296 /* constant case: we can do it now */
2297 /* XXX: in ISOC, cannot do it if error in convert */
2298 if (sbt == VT_FLOAT)
2299 vtop->c.ld = vtop->c.f;
2300 else if (sbt == VT_DOUBLE)
2301 vtop->c.ld = vtop->c.d;
2303 if (df) {
2304 if ((sbt & VT_BTYPE) == VT_LLONG) {
2305 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2306 vtop->c.ld = vtop->c.i;
2307 else
2308 vtop->c.ld = -(long double)-vtop->c.i;
2309 } else if(!sf) {
2310 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2311 vtop->c.ld = (uint32_t)vtop->c.i;
2312 else
2313 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2316 if (dbt == VT_FLOAT)
2317 vtop->c.f = (float)vtop->c.ld;
2318 else if (dbt == VT_DOUBLE)
2319 vtop->c.d = (double)vtop->c.ld;
2320 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2321 vtop->c.i = vtop->c.ld;
2322 } else if (sf && dbt == VT_BOOL) {
2323 vtop->c.i = (vtop->c.ld != 0);
2324 } else {
2325 if(sf)
2326 vtop->c.i = vtop->c.ld;
2327 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2329 else if (sbt & VT_UNSIGNED)
2330 vtop->c.i = (uint32_t)vtop->c.i;
2331 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2332 else if (sbt == VT_PTR)
2334 #endif
2335 else if (sbt != VT_LLONG)
2336 vtop->c.i = ((uint32_t)vtop->c.i |
2337 -(vtop->c.i & 0x80000000));
2339 if (dbt == (VT_LLONG|VT_UNSIGNED))
2341 else if (dbt == VT_BOOL)
2342 vtop->c.i = (vtop->c.i != 0);
2343 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2344 else if (dbt == VT_PTR)
2346 #endif
2347 else if (dbt != VT_LLONG) {
2348 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2349 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2350 0xffffffff);
2351 vtop->c.i &= m;
2352 if (!(dbt & VT_UNSIGNED))
2353 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2356 } else if (p && dbt == VT_BOOL) {
2357 vtop->r = VT_CONST;
2358 vtop->c.i = 1;
2359 } else {
2360 /* non constant case: generate code */
2361 if (sf && df) {
2362 /* convert from fp to fp */
2363 gen_cvt_ftof(dbt);
2364 } else if (df) {
2365 /* convert int to fp */
2366 gen_cvt_itof1(dbt);
2367 } else if (sf) {
2368 /* convert fp to int */
2369 if (dbt == VT_BOOL) {
2370 vpushi(0);
2371 gen_op(TOK_NE);
2372 } else {
2373 /* we handle char/short/etc... with generic code */
2374 if (dbt != (VT_INT | VT_UNSIGNED) &&
2375 dbt != (VT_LLONG | VT_UNSIGNED) &&
2376 dbt != VT_LLONG)
2377 dbt = VT_INT;
2378 gen_cvt_ftoi1(dbt);
2379 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2380 /* additional cast for char/short... */
2381 vtop->type.t = dbt;
2382 gen_cast(type);
2385 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2386 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2387 if ((sbt & VT_BTYPE) != VT_LLONG) {
2388 /* scalar to long long */
2389 /* machine independent conversion */
2390 gv(RC_INT);
2391 /* generate high word */
2392 if (sbt == (VT_INT | VT_UNSIGNED)) {
2393 vpushi(0);
2394 gv(RC_INT);
2395 } else {
2396 if (sbt == VT_PTR) {
2397 /* cast from pointer to int before we apply
2398 the shift operation, which pointers don't support */
2399 gen_cast(&int_type);
2401 gv_dup();
2402 vpushi(31);
2403 gen_op(TOK_SAR);
2405 /* patch second register */
2406 vtop[-1].r2 = vtop->r;
2407 vpop();
2409 #else
2410 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2411 (dbt & VT_BTYPE) == VT_PTR ||
2412 (dbt & VT_BTYPE) == VT_FUNC) {
2413 if ((sbt & VT_BTYPE) != VT_LLONG &&
2414 (sbt & VT_BTYPE) != VT_PTR &&
2415 (sbt & VT_BTYPE) != VT_FUNC) {
2416 /* need to convert from 32bit to 64bit */
2417 gv(RC_INT);
2418 if (sbt != (VT_INT | VT_UNSIGNED)) {
2419 #if defined(TCC_TARGET_ARM64)
2420 gen_cvt_sxtw();
2421 #elif defined(TCC_TARGET_X86_64)
2422 int r = gv(RC_INT);
2423 /* x86_64 specific: movslq */
2424 o(0x6348);
2425 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2426 #else
2427 #error
2428 #endif
2431 #endif
2432 } else if (dbt == VT_BOOL) {
2433 /* scalar to bool */
2434 vpushi(0);
2435 gen_op(TOK_NE);
2436 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2437 (dbt & VT_BTYPE) == VT_SHORT) {
2438 if (sbt == VT_PTR) {
2439 vtop->type.t = VT_INT;
2440 tcc_warning("nonportable conversion from pointer to char/short");
2442 force_charshort_cast(dbt);
2443 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2444 } else if ((dbt & VT_BTYPE) == VT_INT) {
2445 /* scalar to int */
2446 if ((sbt & VT_BTYPE) == VT_LLONG) {
2447 /* from long long: just take low order word */
2448 lexpand();
2449 vpop();
2451 /* if lvalue and single word type, nothing to do because
2452 the lvalue already contains the real type size (see
2453 VT_LVAL_xxx constants) */
2454 #endif
2457 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2458 /* if we are casting between pointer types,
2459 we must update the VT_LVAL_xxx size */
2460 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2461 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2463 vtop->type = *type;
2466 /* return type size as known at compile time. Put alignment at 'a' */
2467 ST_FUNC int type_size(CType *type, int *a)
2469 Sym *s;
2470 int bt;
2472 bt = type->t & VT_BTYPE;
2473 if (bt == VT_STRUCT) {
2474 /* struct/union */
2475 s = type->ref;
2476 *a = s->r;
2477 return s->c;
2478 } else if (bt == VT_PTR) {
2479 if (type->t & VT_ARRAY) {
2480 int ts;
2482 s = type->ref;
2483 ts = type_size(&s->type, a);
2485 if (ts < 0 && s->c < 0)
2486 ts = -ts;
2488 return ts * s->c;
2489 } else {
2490 *a = PTR_SIZE;
2491 return PTR_SIZE;
2493 } else if (bt == VT_LDOUBLE) {
2494 *a = LDOUBLE_ALIGN;
2495 return LDOUBLE_SIZE;
2496 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2497 #ifdef TCC_TARGET_I386
2498 #ifdef TCC_TARGET_PE
2499 *a = 8;
2500 #else
2501 *a = 4;
2502 #endif
2503 #elif defined(TCC_TARGET_ARM)
2504 #ifdef TCC_ARM_EABI
2505 *a = 8;
2506 #else
2507 *a = 4;
2508 #endif
2509 #else
2510 *a = 8;
2511 #endif
2512 return 8;
2513 } else if (bt == VT_INT || bt == VT_FLOAT) {
2514 *a = 4;
2515 return 4;
2516 } else if (bt == VT_SHORT) {
2517 *a = 2;
2518 return 2;
2519 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2520 *a = 8;
2521 return 16;
2522 } else if (bt == VT_ENUM) {
2523 *a = 4;
2524 /* Enums might be incomplete, so don't just return '4' here. */
2525 return type->ref->c;
2526 } else {
2527 /* char, void, function, _Bool */
2528 *a = 1;
2529 return 1;
2533 /* push type size as known at run time on top of value stack. Put
2534 alignment at 'a' */
2535 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2537 if (type->t & VT_VLA) {
2538 type_size(&type->ref->type, a);
2539 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2540 } else {
2541 vpushi(type_size(type, a));
2545 static void vla_sp_restore(void) {
2546 if (vlas_in_scope) {
2547 gen_vla_sp_restore(vla_sp_loc);
2551 static void vla_sp_restore_root(void) {
2552 if (vlas_in_scope) {
2553 gen_vla_sp_restore(vla_sp_root_loc);
2557 /* return the pointed type of t */
2558 static inline CType *pointed_type(CType *type)
2560 return &type->ref->type;
2563 /* modify type so that it is a pointer to the original type. */
2564 ST_FUNC void mk_pointer(CType *type)
2566 Sym *s;
2567 s = sym_push(SYM_FIELD, type, 0, -1);
2568 type->t = VT_PTR | (type->t & ~VT_TYPE);
2569 type->ref = s;
2572 /* compare function types. OLD functions match any new functions */
2573 static int is_compatible_func(CType *type1, CType *type2)
2575 Sym *s1, *s2;
2577 s1 = type1->ref;
2578 s2 = type2->ref;
2579 if (!is_compatible_types(&s1->type, &s2->type))
2580 return 0;
2581 /* check func_call */
2582 if (s1->a.func_call != s2->a.func_call)
2583 return 0;
2584 /* XXX: not complete */
2585 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2586 return 1;
2587 if (s1->c != s2->c)
2588 return 0;
2589 while (s1 != NULL) {
2590 if (s2 == NULL)
2591 return 0;
2592 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2593 return 0;
2594 s1 = s1->next;
2595 s2 = s2->next;
2597 if (s2)
2598 return 0;
2599 return 1;
2602 /* return true if type1 and type2 are the same. If unqualified is
2603 true, qualifiers on the types are ignored.
2605 - enums are not checked, as with gcc's __builtin_types_compatible_p()
2607 static int compare_types(CType *type1, CType *type2, int unqualified)
2609 int bt1, t1, t2;
2611 t1 = type1->t & VT_TYPE;
2612 t2 = type2->t & VT_TYPE;
2613 if (unqualified) {
2614 /* strip qualifiers before comparing */
2615 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2616 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2618 /* Default vs. explicit signedness only matters for char */
2619 if ((t1 & VT_BTYPE) != VT_BYTE) {
2620 t1 &= ~VT_DEFSIGN;
2621 t2 &= ~VT_DEFSIGN;
2623 /* An enum is compatible with (unsigned) int. Ideally we would
2624 store the enum's signedness in type->ref.a.<some_bit> and
2625 only accept unsigned enums with unsigned int and vice versa.
2626 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2627 from pointer target types, so we can't add it here either. */
2628 if ((t1 & VT_BTYPE) == VT_ENUM) {
2629 t1 = VT_INT;
2630 if (type1->ref->a.unsigned_enum)
2631 t1 |= VT_UNSIGNED;
2633 if ((t2 & VT_BTYPE) == VT_ENUM) {
2634 t2 = VT_INT;
2635 if (type2->ref->a.unsigned_enum)
2636 t2 |= VT_UNSIGNED;
2638 /* XXX: bitfields ? */
2639 if (t1 != t2)
2640 return 0;
2641 /* test more complicated cases */
2642 bt1 = t1 & VT_BTYPE;
2643 if (bt1 == VT_PTR) {
2644 type1 = pointed_type(type1);
2645 type2 = pointed_type(type2);
2646 return is_compatible_types(type1, type2);
2647 } else if (bt1 == VT_STRUCT) {
2648 return (type1->ref == type2->ref);
2649 } else if (bt1 == VT_FUNC) {
2650 return is_compatible_func(type1, type2);
2651 } else {
2652 return 1;
2656 /* return true if type1 and type2 are exactly the same (including
2657 qualifiers).
2659 static int is_compatible_types(CType *type1, CType *type2)
2661 return compare_types(type1,type2,0);
2664 /* return true if type1 and type2 are the same (ignoring qualifiers).
2666 static int is_compatible_parameter_types(CType *type1, CType *type2)
2668 return compare_types(type1,type2,1);
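/* Examples of what is_compatible_func() above accepts (illustrative):

       int f();              old-style declaration (FUNC_OLD)
       int f(int x) { ... }  OK: an old-style declaration matches any
                             later prototype or definition

       int g(int);
       int g(double);        rejected: parameter types are incompatible
*/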
2671 /* print a type. If 'varstr' is not NULL, then the variable is also
2672 printed in the type */
2673 /* XXX: union */
2674 /* XXX: add array and function pointers */
2675 static void type_to_str(char *buf, int buf_size,
2676 CType *type, const char *varstr)
2678 int bt, v, t;
2679 Sym *s, *sa;
2680 char buf1[256];
2681 const char *tstr;
2683 t = type->t & VT_TYPE;
2684 bt = t & VT_BTYPE;
2685 buf[0] = '\0';
2686 if (t & VT_CONSTANT)
2687 pstrcat(buf, buf_size, "const ");
2688 if (t & VT_VOLATILE)
2689 pstrcat(buf, buf_size, "volatile ");
2690 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2691 pstrcat(buf, buf_size, "unsigned ");
2692 else if (t & VT_DEFSIGN)
2693 pstrcat(buf, buf_size, "signed ");
2694 switch(bt) {
2695 case VT_VOID:
2696 tstr = "void";
2697 goto add_tstr;
2698 case VT_BOOL:
2699 tstr = "_Bool";
2700 goto add_tstr;
2701 case VT_BYTE:
2702 tstr = "char";
2703 goto add_tstr;
2704 case VT_SHORT:
2705 tstr = "short";
2706 goto add_tstr;
2707 case VT_INT:
2708 tstr = "int";
2709 goto add_tstr;
2710 case VT_LONG:
2711 tstr = "long";
2712 goto add_tstr;
2713 case VT_LLONG:
2714 tstr = "long long";
2715 goto add_tstr;
2716 case VT_FLOAT:
2717 tstr = "float";
2718 goto add_tstr;
2719 case VT_DOUBLE:
2720 tstr = "double";
2721 goto add_tstr;
2722 case VT_LDOUBLE:
2723 tstr = "long double";
2724 add_tstr:
2725 pstrcat(buf, buf_size, tstr);
2726 break;
2727 case VT_ENUM:
2728 case VT_STRUCT:
2729 if (bt == VT_STRUCT)
2730 tstr = "struct ";
2731 else
2732 tstr = "enum ";
2733 pstrcat(buf, buf_size, tstr);
2734 v = type->ref->v & ~SYM_STRUCT;
2735 if (v >= SYM_FIRST_ANOM)
2736 pstrcat(buf, buf_size, "<anonymous>");
2737 else
2738 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2739 break;
2740 case VT_FUNC:
2741 s = type->ref;
2742 type_to_str(buf, buf_size, &s->type, varstr);
2743 pstrcat(buf, buf_size, "(");
2744 sa = s->next;
2745 while (sa != NULL) {
2746 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2747 pstrcat(buf, buf_size, buf1);
2748 sa = sa->next;
2749 if (sa)
2750 pstrcat(buf, buf_size, ", ");
2752 pstrcat(buf, buf_size, ")");
2753 goto no_var;
2754 case VT_PTR:
2755 s = type->ref;
2756 if (t & VT_ARRAY) {
2757 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2758 type_to_str(buf, buf_size, &s->type, buf1);
2759 goto no_var;
2761 pstrcpy(buf1, sizeof(buf1), "*");
2762 if (t & VT_CONSTANT)
2763 pstrcat(buf1, buf_size, "const ");
2764 if (t & VT_VOLATILE)
2765 pstrcat(buf1, buf_size, "volatile ");
2766 if (varstr)
2767 pstrcat(buf1, sizeof(buf1), varstr);
2768 type_to_str(buf, buf_size, &s->type, buf1);
2769 goto no_var;
2771 if (varstr) {
2772 pstrcat(buf, buf_size, " ");
2773 pstrcat(buf, buf_size, varstr);
2775 no_var: ;
2778 /* verify type compatibility to store vtop in 'dt' type, and generate
2779 casts if needed. */
2780 static void gen_assign_cast(CType *dt)
2782 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2783 char buf1[256], buf2[256];
2784 int dbt, sbt;
2786 st = &vtop->type; /* source type */
2787 dbt = dt->t & VT_BTYPE;
2788 sbt = st->t & VT_BTYPE;
2789 if (sbt == VT_VOID || dbt == VT_VOID) {
2790 if (sbt == VT_VOID && dbt == VT_VOID)
2791 ; /*
2792 It is OK if both are void.
2793 A test program:
2794 void func1() {}
2795 void func2() {
2796 return func1();
2798 gcc accepts this program
2800 else
2801 tcc_error("cannot cast from/to void");
2803 if (dt->t & VT_CONSTANT)
2804 tcc_warning("assignment of read-only location");
2805 switch(dbt) {
2806 case VT_PTR:
2807 /* special cases for pointers */
2808 /* '0' can also be a pointer */
2809 if (is_null_pointer(vtop))
2810 goto type_ok;
2811 /* accept implicit pointer to integer cast with warning */
2812 if (is_integer_btype(sbt)) {
2813 tcc_warning("assignment makes pointer from integer without a cast");
2814 goto type_ok;
2816 type1 = pointed_type(dt);
2817 /* a function is implicitly a function pointer */
2818 if (sbt == VT_FUNC) {
2819 if ((type1->t & VT_BTYPE) != VT_VOID &&
2820 !is_compatible_types(pointed_type(dt), st))
2821 tcc_warning("assignment from incompatible pointer type");
2822 goto type_ok;
2824 if (sbt != VT_PTR)
2825 goto error;
2826 type2 = pointed_type(st);
2827 if ((type1->t & VT_BTYPE) == VT_VOID ||
2828 (type2->t & VT_BTYPE) == VT_VOID) {
2829 /* void * can match anything */
2830 } else {
2831 /* exact type match, except for qualifiers */
2832 tmp_type1 = *type1;
2833 tmp_type2 = *type2;
2834 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2835 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2836 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2837 /* Like GCC, don't warn by default for mere changes
2838 in pointer target signedness. Do warn for different
2839 base types, though, in particular for unsigned enums
2840 and signed int targets. */
2841 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2842 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2843 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2845 else
2846 tcc_warning("assignment from incompatible pointer type");
2849 /* check const and volatile */
2850 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2851 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2852 tcc_warning("assignment discards qualifiers from pointer target type");
2853 break;
2854 case VT_BYTE:
2855 case VT_SHORT:
2856 case VT_INT:
2857 case VT_LLONG:
2858 if (sbt == VT_PTR || sbt == VT_FUNC) {
2859 tcc_warning("assignment makes integer from pointer without a cast");
2860 } else if (sbt == VT_STRUCT) {
2861 goto case_VT_STRUCT;
2863 /* XXX: more tests */
2864 break;
2865 case VT_STRUCT:
2866 case_VT_STRUCT:
2867 tmp_type1 = *dt;
2868 tmp_type2 = *st;
2869 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2870 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2871 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2872 error:
2873 type_to_str(buf1, sizeof(buf1), st, NULL);
2874 type_to_str(buf2, sizeof(buf2), dt, NULL);
2875 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2877 break;
2879 type_ok:
2880 gen_cast(dt);
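/* Examples of the diagnostics produced above (illustrative):

       int *p; char *q; long n; void *v;
       p = q;     warning: assignment from incompatible pointer type
       p = n;     warning: assignment makes pointer from integer without a cast
       n = p;     warning: assignment makes integer from pointer without a cast
       p = 0;     OK: null pointer constant
       v = p;     OK: void * matches any pointer type
*/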
2883 /* store vtop in lvalue pushed on stack */
2884 ST_FUNC void vstore(void)
2886 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2888 ft = vtop[-1].type.t;
2889 sbt = vtop->type.t & VT_BTYPE;
2890 dbt = ft & VT_BTYPE;
2891 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2892 (sbt == VT_INT && dbt == VT_SHORT))
2893 && !(vtop->type.t & VT_BITFIELD)) {
2894 /* optimize char/short casts */
2895 delayed_cast = VT_MUSTCAST;
2896 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2897 ((1 << VT_STRUCT_SHIFT) - 1));
2898 /* XXX: factorize */
2899 if (ft & VT_CONSTANT)
2900 tcc_warning("assignment of read-only location");
2901 } else {
2902 delayed_cast = 0;
2903 if (!(ft & VT_BITFIELD))
2904 gen_assign_cast(&vtop[-1].type);
2907 if (sbt == VT_STRUCT) {
2908 /* if structure, only generate pointer */
2909 /* structure assignment : generate memcpy */
2910 /* XXX: optimize if small size */
2911 size = type_size(&vtop->type, &align);
2913 /* destination */
2914 vswap();
2915 vtop->type.t = VT_PTR;
2916 gaddrof();
2918 /* address of memcpy() */
2919 #ifdef TCC_ARM_EABI
2920 if(!(align & 7))
2921 vpush_global_sym(&func_old_type, TOK_memcpy8);
2922 else if(!(align & 3))
2923 vpush_global_sym(&func_old_type, TOK_memcpy4);
2924 else
2925 #endif
2926 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2927 vpush_global_sym(&func_old_type, TOK_memmove);
2929 vswap();
2930 /* source */
2931 vpushv(vtop - 2);
2932 vtop->type.t = VT_PTR;
2933 gaddrof();
2934 /* type size */
2935 vpushi(size);
2936 gfunc_call(3);
2938 /* leave source on stack */
2939 } else if (ft & VT_BITFIELD) {
2940 /* bitfield store handling */
2942 /* save lvalue as expression result (example: s.b = s.a = n;) */
2943 vdup(), vtop[-1] = vtop[-2];
2945 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2946 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2947 /* remove bit field info to avoid loops */
2948 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2950 if((ft & VT_BTYPE) == VT_BOOL) {
2951 gen_cast(&vtop[-1].type);
2952 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2955 /* duplicate destination */
2956 vdup();
2957 vtop[-1] = vtop[-2];
2959 /* mask and shift source */
2960 if((ft & VT_BTYPE) != VT_BOOL) {
2961 if((ft & VT_BTYPE) == VT_LLONG) {
2962 vpushll((1ULL << bit_size) - 1ULL);
2963 } else {
2964 vpushi((1 << bit_size) - 1);
2966 gen_op('&');
2968 vpushi(bit_pos);
2969 gen_op(TOK_SHL);
2970 /* load destination, mask and or with source */
2971 vswap();
2972 if((ft & VT_BTYPE) == VT_LLONG) {
2973 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2974 } else {
2975 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2977 gen_op('&');
2978 gen_op('|');
2979 /* store result */
2980 vstore();
2981 /* ... and discard */
2982 vpop();
2984 } else {
2985 #ifdef CONFIG_TCC_BCHECK
2986 /* bound check case */
2987 if (vtop[-1].r & VT_MUSTBOUND) {
2988 vswap();
2989 gbound();
2990 vswap();
2992 #endif
2993 rc = RC_INT;
2994 if (is_float(ft)) {
2995 rc = RC_FLOAT;
2996 #ifdef TCC_TARGET_X86_64
2997 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2998 rc = RC_ST0;
2999 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3000 rc = RC_FRET;
3002 #endif
3004 r = gv(rc); /* generate value */
3005 /* if lvalue was saved on stack, must read it */
3006 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3007 SValue sv;
3008 t = get_reg(RC_INT);
3009 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3010 sv.type.t = VT_PTR;
3011 #else
3012 sv.type.t = VT_INT;
3013 #endif
3014 sv.r = VT_LOCAL | VT_LVAL;
3015 sv.c.i = vtop[-1].c.i;
3016 load(t, &sv);
3017 vtop[-1].r = t | VT_LVAL;
3019 /* two-word case handling: store second register at word + 4 (or +8 for x86-64) */
3020 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3021 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3022 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3023 #else
3024 if ((ft & VT_BTYPE) == VT_LLONG) {
3025 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3026 #endif
3027 vtop[-1].type.t = load_type;
3028 store(r, vtop - 1);
3029 vswap();
3030 /* convert to int to increment easily */
3031 vtop->type.t = addr_type;
3032 gaddrof();
3033 vpushi(load_size);
3034 gen_op('+');
3035 vtop->r |= VT_LVAL;
3036 vswap();
3037 vtop[-1].type.t = load_type;
3038 /* XXX: it works because r2 is spilled last ! */
3039 store(vtop->r2, vtop - 1);
3040 } else {
3041 store(r, vtop - 1);
3044 vswap();
3045 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3046 vtop->r |= delayed_cast;
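/* Sketch of the bit-field store sequence above: for a field f of width 3
   at bit position 4, an assignment  s.f = v;  is emitted roughly as

       word = (word & ~(0x7 << 4)) | ((v & 0x7) << 4);

   i.e. mask and shift the source, mask the destination, OR, then store. */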
3050 /* handle pre/post increment and decrement: 'post' selects the postfix form, c is the token ++ or -- */
3051 ST_FUNC void inc(int post, int c)
3053 test_lvalue();
3054 vdup(); /* save lvalue */
3055 if (post) {
3056 gv_dup(); /* duplicate value */
3057 vrotb(3);
3058 vrotb(3);
3060 /* add constant */
3061 vpushi(c - TOK_MID);
3062 gen_op('+');
3063 vstore(); /* store value */
3064 if (post)
3065 vpop(); /* if post op, return saved value */
3068 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3070 /* read the string */
3071 if (tok != TOK_STR)
3072 expect(msg);
3073 cstr_new(astr);
3074 while (tok == TOK_STR) {
3075 /* XXX: add \0 handling too ? */
3076 cstr_cat(astr, tokc.str.data, -1);
3077 next();
3079 cstr_ccat(astr, '\0');
3082 /* If I is >= 1 and a power of two, returns log2(i)+1.
3083 If I is 0 returns 0. */
3084 static int exact_log2p1(int i)
3086 int ret;
3087 if (!i)
3088 return 0;
3089 for (ret = 1; i >= 1 << 8; ret += 8)
3090 i >>= 8;
3091 if (i >= 1 << 4)
3092 ret += 4, i >>= 4;
3093 if (i >= 1 << 2)
3094 ret += 2, i >>= 2;
3095 if (i >= 1 << 1)
3096 ret++;
3097 return ret;
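/* Examples: exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(2) == 2, exact_log2p1(8) == 4, exact_log2p1(16) == 5.
   parse_attribute() below uses it to store aligned(n) compactly. */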
3100 /* Parse GNUC __attribute__ extension. Currently, the following
3101 extensions are recognized:
3102 - aligned(n) : set data/function alignment.
3103 - packed : force data alignment to 1
3104 - section(x) : generate data/code in this section.
3105 - unused : currently ignored, but may be used someday.
3106 - regparm(n) : pass function parameters in registers (i386 only)
3108 static void parse_attribute(AttributeDef *ad)
3110 int t, n;
3111 CString astr;
3113 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3114 next();
3115 skip('(');
3116 skip('(');
3117 while (tok != ')') {
3118 if (tok < TOK_IDENT)
3119 expect("attribute name");
3120 t = tok;
3121 next();
3122 switch(t) {
3123 case TOK_SECTION1:
3124 case TOK_SECTION2:
3125 skip('(');
3126 parse_mult_str(&astr, "section name");
3127 ad->section = find_section(tcc_state, (char *)astr.data);
3128 skip(')');
3129 cstr_free(&astr);
3130 break;
3131 case TOK_ALIAS1:
3132 case TOK_ALIAS2:
3133 skip('(');
3134 parse_mult_str(&astr, "alias(\"target\")");
3135 ad->alias_target = /* save string as token, for later */
3136 tok_alloc((char*)astr.data, astr.size-1)->tok;
3137 skip(')');
3138 cstr_free(&astr);
3139 break;
3140 case TOK_VISIBILITY1:
3141 case TOK_VISIBILITY2:
3142 skip('(');
3143 parse_mult_str(&astr,
3144 "visibility(\"default|hidden|internal|protected\")");
3145 if (!strcmp (astr.data, "default"))
3146 ad->a.visibility = STV_DEFAULT;
3147 else if (!strcmp (astr.data, "hidden"))
3148 ad->a.visibility = STV_HIDDEN;
3149 else if (!strcmp (astr.data, "internal"))
3150 ad->a.visibility = STV_INTERNAL;
3151 else if (!strcmp (astr.data, "protected"))
3152 ad->a.visibility = STV_PROTECTED;
3153 else
3154 expect("visibility(\"default|hidden|internal|protected\")");
3155 skip(')');
3156 cstr_free(&astr);
3157 break;
3158 case TOK_ALIGNED1:
3159 case TOK_ALIGNED2:
3160 if (tok == '(') {
3161 next();
3162 n = expr_const();
3163 if (n <= 0 || (n & (n - 1)) != 0)
3164 tcc_error("alignment must be a positive power of two");
3165 skip(')');
3166 } else {
3167 n = MAX_ALIGN;
3169 ad->a.aligned = exact_log2p1(n);
3170 if (n != 1 << (ad->a.aligned - 1))
3171 tcc_error("alignment of %d is larger than implemented", n);
3172 break;
3173 case TOK_PACKED1:
3174 case TOK_PACKED2:
3175 ad->a.packed = 1;
3176 break;
3177 case TOK_WEAK1:
3178 case TOK_WEAK2:
3179 ad->a.weak = 1;
3180 break;
3181 case TOK_UNUSED1:
3182 case TOK_UNUSED2:
3183 /* currently, no need to handle it because tcc does not
3184 track unused objects */
3185 break;
3186 case TOK_NORETURN1:
3187 case TOK_NORETURN2:
3188 /* currently ignored: tcc makes no use of the
3189 'noreturn' information */
3190 break;
3191 case TOK_CDECL1:
3192 case TOK_CDECL2:
3193 case TOK_CDECL3:
3194 ad->a.func_call = FUNC_CDECL;
3195 break;
3196 case TOK_STDCALL1:
3197 case TOK_STDCALL2:
3198 case TOK_STDCALL3:
3199 ad->a.func_call = FUNC_STDCALL;
3200 break;
3201 #ifdef TCC_TARGET_I386
3202 case TOK_REGPARM1:
3203 case TOK_REGPARM2:
3204 skip('(');
3205 n = expr_const();
3206 if (n > 3)
3207 n = 3;
3208 else if (n < 0)
3209 n = 0;
3210 if (n > 0)
3211 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3212 skip(')');
3213 break;
3214 case TOK_FASTCALL1:
3215 case TOK_FASTCALL2:
3216 case TOK_FASTCALL3:
3217 ad->a.func_call = FUNC_FASTCALLW;
3218 break;
3219 #endif
3220 case TOK_MODE:
3221 skip('(');
3222 switch(tok) {
3223 case TOK_MODE_DI:
3224 ad->a.mode = VT_LLONG + 1;
3225 break;
3226 case TOK_MODE_QI:
3227 ad->a.mode = VT_BYTE + 1;
3228 break;
3229 case TOK_MODE_HI:
3230 ad->a.mode = VT_SHORT + 1;
3231 break;
3232 case TOK_MODE_SI:
3233 case TOK_MODE_word:
3234 ad->a.mode = VT_INT + 1;
3235 break;
3236 default:
3237 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3238 break;
3240 next();
3241 skip(')');
3242 break;
3243 case TOK_DLLEXPORT:
3244 ad->a.func_export = 1;
3245 break;
3246 case TOK_DLLIMPORT:
3247 ad->a.func_import = 1;
3248 break;
3249 default:
3250 if (tcc_state->warn_unsupported)
3251 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3252 /* skip parameters */
3253 if (tok == '(') {
3254 int parenthesis = 0;
3255 do {
3256 if (tok == '(')
3257 parenthesis++;
3258 else if (tok == ')')
3259 parenthesis--;
3260 next();
3261 } while (parenthesis && tok != -1);
3263 break;
3265 if (tok != ',')
3266 break;
3267 next();
3269 skip(')');
3270 skip(')');
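/* Examples of declarations accepted by parse_attribute() (illustrative):

       int buf[64] __attribute__((aligned(16), section(".mydata")));
       struct pkt { char t; int v; } __attribute__((packed));
       void fatal(const char *msg) __attribute__((noreturn));
       int sum(int a, int b) __attribute__((regparm(2)));    i386 only
*/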
3274 static Sym * find_field (CType *type, int v)
3276 Sym *s = type->ref;
3277 v |= SYM_FIELD;
3278 while ((s = s->next) != NULL) {
3279 if ((s->v & SYM_FIELD) &&
3280 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3281 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3282 Sym *ret = find_field (&s->type, v);
3283 if (ret)
3284 return ret;
3286 if (s->v == v)
3287 break;
3289 return s;
3292 static void struct_add_offset (Sym *s, int offset)
3294 while ((s = s->next) != NULL) {
3295 if ((s->v & SYM_FIELD) &&
3296 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3297 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3298 struct_add_offset(s->type.ref, offset);
3299 } else
3300 s->c += offset;
3304 static void struct_layout(CType *type, AttributeDef *ad)
3306 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3307 int pcc = !tcc_state->ms_bitfields;
3308 Sym *f;
3309 if (ad->a.aligned)
3310 maxalign = 1 << (ad->a.aligned - 1);
3311 else
3312 maxalign = 1;
3313 offset = 0;
3314 c = 0;
3315 bit_pos = 0;
3316 prevbt = VT_STRUCT; /* make it never match */
3317 prev_bit_size = 0;
3318 for (f = type->ref->next; f; f = f->next) {
3319 int typealign, bit_size;
3320 int size = type_size(&f->type, &typealign);
3321 if (f->type.t & VT_BITFIELD)
3322 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3323 else
3324 bit_size = -1;
3325 if (bit_size == 0 && pcc) {
3326 /* Zero-width bit-fields in PCC mode aren't affected
3327 by any packing (attribute or pragma). */
3328 align = typealign;
3329 } else if (f->r > 1) {
3330 align = f->r;
3331 } else if (ad->a.packed || f->r == 1) {
3332 align = 1;
3333 /* Packed fields or packed records don't let the base type
3334 influence the record's type alignment. */
3335 typealign = 1;
3336 } else {
3337 align = typealign;
3339 if (type->ref->type.t != TOK_STRUCT) {
3340 if (pcc && bit_size >= 0)
3341 size = (bit_size + 7) >> 3;
3342 /* Bit position is already zero from our caller. */
3343 offset = 0;
3344 if (size > c)
3345 c = size;
3346 } else if (bit_size < 0) {
3347 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3348 prevbt = VT_STRUCT;
3349 prev_bit_size = 0;
3350 c = (c + addbytes + align - 1) & -align;
3351 offset = c;
3352 if (size > 0)
3353 c += size;
3354 bit_pos = 0;
3355 } else {
3356 /* A bit-field. Layout is more complicated. There are two
3357 options TCC implements: PCC compatible and MS compatible
3358 (PCC compatible is what GCC uses for almost all targets).
3359 In PCC layout the overall size of the struct (in c) is
3360 _excluding_ the current run of bit-fields (that is,
3361 there's at least additional bit_pos bits after c). In
3362 MS layout c does include the current run of bit-fields.
3364 This matters for calculating the natural alignment buckets
3365 in PCC mode. */
3367 /* 'align' will be used to influence the record's alignment,
3368 so it's the max of specified and type alignment, except
3369 in certain cases that depend on the mode. */
3370 if (align < typealign)
3371 align = typealign;
3372 if (pcc) {
3373 /* In PCC layout a non-packed bit-field is placed adjacent
3374 to the preceding bit-fields, except if it would overflow
3375 its container (depending on base type) or it's a zero-width
3376 bit-field. Packed non-zero-width bit-fields always are
3377 placed adjacent. */
3378 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3379 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3380 if (bit_size == 0 ||
3381 ((typealign != 1 || size == 1) &&
3382 (ofs2 / (typealign * 8)) > (size/typealign))) {
3383 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3384 bit_pos = 0;
3385 } else while (bit_pos + bit_size > size * 8) {
3386 c += size;
3387 bit_pos -= size * 8;
3389 offset = c;
3390 /* In PCC layout named bit-fields influence the alignment
3391 of the containing struct using the base type's alignment,
3392 except for packed fields (which here have correct
3393 align/typealign). */
3394 if ((f->v & SYM_FIRST_ANOM))
3395 align = 1;
3396 } else {
3397 bt = f->type.t & VT_BTYPE;
3398 if ((bit_pos + bit_size > size * 8) ||
3399 (bit_size > 0) == (bt != prevbt)) {
3400 c = (c + typealign - 1) & -typealign;
3401 offset = c;
3402 bit_pos = 0;
3403 /* In MS bitfield mode a bit-field run always uses
3404 at least as many bits as the underlying type.
3405 To start a new run it's also required that this
3406 or the last bit-field had non-zero width. */
3407 if (bit_size || prev_bit_size)
3408 c += size;
3410 /* In MS layout the record's alignment is normally
3411 influenced by the field, except for a zero-width
3412 field at the start of a run (but by further zero-width
3413 fields it is again). */
3414 if (bit_size == 0 && prevbt != bt)
3415 align = 1;
3416 prevbt = bt;
3417 prev_bit_size = bit_size;
3419 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3420 | (bit_pos << VT_STRUCT_SHIFT);
3421 bit_pos += bit_size;
3422 if (pcc && bit_pos >= size * 8) {
3423 c += size;
3424 bit_pos -= size * 8;
3427 if (align > maxalign)
3428 maxalign = align;
3429 #if 0
3430 printf("set field %s offset=%d c=%d",
3431 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3432 if (f->type.t & VT_BITFIELD) {
3433 printf(" pos=%d size=%d",
3434 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3435 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3437 printf("\n");
3438 #endif
3440 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3441 Sym *ass;
3442 /* An anonymous struct/union. Adjust member offsets
3443 to reflect the real offset of our containing struct.
3444 Also set the offset of this anon member inside
3445 the outer struct to be zero. Via this it
3446 works when accessing the field offset directly
3447 (from base object), as well as when recursing
3448 members in initializer handling. */
3449 int v2 = f->type.ref->v;
3450 if (!(v2 & SYM_FIELD) &&
3451 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3452 Sym **pps;
3453 /* This happens only with MS extensions. The
3454 anon member has a named struct type, so it
3455 potentially is shared with other references.
3456 We need to unshare members so we can modify
3457 them. */
3458 ass = f->type.ref;
3459 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3460 &f->type.ref->type, 0,
3461 f->type.ref->c);
3462 pps = &f->type.ref->next;
3463 while ((ass = ass->next) != NULL) {
3464 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3465 pps = &((*pps)->next);
3467 *pps = NULL;
3469 struct_add_offset(f->type.ref, offset);
3470 f->c = 0;
3471 } else {
3472 f->c = offset;
3475 f->r = 0;
3477 /* store size and alignment */
3478 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3479 + maxalign - 1) & -maxalign;
3480 type->ref->r = maxalign;
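/* Illustration of the default (PCC-compatible) layout, assuming a 32-bit int:

       struct a { unsigned x:3; unsigned y:5; };    x and y share one int,
                                                    sizeof(struct a) == 4
       struct b { unsigned x:3; unsigned   :0;      the zero-width field
                  unsigned y:5; };                  starts a new unit,
                                                    sizeof(struct b) == 8

   When ms_bitfields is enabled, the MS rules described above apply instead. */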
3483 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3484 static void struct_decl(CType *type, AttributeDef *ad, int u)
3486 int a, v, size, align, flexible, alignoverride;
3487 long c;
3488 int bit_size, bsize, bt;
3489 Sym *s, *ss, **ps;
3490 AttributeDef ad1;
3491 CType type1, btype;
3493 a = tok; /* save decl type */
3494 next();
3495 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3496 parse_attribute(ad);
3497 if (tok != '{') {
3498 v = tok;
3499 next();
3500 /* struct already defined ? return it */
3501 if (v < TOK_IDENT)
3502 expect("struct/union/enum name");
3503 s = struct_find(v);
3504 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3505 if (s->type.t != a)
3506 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3507 goto do_decl;
3509 } else {
3510 v = anon_sym++;
3512 /* Record the original enum/struct/union token. */
3513 type1.t = a;
3514 type1.ref = NULL;
3515 /* we put an undefined size for struct/union */
3516 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3517 s->r = 0; /* default alignment is zero, as in gcc */
3518 /* put struct/union/enum name in type */
3519 do_decl:
3520 type->t = u;
3521 type->ref = s;
3523 if (tok == '{') {
3524 next();
3525 if (s->c != -1)
3526 tcc_error("struct/union/enum already defined");
3527 /* cannot be empty */
3528 c = 0;
3529 /* empty enums are not allowed */
3530 if (a == TOK_ENUM) {
3531 int seen_neg = 0;
3532 int seen_wide = 0;
3533 for(;;) {
3534 CType *t = &int_type;
3535 v = tok;
3536 if (v < TOK_UIDENT)
3537 expect("identifier");
3538 ss = sym_find(v);
3539 if (ss && !local_stack)
3540 tcc_error("redefinition of enumerator '%s'",
3541 get_tok_str(v, NULL));
3542 next();
3543 if (tok == '=') {
3544 next();
3545 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3546 c = expr_const64();
3547 #else
3548 /* We really want to support long long enums
3549 on i386 as well, but the Sym structure only
3550 holds a 'long' for associated constants,
3551 and enlarging it would bump its size (no
3552 available padding). So punt for now. */
3553 c = expr_const();
3554 #endif
3556 if (c < 0)
3557 seen_neg = 1;
3558 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3559 seen_wide = 1, t = &size_type;
3560 /* enum symbols have static storage */
3561 ss = sym_push(v, t, VT_CONST, c);
3562 ss->type.t |= VT_STATIC;
3563 if (tok != ',')
3564 break;
3565 next();
3566 c++;
3567 /* NOTE: we accept a trailing comma */
3568 if (tok == '}')
3569 break;
3571 if (!seen_neg)
3572 s->a.unsigned_enum = 1;
3573 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3574 skip('}');
3575 } else {
3576 ps = &s->next;
3577 flexible = 0;
3578 while (tok != '}') {
3579 if (!parse_btype(&btype, &ad1)) {
3580 skip(';');
3581 continue;
3583 while (1) {
3584 if (flexible)
3585 tcc_error("flexible array member '%s' not at the end of struct",
3586 get_tok_str(v, NULL));
3587 bit_size = -1;
3588 v = 0;
3589 type1 = btype;
3590 if (tok != ':') {
3591 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3592 if (v == 0) {
3593 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3594 expect("identifier");
3595 else {
3596 int v = btype.ref->v;
3597 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3598 if (tcc_state->ms_extensions == 0)
3599 expect("identifier");
3603 if (type_size(&type1, &align) < 0) {
3604 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3605 flexible = 1;
3606 else
3607 tcc_error("field '%s' has incomplete type",
3608 get_tok_str(v, NULL));
3610 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3611 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3612 tcc_error("invalid type for '%s'",
3613 get_tok_str(v, NULL));
3615 if (tok == ':') {
3616 next();
3617 bit_size = expr_const();
3618 /* XXX: handle v = 0 case for messages */
3619 if (bit_size < 0)
3620 tcc_error("negative width in bit-field '%s'",
3621 get_tok_str(v, NULL));
3622 if (v && bit_size == 0)
3623 tcc_error("zero width for bit-field '%s'",
3624 get_tok_str(v, NULL));
3625 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3626 parse_attribute(&ad1);
3628 size = type_size(&type1, &align);
3629 /* Only remember non-default alignment. */
3630 alignoverride = 0;
3631 if (ad1.a.aligned) {
3632 int speca = 1 << (ad1.a.aligned - 1);
3633 alignoverride = speca;
3634 } else if (ad1.a.packed || ad->a.packed) {
3635 alignoverride = 1;
3636 } else if (*tcc_state->pack_stack_ptr) {
3637 if (align > *tcc_state->pack_stack_ptr)
3638 alignoverride = *tcc_state->pack_stack_ptr;
3640 if (bit_size >= 0) {
3641 bt = type1.t & VT_BTYPE;
3642 if (bt != VT_INT &&
3643 bt != VT_BYTE &&
3644 bt != VT_SHORT &&
3645 bt != VT_BOOL &&
3646 bt != VT_ENUM &&
3647 bt != VT_LLONG)
3648 tcc_error("bitfields must have scalar type");
3649 bsize = size * 8;
3650 if (bit_size > bsize) {
3651 tcc_error("width of '%s' exceeds its type",
3652 get_tok_str(v, NULL));
3653 } else if (bit_size == bsize) {
3654 /* no need for bit fields */
3656 } else {
3657 type1.t |= VT_BITFIELD |
3658 (0 << VT_STRUCT_SHIFT) |
3659 (bit_size << (VT_STRUCT_SHIFT + 6));
3662 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3663 /* Remember we've seen a real field to check
3664 for placement of flexible array member. */
3665 c = 1;
3667 /* If member is a struct or bit-field, enforce
3668 placing into the struct (as anonymous). */
3669 if (v == 0 &&
3670 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3671 bit_size >= 0)) {
3672 v = anon_sym++;
3674 if (v) {
3675 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3676 *ps = ss;
3677 ps = &ss->next;
3679 if (tok == ';' || tok == TOK_EOF)
3680 break;
3681 skip(',');
3683 skip(';');
3685 skip('}');
3686 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3687 parse_attribute(ad);
3688 struct_layout(type, ad);
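/* Illustration of the enum handling above:

       enum color { RED, GREEN = 5, BLUE };    no negative enumerator, so
                                               a.unsigned_enum is set
       enum err   { OK = 0, FAIL = -1 };       negative value seen: signed

   Each enumerator is pushed as a VT_CONST symbol with static storage. */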
3693 /* return 1 if the basic type is a size modifier (short, long, long long) */
3694 ST_FUNC int is_btype_size(int bt)
3696 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3699 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3700 are added to the element type, copied because it could be a typedef. */
3701 static void parse_btype_qualify(CType *type, int qualifiers)
3703 while (type->t & VT_ARRAY) {
3704 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3705 type = &type->ref->type;
3707 type->t |= qualifiers;
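/* Example: qualifying a typedef'd array qualifies the element type,

       typedef int vec[4];
       const vec v;        element type becomes "const int", so &v[0]
                           has type "const int *"
*/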
3710 /* return 0 if no type declaration. otherwise, return the basic type
3711 and skip it.
3713 static int parse_btype(CType *type, AttributeDef *ad)
3715 int t, u, bt_size, complete, type_found, typespec_found, g;
3716 Sym *s;
3717 CType type1;
3719 memset(ad, 0, sizeof(AttributeDef));
3720 complete = 0;
3721 type_found = 0;
3722 typespec_found = 0;
3723 t = 0;
3724 while(1) {
3725 switch(tok) {
3726 case TOK_EXTENSION:
3727 /* currently, we really ignore extension */
3728 next();
3729 continue;
3731 /* basic types */
3732 case TOK_CHAR:
3733 u = VT_BYTE;
3734 basic_type:
3735 next();
3736 basic_type1:
3737 if (complete)
3738 tcc_error("too many basic types");
3739 t |= u;
3740 bt_size = is_btype_size (u & VT_BTYPE);
3741 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3742 complete = 1;
3743 typespec_found = 1;
3744 break;
3745 case TOK_VOID:
3746 u = VT_VOID;
3747 goto basic_type;
3748 case TOK_SHORT:
3749 u = VT_SHORT;
3750 goto basic_type;
3751 case TOK_INT:
3752 u = VT_INT;
3753 goto basic_type;
3754 case TOK_LONG:
3755 next();
3756 if ((t & VT_BTYPE) == VT_DOUBLE) {
3757 #ifndef TCC_TARGET_PE
3758 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3759 #endif
3760 } else if ((t & VT_BTYPE) == VT_LONG) {
3761 t = (t & ~VT_BTYPE) | VT_LLONG;
3762 } else {
3763 u = VT_LONG;
3764 goto basic_type1;
3766 break;
3767 #ifdef TCC_TARGET_ARM64
3768 case TOK_UINT128:
3769 /* GCC's __uint128_t appears in some Linux header files. Make it a
3770 synonym for long double to get the size and alignment right. */
3771 u = VT_LDOUBLE;
3772 goto basic_type;
3773 #endif
3774 case TOK_BOOL:
3775 u = VT_BOOL;
3776 goto basic_type;
3777 case TOK_FLOAT:
3778 u = VT_FLOAT;
3779 goto basic_type;
3780 case TOK_DOUBLE:
3781 next();
3782 if ((t & VT_BTYPE) == VT_LONG) {
3783 #ifdef TCC_TARGET_PE
3784 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3785 #else
3786 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3787 #endif
3788 } else {
3789 u = VT_DOUBLE;
3790 goto basic_type1;
3792 break;
3793 case TOK_ENUM:
3794 struct_decl(&type1, ad, VT_ENUM);
3795 basic_type2:
3796 u = type1.t;
3797 type->ref = type1.ref;
3798 goto basic_type1;
3799 case TOK_STRUCT:
3800 case TOK_UNION:
3801 struct_decl(&type1, ad, VT_STRUCT);
3802 goto basic_type2;
3804 /* type modifiers */
3805 case TOK_CONST1:
3806 case TOK_CONST2:
3807 case TOK_CONST3:
3808 type->t = t;
3809 parse_btype_qualify(type, VT_CONSTANT);
3810 t = type->t;
3811 next();
3812 break;
3813 case TOK_VOLATILE1:
3814 case TOK_VOLATILE2:
3815 case TOK_VOLATILE3:
3816 type->t = t;
3817 parse_btype_qualify(type, VT_VOLATILE);
3818 t = type->t;
3819 next();
3820 break;
3821 case TOK_SIGNED1:
3822 case TOK_SIGNED2:
3823 case TOK_SIGNED3:
3824 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3825 tcc_error("signed and unsigned modifier");
3826 typespec_found = 1;
3827 t |= VT_DEFSIGN;
3828 next();
3829 break;
3830 case TOK_REGISTER:
3831 case TOK_AUTO:
3832 case TOK_RESTRICT1:
3833 case TOK_RESTRICT2:
3834 case TOK_RESTRICT3:
3835 next();
3836 break;
3837 case TOK_UNSIGNED:
3838 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3839 tcc_error("signed and unsigned modifier");
3840 t |= VT_DEFSIGN | VT_UNSIGNED;
3841 next();
3842 typespec_found = 1;
3843 break;
3845 /* storage */
3846 case TOK_EXTERN:
3847 g = VT_EXTERN;
3848 goto storage;
3849 case TOK_STATIC:
3850 g = VT_STATIC;
3851 goto storage;
3852 case TOK_TYPEDEF:
3853 g = VT_TYPEDEF;
3854 goto storage;
3855 storage:
3856 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3857 tcc_error("multiple storage classes");
3858 t |= g;
3859 next();
3860 break;
3861 case TOK_INLINE1:
3862 case TOK_INLINE2:
3863 case TOK_INLINE3:
3864 t |= VT_INLINE;
3865 next();
3866 break;
3868 /* GNUC attribute */
3869 case TOK_ATTRIBUTE1:
3870 case TOK_ATTRIBUTE2:
3871 parse_attribute(ad);
3872 if (ad->a.mode) {
3873 u = ad->a.mode -1;
3874 t = (t & ~VT_BTYPE) | u;
3876 break;
3877 /* GNUC typeof */
3878 case TOK_TYPEOF1:
3879 case TOK_TYPEOF2:
3880 case TOK_TYPEOF3:
3881 next();
3882 parse_expr_type(&type1);
3883 /* remove all storage modifiers except typedef */
3884 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3885 goto basic_type2;
3886 default:
3887 if (typespec_found)
3888 goto the_end;
3889 s = sym_find(tok);
3890 if (!s || !(s->type.t & VT_TYPEDEF))
3891 goto the_end;
3893 type->t = ((s->type.t & ~VT_TYPEDEF) |
3894 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3895 type->ref = s->type.ref;
3896 if (t & (VT_CONSTANT | VT_VOLATILE))
3897 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3898 t = type->t;
3900 if (s->r) {
3901 /* get attributes from typedef */
3902 if (0 == ad->a.aligned)
3903 ad->a.aligned = s->a.aligned;
3904 if (0 == ad->a.func_call)
3905 ad->a.func_call = s->a.func_call;
3906 ad->a.packed |= s->a.packed;
3908 next();
3909 typespec_found = 1;
3910 break;
3912 type_found = 1;
3914 the_end:
3915 if (tcc_state->char_is_unsigned) {
3916 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3917 t |= VT_UNSIGNED;
3920 /* 'long' is never used as a type by itself */
3921 if ((t & VT_BTYPE) == VT_LONG)
3922 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3923 defined TCC_TARGET_PE
3924 t = (t & ~VT_BTYPE) | VT_INT;
3925 #else
3926 t = (t & ~VT_BTYPE) | VT_LLONG;
3927 #endif
3928 type->t = t;
3929 return type_found;
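/* Examples of specifier combinations parse_btype() resolves (illustrative):

       unsigned long long x;    -> VT_LLONG | VT_UNSIGNED
       long double d;           -> VT_LDOUBLE (VT_DOUBLE under TCC_TARGET_PE)
       signed char c;           -> VT_BYTE with VT_DEFSIGN
       static const int k;      -> storage class and qualifier combined
*/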
3932 /* convert a function parameter type (array to pointer and function to
3933 function pointer) */
3934 static inline void convert_parameter_type(CType *pt)
3936 /* remove const and volatile qualifiers (XXX: const could be used
3937 to indicate a const function parameter) */
3938 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3939 /* array must be transformed to pointer according to ANSI C */
3940 pt->t &= ~VT_ARRAY;
3941 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3942 mk_pointer(pt);
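/* Example of the parameter adjustment above: a prototype written as

       void f(int a[10], int cb(int));

   is converted as if it had been written

       void f(int *a, int (*cb)(int));
*/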
3946 ST_FUNC void parse_asm_str(CString *astr)
3948 skip('(');
3949 parse_mult_str(astr, "string constant");
3952 /* Parse an asm label and return the token */
3953 static int asm_label_instr(void)
3955 int v;
3956 CString astr;
3958 next();
3959 parse_asm_str(&astr);
3960 skip(')');
3961 #ifdef ASM_DEBUG
3962 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3963 #endif
3964 v = tok_alloc(astr.data, astr.size - 1)->tok;
3965 cstr_free(&astr);
3966 return v;
3969 static void post_type(CType *type, AttributeDef *ad, int storage)
3971 int n, l, t1, arg_size, align;
3972 Sym **plast, *s, *first;
3973 AttributeDef ad1;
3974 CType pt;
3976 if (tok == '(') {
3977 /* function declaration */
3978 next();
3979 l = 0;
3980 first = NULL;
3981 plast = &first;
3982 arg_size = 0;
3983 if (tok != ')') {
3984 for(;;) {
3985 /* read param name and compute offset */
3986 if (l != FUNC_OLD) {
3987 if (!parse_btype(&pt, &ad1)) {
3988 if (l) {
3989 tcc_error("invalid type");
3990 } else {
3991 l = FUNC_OLD;
3992 goto old_proto;
3995 l = FUNC_NEW;
3996 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3997 break;
3998 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3999 if ((pt.t & VT_BTYPE) == VT_VOID)
4000 tcc_error("parameter declared as void");
4001 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4002 } else {
4003 old_proto:
4004 n = tok;
4005 if (n < TOK_UIDENT)
4006 expect("identifier");
4007 pt.t = VT_INT;
4008 next();
4010 convert_parameter_type(&pt);
4011 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4012 *plast = s;
4013 plast = &s->next;
4014 if (tok == ')')
4015 break;
4016 skip(',');
4017 if (l == FUNC_NEW && tok == TOK_DOTS) {
4018 l = FUNC_ELLIPSIS;
4019 next();
4020 break;
4024 /* if no parameters, then old type prototype */
4025 if (l == 0)
4026 l = FUNC_OLD;
4027 skip(')');
4028 /* NOTE: const is ignored in returned type as it has a special
4029 meaning in gcc / C++ */
4030 type->t &= ~VT_CONSTANT;
4031 /* some ancient pre-K&R C allows a function to return an array
4032 and the array brackets to be put after the arguments, such
4033 that "int c()[]" means something like "int[] c()" */
4034 if (tok == '[') {
4035 next();
4036 skip(']'); /* only handle simple "[]" */
4037 type->t |= VT_PTR;
4039 /* we push an anonymous symbol which will contain the function prototype */
4040 ad->a.func_args = arg_size;
4041 s = sym_push(SYM_FIELD, type, 0, l);
4042 s->a = ad->a;
4043 s->next = first;
4044 type->t = VT_FUNC;
4045 type->ref = s;
4046 } else if (tok == '[') {
4047 int saved_nocode_wanted = nocode_wanted;
4048 /* array definition */
4049 next();
4050 if (tok == TOK_RESTRICT1)
4051 next();
4052 n = -1;
4053 t1 = 0;
4054 if (tok != ']') {
4055 if (!local_stack || (storage & VT_STATIC))
4056 vpushi(expr_const());
4057 else {
4058 /* The length of a VLA (which can only occur with local_stack
4059 && !VT_STATIC) must always be evaluated, even under
4060 nocode_wanted, so that its size slot is initialized (e.g.
4061 under sizeof or typeof). */
4062 nocode_wanted = 0;
4063 gexpr();
4065 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4066 n = vtop->c.i;
4067 if (n < 0)
4068 tcc_error("invalid array size");
4069 } else {
4070 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4071 tcc_error("size of variable length array should be an integer");
4072 t1 = VT_VLA;
4075 skip(']');
4076 /* parse next post type */
4077 post_type(type, ad, storage);
4078 if (type->t == VT_FUNC)
4079 tcc_error("declaration of an array of functions");
4080 t1 |= type->t & VT_VLA;
4082 if (t1 & VT_VLA) {
4083 loc -= type_size(&int_type, &align);
4084 loc &= -align;
4085 n = loc;
4087 vla_runtime_type_size(type, &align);
4088 gen_op('*');
4089 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4090 vswap();
4091 vstore();
4093 if (n != -1)
4094 vpop();
4095 nocode_wanted = saved_nocode_wanted;
4097 /* we push an anonymous symbol which will contain the array
4098 element type */
4099 s = sym_push(SYM_FIELD, type, 0, n);
4100 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4101 type->ref = s;
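/* Examples of declarators handled by post_type() (illustrative):

       int f(void);          new-style prototype (FUNC_NEW)
       int g();              old-style, parameters unchecked (FUNC_OLD)
       int h(int n, ...);    variadic (FUNC_ELLIPSIS)
       int v[n];             VLA, size evaluated at run time (VT_VLA)
       int c()[];            ancient form, handled like a pointer return
*/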
4105 /* Parse a type declaration (except basic type), and return the type
4106 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4107 expected. 'type' should contain the basic type. 'ad' is the
4108 attribute definition of the basic type. It can be modified by
4109 type_decl().
4111 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4113 Sym *s;
4114 CType type1, *type2;
4115 int qualifiers, storage;
4117 while (tok == '*') {
4118 qualifiers = 0;
4119 redo:
4120 next();
4121 switch(tok) {
4122 case TOK_CONST1:
4123 case TOK_CONST2:
4124 case TOK_CONST3:
4125 qualifiers |= VT_CONSTANT;
4126 goto redo;
4127 case TOK_VOLATILE1:
4128 case TOK_VOLATILE2:
4129 case TOK_VOLATILE3:
4130 qualifiers |= VT_VOLATILE;
4131 goto redo;
4132 case TOK_RESTRICT1:
4133 case TOK_RESTRICT2:
4134 case TOK_RESTRICT3:
4135 goto redo;
4136 /* XXX: clarify attribute handling */
4137 case TOK_ATTRIBUTE1:
4138 case TOK_ATTRIBUTE2:
4139 parse_attribute(ad);
4140 break;
4142 mk_pointer(type);
4143 type->t |= qualifiers;
4146 /* recursive type */
4147 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4148 type1.t = 0; /* XXX: same as int */
4149 if (tok == '(') {
4150 next();
4151 /* XXX: this is not correct to modify 'ad' at this point, but
4152 the syntax is not clear */
4153 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4154 parse_attribute(ad);
4155 type_decl(&type1, ad, v, td);
4156 skip(')');
4157 } else {
4158 /* type identifier */
4159 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4160 *v = tok;
4161 next();
4162 } else {
4163 if (!(td & TYPE_ABSTRACT))
4164 expect("identifier");
4165 *v = 0;
4168 storage = type->t & VT_STORAGE;
4169 type->t &= ~VT_STORAGE;
4170 post_type(type, ad, storage);
4171 type->t |= storage;
4172 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4173 parse_attribute(ad);
4175 if (!type1.t)
4176 return;
4177 /* append type at the end of type1 */
4178 type2 = &type1;
4179 for(;;) {
4180 s = type2->ref;
4181 type2 = &s->type;
4182 if (!type2->t) {
4183 *type2 = *type;
4184 break;
4187 *type = type1;
4188 type->t |= storage;
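/* Examples of declarators built by type_decl() (illustrative):

       const char *const p;    const pointer to const char
       int (*fp)(void);        pointer to function returning int
       int (*ap)[8];           pointer to array of 8 int
*/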
4191 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4192 ST_FUNC int lvalue_type(int t)
4194 int bt, r;
4195 r = VT_LVAL;
4196 bt = t & VT_BTYPE;
4197 if (bt == VT_BYTE || bt == VT_BOOL)
4198 r |= VT_LVAL_BYTE;
4199 else if (bt == VT_SHORT)
4200 r |= VT_LVAL_SHORT;
4201 else
4202 return r;
4203 if (t & VT_UNSIGNED)
4204 r |= VT_LVAL_UNSIGNED;
4205 return r;
4208 /* indirection with full error checking and bound check */
4209 ST_FUNC void indir(void)
4211 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4212 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4213 return;
4214 expect("pointer");
4216 if (vtop->r & VT_LVAL)
4217 gv(RC_INT);
4218 vtop->type = *pointed_type(&vtop->type);
4219 /* Arrays and functions are never lvalues */
4220 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4221 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4222 vtop->r |= lvalue_type(vtop->type.t);
4223 /* if bound checking, the referenced pointer must be checked */
4224 #ifdef CONFIG_TCC_BCHECK
4225 if (tcc_state->do_bounds_check)
4226 vtop->r |= VT_MUSTBOUND;
4227 #endif
4231 /* pass a parameter to a function and do type checking and casting */
4232 static void gfunc_param_typed(Sym *func, Sym *arg)
4234 int func_type;
4235 CType type;
4237 func_type = func->c;
4238 if (func_type == FUNC_OLD ||
4239 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4240 /* default casting : only need to convert float to double */
4241 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4242 type.t = VT_DOUBLE;
4243 gen_cast(&type);
4244 } else if (vtop->type.t & VT_BITFIELD) {
4245 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4246 type.ref = vtop->type.ref;
4247 gen_cast(&type);
4249 } else if (arg == NULL) {
4250 tcc_error("too many arguments to function");
4251 } else {
4252 type = arg->type;
4253 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4254 gen_assign_cast(&type);
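/* Example: for old-style and variadic calls the default promotion above
   converts float arguments to double,

       printf("%f\n", 1.5f);    1.5f is passed as a double

   while prototyped parameters get gen_assign_cast() to the declared type. */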
4258 /* parse an expression of the form '(type)' or '(expr)' and return its
4259 type */
4260 static void parse_expr_type(CType *type)
4262 int n;
4263 AttributeDef ad;
4265 skip('(');
4266 if (parse_btype(type, &ad)) {
4267 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4268 } else {
4269 expr_type(type);
4271 skip(')');
4274 static void parse_type(CType *type)
4276 AttributeDef ad;
4277 int n;
4279 if (!parse_btype(type, &ad)) {
4280 expect("type");
4282 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4285 static void vpush_tokc(int t)
4287 CType type;
4288 type.t = t;
4289 type.ref = 0;
4290 vsetc(&type, VT_CONST, &tokc);
4293 ST_FUNC void unary(void)
4295 int n, t, align, size, r, sizeof_caller;
4296 CType type;
4297 Sym *s;
4298 AttributeDef ad;
4300 sizeof_caller = in_sizeof;
4301 in_sizeof = 0;
4302 /* XXX: GCC 2.95.3 does not generate a table although it should be
4303 better here */
4304 tok_next:
4305 switch(tok) {
4306 case TOK_EXTENSION:
4307 next();
4308 goto tok_next;
4309 case TOK_CINT:
4310 case TOK_CCHAR:
4311 case TOK_LCHAR:
4312 vpushi(tokc.i);
4313 next();
4314 break;
4315 case TOK_CUINT:
4316 vpush_tokc(VT_INT | VT_UNSIGNED);
4317 next();
4318 break;
4319 case TOK_CLLONG:
4320 vpush_tokc(VT_LLONG);
4321 next();
4322 break;
4323 case TOK_CULLONG:
4324 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4325 next();
4326 break;
4327 case TOK_CFLOAT:
4328 vpush_tokc(VT_FLOAT);
4329 next();
4330 break;
4331 case TOK_CDOUBLE:
4332 vpush_tokc(VT_DOUBLE);
4333 next();
4334 break;
4335 case TOK_CLDOUBLE:
4336 vpush_tokc(VT_LDOUBLE);
4337 next();
4338 break;
4339 case TOK___FUNCTION__:
4340 if (!gnu_ext)
4341 goto tok_identifier;
4342 /* fall thru */
4343 case TOK___FUNC__:
4345 void *ptr;
4346 int len;
4347 /* special function name identifier */
4348 len = strlen(funcname) + 1;
4349 /* generate char[len] type */
4350 type.t = VT_BYTE;
4351 mk_pointer(&type);
4352 type.t |= VT_ARRAY;
4353 type.ref->c = len;
4354 vpush_ref(&type, data_section, data_section->data_offset, len);
4355 ptr = section_ptr_add(data_section, len);
4356 memcpy(ptr, funcname, len);
4357 next();
4359 break;
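/* Illustrative use of the identifiers handled above:

       void report(void) { const char *name = __func__; }   // name points to "report"

   the enclosing function name is emitted once as a char[] object in
   data_section. */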
4360 case TOK_LSTR:
4361 #ifdef TCC_TARGET_PE
4362 t = VT_SHORT | VT_UNSIGNED;
4363 #else
4364 t = VT_INT;
4365 #endif
4366 goto str_init;
4367 case TOK_STR:
4368 /* string parsing */
4369 t = VT_BYTE;
4370 str_init:
4371 if (tcc_state->warn_write_strings)
4372 t |= VT_CONSTANT;
4373 type.t = t;
4374 mk_pointer(&type);
4375 type.t |= VT_ARRAY;
4376 memset(&ad, 0, sizeof(AttributeDef));
4377 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4378 break;
4379 case '(':
4380 next();
4381 /* cast ? */
4382 if (parse_btype(&type, &ad)) {
4383 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4384 skip(')');
4385 /* check ISOC99 compound literal */
4386 if (tok == '{') {
4387 /* data is allocated locally by default */
4388 if (global_expr)
4389 r = VT_CONST;
4390 else
4391 r = VT_LOCAL;
4392 /* all except arrays are lvalues */
4393 if (!(type.t & VT_ARRAY))
4394 r |= lvalue_type(type.t);
4395 memset(&ad, 0, sizeof(AttributeDef));
4396 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4397 } else {
4398 if (sizeof_caller) {
4399 vpush(&type);
4400 return;
4402 unary();
4403 gen_cast(&type);
4405 } else if (tok == '{') {
4406 int saved_nocode_wanted = nocode_wanted;
4407 if (const_wanted)
4408 tcc_error("expected constant");
4409 /* save all registers */
4410 save_regs(0);
4411 /* statement expression : we do not accept break/continue
4412 inside as GCC does. We do retain the nocode_wanted state,
4413 as statement expressions can't ever be entered from the
4414 outside, so any reactivation of code emission (from labels
4415 or loop heads) can be disabled again after the end of it. */
4416 block(NULL, NULL, 1);
4417 nocode_wanted = saved_nocode_wanted;
4418 skip(')');
4419 } else {
4420 gexpr();
4421 skip(')');
4423 break;
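/* Illustrative examples of the three '(' forms distinguished above:

       int i = (int)3.7;                    // plain cast
       int *p = (int[]){1, 2, 3};           // ISO C99 compound literal
       int j = ({ int t = 2; t * t; });     // GNU statement expression

   a compound literal is allocated on the stack by default and globally when
   global_expr is set. */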
4424 case '*':
4425 next();
4426 unary();
4427 indir();
4428 break;
4429 case '&':
4430 next();
4431 unary();
4432 /* function names must be treated as function pointers,
4433 except for unary '&' and sizeof. Since we consider that
4434 functions are not lvalues, we only have to handle it
4435 there and in function calls. */
4436 /* arrays can also be used although they are not lvalues */
4437 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4438 !(vtop->type.t & VT_ARRAY))
4439 test_lvalue();
4440 mk_pointer(&vtop->type);
4441 gaddrof();
4442 break;
4443 case '!':
4444 next();
4445 unary();
4446 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4447 CType boolean;
4448 boolean.t = VT_BOOL;
4449 gen_cast(&boolean);
4450 vtop->c.i = !vtop->c.i;
4451 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4452 vtop->c.i ^= 1;
4453 else {
4454 save_regs(1);
4455 vseti(VT_JMP, gvtst(1, 0));
4457 break;
4458 case '~':
4459 next();
4460 unary();
4461 vpushi(-1);
4462 gen_op('^');
4463 break;
4464 case '+':
4465 next();
4466 unary();
4467 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4468 tcc_error("pointer not accepted for unary plus");
4469 /* In order to force cast, we add zero, except for floating point
4470 where we really need a noop (otherwise -0.0 will be transformed
4471 into +0.0). */
4472 if (!is_float(vtop->type.t)) {
4473 vpushi(0);
4474 gen_op('+');
4476 break;
4477 case TOK_SIZEOF:
4478 case TOK_ALIGNOF1:
4479 case TOK_ALIGNOF2:
4480 t = tok;
4481 next();
4482 in_sizeof++;
4483 unary_type(&type); // also resets in_sizeof to 0
4484 size = type_size(&type, &align);
4485 if (t == TOK_SIZEOF) {
4486 if (!(type.t & VT_VLA)) {
4487 if (size < 0)
4488 tcc_error("sizeof applied to an incomplete type");
4489 vpushs(size);
4490 } else {
4491 vla_runtime_type_size(&type, &align);
4493 } else {
4494 vpushs(align);
4496 vtop->type.t |= VT_UNSIGNED;
4497 break;
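/* Illustrative sizeof/alignof inputs handled above:

       unsigned long a = sizeof(long double);              // constant, pushed via vpushs()
       unsigned long b(int n) { return sizeof(int[n]); }   // VLA: size computed at run time

   the pushed result is made unsigned, matching size_t. */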
4499 case TOK_builtin_expect:
4501 /* __builtin_expect is a no-op for now */
4502 next();
4503 skip('(');
4504 expr_eq();
4505 skip(',');
4506 expr_eq();
4507 vpop();
4508 skip(')');
4510 break;
4511 case TOK_builtin_types_compatible_p:
4513 CType type1, type2;
4514 next();
4515 skip('(');
4516 parse_type(&type1);
4517 skip(',');
4518 parse_type(&type2);
4519 skip(')');
4520 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4521 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4522 vpushi(is_compatible_types(&type1, &type2));
4524 break;
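/* Illustrative example (top-level qualifiers are stripped above):

       int r = __builtin_types_compatible_p(const int, int);   // 1
       int s = __builtin_types_compatible_p(int, char);        // 0
*/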
4525 case TOK_builtin_choose_expr:
4527 int64_t c;
4528 next();
4529 skip('(');
4530 c = expr_const64();
4531 skip(',');
4532 if (!c) {
4533 nocode_wanted++;
4535 expr_eq();
4536 if (!c) {
4537 vpop();
4538 nocode_wanted--;
4540 skip(',');
4541 if (c) {
4542 nocode_wanted++;
4544 expr_eq();
4545 if (c) {
4546 vpop();
4547 nocode_wanted--;
4549 skip(')');
4551 break;
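/* Illustrative example; the first argument must be a constant expression
   and only the selected branch yields a value:

       int v = __builtin_choose_expr(sizeof(int) == 4, 32, 64);

   the unselected branch is still parsed, but with nocode_wanted set and its
   value popped. */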
4552 case TOK_builtin_constant_p:
4554 int res;
4555 next();
4556 skip('(');
4557 nocode_wanted++;
4558 expr_eq();
4559 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4560 vpop();
4561 nocode_wanted--;
4562 skip(')');
4563 vpushi(res);
4565 break;
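/* Illustrative example:

       int a = __builtin_constant_p(3 * 7);                 // 1: folds to a constant
       int b(int x) { return __builtin_constant_p(x); }     // 0: not known at compile time
*/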
4566 case TOK_builtin_frame_address:
4567 case TOK_builtin_return_address:
4569 int tok1 = tok;
4570 int level;
4571 CType type;
4572 next();
4573 skip('(');
4574 if (tok != TOK_CINT) {
4575 tcc_error("%s only takes positive integers",
4576 tok1 == TOK_builtin_return_address ?
4577 "__builtin_return_address" :
4578 "__builtin_frame_address");
4580 level = (uint32_t)tokc.i;
4581 next();
4582 skip(')');
4583 type.t = VT_VOID;
4584 mk_pointer(&type);
4585 vset(&type, VT_LOCAL, 0); /* local frame */
4586 while (level--) {
4587 mk_pointer(&vtop->type);
4588 indir(); /* -> parent frame */
4590 if (tok1 == TOK_builtin_return_address) {
4591 // assume return address is just above frame pointer on stack
4592 vpushi(PTR_SIZE);
4593 gen_op('+');
4594 mk_pointer(&vtop->type);
4595 indir();
4598 break;
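/* Illustrative example (the level argument must be an integer literal, as
   checked above):

       void *who_called(void)   { return __builtin_return_address(0); }
       void *caller_frame(void) { return __builtin_frame_address(1); }
*/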
4599 #ifdef TCC_TARGET_X86_64
4600 #ifdef TCC_TARGET_PE
4601 case TOK_builtin_va_start:
4603 next();
4604 skip('(');
4605 expr_eq();
4606 skip(',');
4607 expr_eq();
4608 skip(')');
4609 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4610 tcc_error("__builtin_va_start expects a local variable");
4611 vtop->r &= ~VT_LVAL;
4612 vtop->type = char_pointer_type;
4613 vtop->c.i += 8;
4614 vstore();
4616 break;
4617 #else
4618 case TOK_builtin_va_arg_types:
4620 CType type;
4621 next();
4622 skip('(');
4623 parse_type(&type);
4624 skip(')');
4625 vpushi(classify_x86_64_va_arg(&type));
4627 break;
4628 #endif
4629 #endif
4631 #ifdef TCC_TARGET_ARM64
4632 case TOK___va_start: {
4633 next();
4634 skip('(');
4635 expr_eq();
4636 skip(',');
4637 expr_eq();
4638 skip(')');
4639 //xx check types
4640 gen_va_start();
4641 vpushi(0);
4642 vtop->type.t = VT_VOID;
4643 break;
4645 case TOK___va_arg: {
4646 CType type;
4647 next();
4648 skip('(');
4649 expr_eq();
4650 skip(',');
4651 parse_type(&type);
4652 skip(')');
4653 //xx check types
4654 gen_va_arg(&type);
4655 vtop->type = type;
4656 break;
4658 case TOK___arm64_clear_cache: {
4659 next();
4660 skip('(');
4661 expr_eq();
4662 skip(',');
4663 expr_eq();
4664 skip(')');
4665 gen_clear_cache();
4666 vpushi(0);
4667 vtop->type.t = VT_VOID;
4668 break;
4670 #endif
4671 /* pre operations */
4672 case TOK_INC:
4673 case TOK_DEC:
4674 t = tok;
4675 next();
4676 unary();
4677 inc(0, t);
4678 break;
4679 case '-':
4680 next();
4681 unary();
4682 t = vtop->type.t & VT_BTYPE;
4683 if (is_float(t)) {
4684 /* In IEEE negate(x) isn't subtract(0,x), but rather
4685 subtract(-0, x). */
4686 vpush(&vtop->type);
4687 if (t == VT_FLOAT)
4688 vtop->c.f = -1.0 * 0.0;
4689 else if (t == VT_DOUBLE)
4690 vtop->c.d = -1.0 * 0.0;
4691 else
4692 vtop->c.ld = -1.0 * 0.0;
4693 } else
4694 vpushi(0);
4695 vswap();
4696 gen_op('-');
4697 break;
4698 case TOK_LAND:
4699 if (!gnu_ext)
4700 goto tok_identifier;
4701 next();
4702 /* allow to take the address of a label */
4703 if (tok < TOK_UIDENT)
4704 expect("label identifier");
4705 s = label_find(tok);
4706 if (!s) {
4707 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4708 } else {
4709 if (s->r == LABEL_DECLARED)
4710 s->r = LABEL_FORWARD;
4712 if (!s->type.t) {
4713 s->type.t = VT_VOID;
4714 mk_pointer(&s->type);
4715 s->type.t |= VT_STATIC;
4717 vpushsym(&s->type, s);
4718 next();
4719 break;
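/* Illustrative GNU extension handled above (address of a label, used
   together with the computed goto parsed in block()):

       int f(int c) {
           void *tgt = c ? &&yes : &&no;
           goto *tgt;
       yes: return 1;
       no:  return 0;
       }
*/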
4721 // special qnan, snan and infinity values
4722 case TOK___NAN__:
4723 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4724 next();
4725 break;
4726 case TOK___SNAN__:
4727 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4728 next();
4729 break;
4730 case TOK___INF__:
4731 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4732 next();
4733 break;
4735 default:
4736 tok_identifier:
4737 t = tok;
4738 next();
4739 if (t < TOK_UIDENT)
4740 expect("identifier");
4741 s = sym_find(t);
4742 if (!s) {
4743 const char *name = get_tok_str(t, NULL);
4744 if (tok != '(')
4745 tcc_error("'%s' undeclared", name);
4746 /* for simple function calls, we tolerate an undeclared
4747 external reference to an int() function */
4748 if (tcc_state->warn_implicit_function_declaration
4749 #ifdef TCC_TARGET_PE
4750 /* people must be warned about using undeclared WINAPI functions
4751 (which usually start with an uppercase letter) */
4752 || (name[0] >= 'A' && name[0] <= 'Z')
4753 #endif
4755 tcc_warning("implicit declaration of function '%s'", name);
4756 s = external_global_sym(t, &func_old_type, 0);
4759 r = s->r;
4760 /* A symbol that has a register is a local register variable,
4761 which starts out as a VT_LOCAL value. */
4762 if ((r & VT_VALMASK) < VT_CONST)
4763 r = (r & ~VT_VALMASK) | VT_LOCAL;
4765 vset(&s->type, r, s->c);
4766 /* Point to s as backpointer (even without r&VT_SYM).
4767 Will be used by at least the x86 inline asm parser for
4768 regvars. */
4769 vtop->sym = s;
4770 if (vtop->r & VT_SYM) {
4771 vtop->c.i = 0;
4773 break;
4776 /* post operations */
4777 while (1) {
4778 if (tok == TOK_INC || tok == TOK_DEC) {
4779 inc(1, tok);
4780 next();
4781 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4782 int qualifiers;
4783 /* field */
4784 if (tok == TOK_ARROW)
4785 indir();
4786 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4787 test_lvalue();
4788 gaddrof();
4789 /* expect pointer on structure */
4790 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4791 expect("struct or union");
4792 if (tok == TOK_CDOUBLE)
4793 expect("field name");
4794 next();
4795 if (tok == TOK_CINT || tok == TOK_CUINT)
4796 expect("field name");
4797 s = find_field(&vtop->type, tok);
4798 if (!s)
4799 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4800 /* add field offset to pointer */
4801 vtop->type = char_pointer_type; /* change type to 'char *' */
4802 vpushi(s->c);
4803 gen_op('+');
4804 /* change type to field type, and set to lvalue */
4805 vtop->type = s->type;
4806 vtop->type.t |= qualifiers;
4807 /* an array is never an lvalue */
4808 if (!(vtop->type.t & VT_ARRAY)) {
4809 vtop->r |= lvalue_type(vtop->type.t);
4810 #ifdef CONFIG_TCC_BCHECK
4811 /* if bound checking, the referenced pointer must be checked */
4812 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4813 vtop->r |= VT_MUSTBOUND;
4814 #endif
4816 next();
4817 } else if (tok == '[') {
4818 next();
4819 gexpr();
4820 gen_op('+');
4821 indir();
4822 skip(']');
4823 } else if (tok == '(') {
4824 SValue ret;
4825 Sym *sa;
4826 int nb_args, ret_nregs, ret_align, regsize, variadic;
4828 /* function call */
4829 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4830 /* pointer test (no array accepted) */
4831 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4832 vtop->type = *pointed_type(&vtop->type);
4833 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4834 goto error_func;
4835 } else {
4836 error_func:
4837 expect("function pointer");
4839 } else {
4840 vtop->r &= ~VT_LVAL; /* no lvalue */
4842 /* get return type */
4843 s = vtop->type.ref;
4844 next();
4845 sa = s->next; /* first parameter */
4846 nb_args = regsize = 0;
4847 ret.r2 = VT_CONST;
4848 /* compute first implicit argument if a structure is returned */
4849 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4850 variadic = (s->c == FUNC_ELLIPSIS);
4851 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4852 &ret_align, &regsize);
4853 if (!ret_nregs) {
4854 /* get some space for the returned structure */
4855 size = type_size(&s->type, &align);
4856 #ifdef TCC_TARGET_ARM64
4857 /* On arm64, a small struct is returned in registers.
4858 It is much easier to write it to memory if we know
4859 that we are allowed to write some extra bytes, so
4860 round the allocated space up to a power of 2: */
4861 if (size < 16)
4862 while (size & (size - 1))
4863 size = (size | (size - 1)) + 1;
4864 #endif
4865 loc = (loc - size) & -align;
4866 ret.type = s->type;
4867 ret.r = VT_LOCAL | VT_LVAL;
4868 /* pass it as 'int' to avoid structure arg passing
4869 problems */
4870 vseti(VT_LOCAL, loc);
4871 ret.c = vtop->c;
4872 nb_args++;
4874 } else {
4875 ret_nregs = 1;
4876 ret.type = s->type;
4879 if (ret_nregs) {
4880 /* return in register */
4881 if (is_float(ret.type.t)) {
4882 ret.r = reg_fret(ret.type.t);
4883 #ifdef TCC_TARGET_X86_64
4884 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4885 ret.r2 = REG_QRET;
4886 #endif
4887 } else {
4888 #ifndef TCC_TARGET_ARM64
4889 #ifdef TCC_TARGET_X86_64
4890 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4891 #else
4892 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4893 #endif
4894 ret.r2 = REG_LRET;
4895 #endif
4896 ret.r = REG_IRET;
4898 ret.c.i = 0;
4900 if (tok != ')') {
4901 for(;;) {
4902 expr_eq();
4903 gfunc_param_typed(s, sa);
4904 nb_args++;
4905 if (sa)
4906 sa = sa->next;
4907 if (tok == ')')
4908 break;
4909 skip(',');
4912 if (sa)
4913 tcc_error("too few arguments to function");
4914 skip(')');
4915 gfunc_call(nb_args);
4917 /* return value */
4918 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4919 vsetc(&ret.type, r, &ret.c);
4920 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4923 /* handle packed struct return */
4924 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4925 int addr, offset;
4927 size = type_size(&s->type, &align);
4928 /* We often write whole registers, so make sure there's enough
4929 space. Assume the register size is a power of 2. */
4930 if (regsize > align)
4931 align = regsize;
4932 loc = (loc - size) & -align;
4933 addr = loc;
4934 offset = 0;
4935 for (;;) {
4936 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4937 vswap();
4938 vstore();
4939 vtop--;
4940 if (--ret_nregs == 0)
4941 break;
4942 offset += regsize;
4944 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4946 } else {
4947 break;
4952 ST_FUNC void expr_prod(void)
4954 int t;
4956 unary();
4957 while (tok == '*' || tok == '/' || tok == '%') {
4958 t = tok;
4959 next();
4960 unary();
4961 gen_op(t);
4965 ST_FUNC void expr_sum(void)
4967 int t;
4969 expr_prod();
4970 while (tok == '+' || tok == '-') {
4971 t = tok;
4972 next();
4973 expr_prod();
4974 gen_op(t);
4978 static void expr_shift(void)
4980 int t;
4982 expr_sum();
4983 while (tok == TOK_SHL || tok == TOK_SAR) {
4984 t = tok;
4985 next();
4986 expr_sum();
4987 gen_op(t);
4991 static void expr_cmp(void)
4993 int t;
4995 expr_shift();
4996 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4997 tok == TOK_ULT || tok == TOK_UGE) {
4998 t = tok;
4999 next();
5000 expr_shift();
5001 gen_op(t);
5005 static void expr_cmpeq(void)
5007 int t;
5009 expr_cmp();
5010 while (tok == TOK_EQ || tok == TOK_NE) {
5011 t = tok;
5012 next();
5013 expr_cmp();
5014 gen_op(t);
5018 static void expr_and(void)
5020 expr_cmpeq();
5021 while (tok == '&') {
5022 next();
5023 expr_cmpeq();
5024 gen_op('&');
5028 static void expr_xor(void)
5030 expr_and();
5031 while (tok == '^') {
5032 next();
5033 expr_and();
5034 gen_op('^');
5038 static void expr_or(void)
5040 expr_xor();
5041 while (tok == '|') {
5042 next();
5043 expr_xor();
5044 gen_op('|');
5048 static void expr_land(void)
5050 expr_or();
5051 if (tok == TOK_LAND) {
5052 int t = 0;
5053 for(;;) {
5054 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5055 CType ctb;
5056 ctb.t = VT_BOOL;
5057 gen_cast(&ctb);
5058 if (vtop->c.i) {
5059 vpop();
5060 } else {
5061 nocode_wanted++;
5062 while (tok == TOK_LAND) {
5063 next();
5064 expr_or();
5065 vpop();
5067 nocode_wanted--;
5068 if (t)
5069 gsym(t);
5070 gen_cast(&int_type);
5071 break;
5073 } else {
5074 if (!t)
5075 save_regs(1);
5076 t = gvtst(1, t);
5078 if (tok != TOK_LAND) {
5079 if (t)
5080 vseti(VT_JMPI, t);
5081 else
5082 vpushi(1);
5083 break;
5085 next();
5086 expr_or();
5091 static void expr_lor(void)
5093 expr_land();
5094 if (tok == TOK_LOR) {
5095 int t = 0;
5096 for(;;) {
5097 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5098 CType ctb;
5099 ctb.t = VT_BOOL;
5100 gen_cast(&ctb);
5101 if (!vtop->c.i) {
5102 vpop();
5103 } else {
5104 nocode_wanted++;
5105 while (tok == TOK_LOR) {
5106 next();
5107 expr_land();
5108 vpop();
5110 nocode_wanted--;
5111 if (t)
5112 gsym(t);
5113 gen_cast(&int_type);
5114 break;
5116 } else {
5117 if (!t)
5118 save_regs(1);
5119 t = gvtst(0, t);
5121 if (tok != TOK_LOR) {
5122 if (t)
5123 vseti(VT_JMP, t);
5124 else
5125 vpushi(0);
5126 break;
5128 next();
5129 expr_land();
5134 /* Assuming vtop is a value used in a conditional context
5135 (i.e. compared with zero) return 0 if it's false, 1 if
5136 true and -1 if it can't be statically determined. */
5137 static int condition_3way(void)
5139 int c = -1;
5140 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5141 (!(vtop->r & VT_SYM) ||
5142 !(vtop->sym->type.t & VT_WEAK))) {
5143 CType boolean;
5144 boolean.t = VT_BOOL;
5145 vdup();
5146 gen_cast(&boolean);
5147 c = vtop->c.i;
5148 vpop();
5150 return c;
5153 static void expr_cond(void)
5155 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5156 SValue sv;
5157 CType type, type1, type2;
5159 expr_lor();
5160 if (tok == '?') {
5161 next();
5162 c = condition_3way();
5163 g = (tok == ':' && gnu_ext);
5164 if (c < 0) {
5165 /* needed to avoid having different registers saved in
5166 each branch */
5167 if (is_float(vtop->type.t)) {
5168 rc = RC_FLOAT;
5169 #ifdef TCC_TARGET_X86_64
5170 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5171 rc = RC_ST0;
5173 #endif
5174 } else
5175 rc = RC_INT;
5176 gv(rc);
5177 save_regs(1);
5178 if (g)
5179 gv_dup();
5180 tt = gvtst(1, 0);
5182 } else {
5183 if (!g)
5184 vpop();
5185 tt = 0;
5188 if (1) {
5189 if (c == 0)
5190 nocode_wanted++;
5191 if (!g)
5192 gexpr();
5194 type1 = vtop->type;
5195 sv = *vtop; /* save value to handle it later */
5196 vtop--; /* no vpop so that FP stack is not flushed */
5197 skip(':');
5199 u = 0;
5200 if (c < 0)
5201 u = gjmp(0);
5202 gsym(tt);
5204 if (c == 0)
5205 nocode_wanted--;
5206 if (c == 1)
5207 nocode_wanted++;
5208 expr_cond();
5209 if (c == 1)
5210 nocode_wanted--;
5212 type2 = vtop->type;
5213 t1 = type1.t;
5214 bt1 = t1 & VT_BTYPE;
5215 t2 = type2.t;
5216 bt2 = t2 & VT_BTYPE;
5217 /* cast operands to correct type according to ISOC rules */
5218 if (is_float(bt1) || is_float(bt2)) {
5219 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5220 type.t = VT_LDOUBLE;
5222 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5223 type.t = VT_DOUBLE;
5224 } else {
5225 type.t = VT_FLOAT;
5227 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5228 /* cast to biggest op */
5229 type.t = VT_LLONG;
5230 /* convert to unsigned if it does not fit in a long long */
5231 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5232 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5233 type.t |= VT_UNSIGNED;
5234 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5235 /* If one is a null ptr constant the result type
5236 is the other. */
5237 if (is_null_pointer (vtop))
5238 type = type1;
5239 else if (is_null_pointer (&sv))
5240 type = type2;
5241 /* XXX: test pointer compatibility, C99 has more elaborate
5242 rules here. */
5243 else
5244 type = type1;
5245 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5246 /* XXX: test function pointer compatibility */
5247 type = bt1 == VT_FUNC ? type1 : type2;
5248 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5249 /* XXX: test structure compatibility */
5250 type = bt1 == VT_STRUCT ? type1 : type2;
5251 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5252 /* NOTE: as an extension, we accept void on only one side */
5253 type.t = VT_VOID;
5254 } else {
5255 /* integer operations */
5256 type.t = VT_INT;
5257 /* convert to unsigned if it does not fit in an integer */
5258 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5259 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5260 type.t |= VT_UNSIGNED;
5262 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5263 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5264 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5265 islv &= c < 0;
5267 /* now we convert second operand */
5268 if (c != 1) {
5269 gen_cast(&type);
5270 if (islv) {
5271 mk_pointer(&vtop->type);
5272 gaddrof();
5273 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5274 gaddrof();
5277 rc = RC_INT;
5278 if (is_float(type.t)) {
5279 rc = RC_FLOAT;
5280 #ifdef TCC_TARGET_X86_64
5281 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5282 rc = RC_ST0;
5284 #endif
5285 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5286 /* for long longs, we use fixed registers to avoid having
5287 to handle a complicated move */
5288 rc = RC_IRET;
5291 tt = r2 = 0;
5292 if (c < 0) {
5293 r2 = gv(rc);
5294 tt = gjmp(0);
5296 gsym(u);
5298 /* this is horrible, but we must also convert the first
5299 operand */
5300 if (c != 0) {
5301 *vtop = sv;
5302 gen_cast(&type);
5303 if (islv) {
5304 mk_pointer(&vtop->type);
5305 gaddrof();
5306 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5307 gaddrof();
5310 if (c < 0) {
5311 r1 = gv(rc);
5312 move_reg(r2, r1, type.t);
5313 vtop->r = r2;
5314 gsym(tt);
5315 if (islv)
5316 indir();
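/* Illustrative '?:' conversions performed above:

       struct S { int x; };
       double d(int c) { return c ? 1 : 2.5; }                  // both arms converted to double
       int m(int c, struct S a, struct S b) { return (c ? a : b).x; }
                                    // structs stay lvalues via *(c ? &a : &b)
*/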
5322 static void expr_eq(void)
5324 int t;
5326 expr_cond();
5327 if (tok == '=' ||
5328 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5329 tok == TOK_A_XOR || tok == TOK_A_OR ||
5330 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5331 test_lvalue();
5332 t = tok;
5333 next();
5334 if (t == '=') {
5335 expr_eq();
5336 } else {
5337 vdup();
5338 expr_eq();
5339 gen_op(t & 0x7f);
5341 vstore();
5345 ST_FUNC void gexpr(void)
5347 while (1) {
5348 expr_eq();
5349 if (tok != ',')
5350 break;
5351 vpop();
5352 next();
5356 /* parse an expression and return its type without any side effect. */
5357 static void expr_type(CType *type)
5360 nocode_wanted++;
5361 gexpr();
5362 *type = vtop->type;
5363 vpop();
5364 nocode_wanted--;
5367 /* parse a unary expression and return its type without any side
5368 effect. */
5369 static void unary_type(CType *type)
5371 nocode_wanted++;
5372 unary();
5373 *type = vtop->type;
5374 vpop();
5375 nocode_wanted--;
5378 /* parse a constant expression and return value in vtop. */
5379 static void expr_const1(void)
5381 const_wanted++;
5382 expr_cond();
5383 const_wanted--;
5386 /* parse an integer constant and return its value. */
5387 static inline int64_t expr_const64(void)
5389 int64_t c;
5390 expr_const1();
5391 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5392 expect("constant expression");
5393 c = vtop->c.i;
5394 vpop();
5395 return c;
5398 /* parse an integer constant and return its value.
5399 Complain if it doesn't fit 32bit (signed or unsigned). */
5400 ST_FUNC int expr_const(void)
5402 int c;
5403 int64_t wc = expr_const64();
5404 c = wc;
5405 if (c != wc && (unsigned)c != wc)
5406 tcc_error("constant exceeds 32 bit");
5407 return c;
5410 /* return the label token if current token is a label, otherwise
5411 return zero */
5412 static int is_label(void)
5414 int last_tok;
5416 /* fast test first */
5417 if (tok < TOK_UIDENT)
5418 return 0;
5419 /* no need to save tokc because tok is an identifier */
5420 last_tok = tok;
5421 next();
5422 if (tok == ':') {
5423 next();
5424 return last_tok;
5425 } else {
5426 unget_tok(last_tok);
5427 return 0;
5431 static void label_or_decl(int l)
5433 int last_tok;
5435 /* fast test first */
5436 if (tok >= TOK_UIDENT)
5438 /* no need to save tokc because tok is an identifier */
5439 last_tok = tok;
5440 next();
5441 if (tok == ':') {
5442 unget_tok(last_tok);
5443 return;
5445 unget_tok(last_tok);
5447 decl(l);
5450 #ifndef TCC_TARGET_ARM64
5451 static void gfunc_return(CType *func_type)
5453 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5454 CType type, ret_type;
5455 int ret_align, ret_nregs, regsize;
5456 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5457 &ret_align, &regsize);
5458 if (0 == ret_nregs) {
5459 /* if returning structure, must copy it to implicit
5460 first pointer arg location */
5461 type = *func_type;
5462 mk_pointer(&type);
5463 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5464 indir();
5465 vswap();
5466 /* copy structure value to pointer */
5467 vstore();
5468 } else {
5469 /* returning structure packed into registers */
5470 int r, size, addr, align;
5471 size = type_size(func_type,&align);
5472 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5473 (vtop->c.i & (ret_align-1)))
5474 && (align & (ret_align-1))) {
5475 loc = (loc - size) & -ret_align;
5476 addr = loc;
5477 type = *func_type;
5478 vset(&type, VT_LOCAL | VT_LVAL, addr);
5479 vswap();
5480 vstore();
5481 vpop();
5482 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5484 vtop->type = ret_type;
5485 if (is_float(ret_type.t))
5486 r = rc_fret(ret_type.t);
5487 else
5488 r = RC_IRET;
5490 if (ret_nregs == 1)
5491 gv(r);
5492 else {
5493 for (;;) {
5494 vdup();
5495 gv(r);
5496 vpop();
5497 if (--ret_nregs == 0)
5498 break;
5499 /* We assume that when a structure is returned in multiple
5500 registers, their classes are consecutive values of the
5501 sequence s(n) = 2^n */
5502 r <<= 1;
5503 vtop->c.i += regsize;
5507 } else if (is_float(func_type->t)) {
5508 gv(rc_fret(func_type->t));
5509 } else {
5510 gv(RC_IRET);
5512 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5514 #endif
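/* Illustrative returns handled above:

       struct S { long a, b; };
       struct S mk(void) { struct S s = {1, 2}; return s; }

   depending on the target ABI, gfunc_sret() either packs the value into one
   or more return registers or reports 0, in which case the struct is copied
   through the hidden pointer argument held in func_vc. */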
5516 static int case_cmp(const void *pa, const void *pb)
5518 int64_t a = (*(struct case_t**) pa)->v1;
5519 int64_t b = (*(struct case_t**) pb)->v1;
5520 return a < b ? -1 : a > b;
5523 static void gcase(struct case_t **base, int len, int *bsym)
5525 struct case_t *p;
5526 int e;
5527 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5528 gv(RC_INT);
5529 while (len > 4) {
5530 /* binary search */
5531 p = base[len/2];
5532 vdup();
5533 if (ll)
5534 vpushll(p->v2);
5535 else
5536 vpushi(p->v2);
5537 gen_op(TOK_LE);
5538 e = gtst(1, 0);
5539 vdup();
5540 if (ll)
5541 vpushll(p->v1);
5542 else
5543 vpushi(p->v1);
5544 gen_op(TOK_GE);
5545 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5546 /* x < v1 */
5547 gcase(base, len/2, bsym);
5548 if (cur_switch->def_sym)
5549 gjmp_addr(cur_switch->def_sym);
5550 else
5551 *bsym = gjmp(*bsym);
5552 /* x > v2 */
5553 gsym(e);
5554 e = len/2 + 1;
5555 base += e; len -= e;
5557 /* linear scan */
5558 while (len--) {
5559 p = *base++;
5560 vdup();
5561 if (ll)
5562 vpushll(p->v2);
5563 else
5564 vpushi(p->v2);
5565 if (p->v1 == p->v2) {
5566 gen_op(TOK_EQ);
5567 gtst_addr(0, p->sym);
5568 } else {
5569 gen_op(TOK_LE);
5570 e = gtst(1, 0);
5571 vdup();
5572 if (ll)
5573 vpushll(p->v1);
5574 else
5575 vpushi(p->v1);
5576 gen_op(TOK_GE);
5577 gtst_addr(0, p->sym);
5578 gsym(e);
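/* Illustrative switch feeding gcase(): with GNU case ranges, e.g.

       int classify(int n) {
           switch (n) {
           case 1 ... 4:  return 1;     // stored as v1 = 1, v2 = 4
           case 10:       return 2;     // v1 == v2
           default:       return 0;
           }
       }

   the sorted case list is matched by binary search while more than four
   entries remain, then by the linear scan above. */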
5583 static void block(int *bsym, int *csym, int is_expr)
5585 int a, b, c, d, cond;
5586 Sym *s;
5588 /* generate line number info */
5589 if (tcc_state->do_debug)
5590 tcc_debug_line(tcc_state);
5592 if (is_expr) {
5593 /* default return value is (void) */
5594 vpushi(0);
5595 vtop->type.t = VT_VOID;
5598 if (tok == TOK_IF) {
5599 /* if test */
5600 int saved_nocode_wanted = nocode_wanted;
5601 next();
5602 skip('(');
5603 gexpr();
5604 skip(')');
5605 cond = condition_3way();
5606 if (cond == 1)
5607 a = 0, vpop();
5608 else
5609 a = gvtst(1, 0);
5610 if (cond == 0)
5611 nocode_wanted |= 0x20000000;
5612 block(bsym, csym, 0);
5613 if (cond != 1)
5614 nocode_wanted = saved_nocode_wanted;
5615 c = tok;
5616 if (c == TOK_ELSE) {
5617 next();
5618 d = gjmp(0);
5619 gsym(a);
5620 if (cond == 1)
5621 nocode_wanted |= 0x20000000;
5622 block(bsym, csym, 0);
5623 gsym(d); /* patch else jmp */
5624 if (cond != 0)
5625 nocode_wanted = saved_nocode_wanted;
5626 } else
5627 gsym(a);
5628 } else if (tok == TOK_WHILE) {
5629 int saved_nocode_wanted;
5630 nocode_wanted &= ~0x20000000;
5631 next();
5632 d = ind;
5633 vla_sp_restore();
5634 skip('(');
5635 gexpr();
5636 skip(')');
5637 a = gvtst(1, 0);
5638 b = 0;
5639 ++local_scope;
5640 saved_nocode_wanted = nocode_wanted;
5641 block(&a, &b, 0);
5642 nocode_wanted = saved_nocode_wanted;
5643 --local_scope;
5644 gjmp_addr(d);
5645 gsym(a);
5646 gsym_addr(b, d);
5647 } else if (tok == '{') {
5648 Sym *llabel;
5649 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5651 next();
5652 /* record local declaration stack position */
5653 s = local_stack;
5654 llabel = local_label_stack;
5655 ++local_scope;
5657 /* handle local labels declarations */
5658 if (tok == TOK_LABEL) {
5659 next();
5660 for(;;) {
5661 if (tok < TOK_UIDENT)
5662 expect("label identifier");
5663 label_push(&local_label_stack, tok, LABEL_DECLARED);
5664 next();
5665 if (tok == ',') {
5666 next();
5667 } else {
5668 skip(';');
5669 break;
5673 while (tok != '}') {
5674 label_or_decl(VT_LOCAL);
5675 if (tok != '}') {
5676 if (is_expr)
5677 vpop();
5678 block(bsym, csym, is_expr);
5681 /* pop locally defined labels */
5682 label_pop(&local_label_stack, llabel);
5683 /* pop locally defined symbols */
5684 --local_scope;
5685 /* In the is_expr case (a statement expression is finished here),
5686 vtop might refer to symbols on the local_stack. Either via the
5687 type or via vtop->sym. We can't pop those nor any that in turn
5688 might be referred to. To make it easier we don't roll back
5689 any symbols in that case; some upper level call to block() will
5690 do that. We do have to remove such symbols from the lookup
5691 tables, though. sym_pop will do that. */
5692 sym_pop(&local_stack, s, is_expr);
5694 /* Pop VLA frames and restore stack pointer if required */
5695 if (vlas_in_scope > saved_vlas_in_scope) {
5696 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5697 vla_sp_restore();
5699 vlas_in_scope = saved_vlas_in_scope;
5701 next();
5702 } else if (tok == TOK_RETURN) {
5703 next();
5704 if (tok != ';') {
5705 gexpr();
5706 gen_assign_cast(&func_vt);
5707 gfunc_return(&func_vt);
5709 skip(';');
5710 /* jump unless last stmt in top-level block */
5711 if (tok != '}' || local_scope != 1)
5712 rsym = gjmp(rsym);
5713 nocode_wanted |= 0x20000000;
5714 } else if (tok == TOK_BREAK) {
5715 /* compute jump */
5716 if (!bsym)
5717 tcc_error("cannot break");
5718 *bsym = gjmp(*bsym);
5719 next();
5720 skip(';');
5721 nocode_wanted |= 0x20000000;
5722 } else if (tok == TOK_CONTINUE) {
5723 /* compute jump */
5724 if (!csym)
5725 tcc_error("cannot continue");
5726 vla_sp_restore_root();
5727 *csym = gjmp(*csym);
5728 next();
5729 skip(';');
5730 } else if (tok == TOK_FOR) {
5731 int e;
5732 int saved_nocode_wanted;
5733 nocode_wanted &= ~0x20000000;
5734 next();
5735 skip('(');
5736 s = local_stack;
5737 ++local_scope;
5738 if (tok != ';') {
5739 /* c99 for-loop init decl? */
5740 if (!decl0(VT_LOCAL, 1)) {
5741 /* no, regular for-loop init expr */
5742 gexpr();
5743 vpop();
5746 skip(';');
5747 d = ind;
5748 c = ind;
5749 vla_sp_restore();
5750 a = 0;
5751 b = 0;
5752 if (tok != ';') {
5753 gexpr();
5754 a = gvtst(1, 0);
5756 skip(';');
5757 if (tok != ')') {
5758 e = gjmp(0);
5759 c = ind;
5760 vla_sp_restore();
5761 gexpr();
5762 vpop();
5763 gjmp_addr(d);
5764 gsym(e);
5766 skip(')');
5767 saved_nocode_wanted = nocode_wanted;
5768 block(&a, &b, 0);
5769 nocode_wanted = saved_nocode_wanted;
5770 gjmp_addr(c);
5771 gsym(a);
5772 gsym_addr(b, c);
5773 --local_scope;
5774 sym_pop(&local_stack, s, 0);
5776 } else
5777 if (tok == TOK_DO) {
5778 int saved_nocode_wanted;
5779 nocode_wanted &= ~0x20000000;
5780 next();
5781 a = 0;
5782 b = 0;
5783 d = ind;
5784 vla_sp_restore();
5785 saved_nocode_wanted = nocode_wanted;
5786 block(&a, &b, 0);
5787 skip(TOK_WHILE);
5788 skip('(');
5789 gsym(b);
5790 gexpr();
5791 c = gvtst(0, 0);
5792 gsym_addr(c, d);
5793 nocode_wanted = saved_nocode_wanted;
5794 skip(')');
5795 gsym(a);
5796 skip(';');
5797 } else
5798 if (tok == TOK_SWITCH) {
5799 struct switch_t *saved, sw;
5800 int saved_nocode_wanted = nocode_wanted;
5801 SValue switchval;
5802 next();
5803 skip('(');
5804 gexpr();
5805 skip(')');
5806 switchval = *vtop--;
5807 a = 0;
5808 b = gjmp(0); /* jump to first case */
5809 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5810 saved = cur_switch;
5811 cur_switch = &sw;
5812 block(&a, csym, 0);
5813 nocode_wanted = saved_nocode_wanted;
5814 a = gjmp(a); /* add implicit break */
5815 /* case lookup */
5816 gsym(b);
5817 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5818 for (b = 1; b < sw.n; b++)
5819 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5820 tcc_error("duplicate case value");
5821 /* Our switch table sorting is signed, so the compared
5822 value needs to be as well when it's 64bit. */
5823 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5824 switchval.type.t &= ~VT_UNSIGNED;
5825 vpushv(&switchval);
5826 gcase(sw.p, sw.n, &a);
5827 vpop();
5828 if (sw.def_sym)
5829 gjmp_addr(sw.def_sym);
5830 dynarray_reset(&sw.p, &sw.n);
5831 cur_switch = saved;
5832 /* break label */
5833 gsym(a);
5834 } else
5835 if (tok == TOK_CASE) {
5836 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5837 if (!cur_switch)
5838 expect("switch");
5839 nocode_wanted &= ~0x20000000;
5840 next();
5841 cr->v1 = cr->v2 = expr_const64();
5842 if (gnu_ext && tok == TOK_DOTS) {
5843 next();
5844 cr->v2 = expr_const64();
5845 if (cr->v2 < cr->v1)
5846 tcc_warning("empty case range");
5848 cr->sym = ind;
5849 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
5850 skip(':');
5851 is_expr = 0;
5852 goto block_after_label;
5853 } else
5854 if (tok == TOK_DEFAULT) {
5855 next();
5856 skip(':');
5857 if (!cur_switch)
5858 expect("switch");
5859 if (cur_switch->def_sym)
5860 tcc_error("too many 'default'");
5861 cur_switch->def_sym = ind;
5862 is_expr = 0;
5863 goto block_after_label;
5864 } else
5865 if (tok == TOK_GOTO) {
5866 next();
5867 if (tok == '*' && gnu_ext) {
5868 /* computed goto */
5869 next();
5870 gexpr();
5871 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5872 expect("pointer");
5873 ggoto();
5874 } else if (tok >= TOK_UIDENT) {
5875 s = label_find(tok);
5876 /* put forward definition if needed */
5877 if (!s) {
5878 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5879 } else {
5880 if (s->r == LABEL_DECLARED)
5881 s->r = LABEL_FORWARD;
5883 vla_sp_restore_root();
5884 if (s->r & LABEL_FORWARD)
5885 s->jnext = gjmp(s->jnext);
5886 else
5887 gjmp_addr(s->jnext);
5888 next();
5889 } else {
5890 expect("label identifier");
5892 skip(';');
5893 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5894 asm_instr();
5895 } else {
5896 b = is_label();
5897 if (b) {
5898 /* label case */
5899 s = label_find(b);
5900 if (s) {
5901 if (s->r == LABEL_DEFINED)
5902 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5903 gsym(s->jnext);
5904 s->r = LABEL_DEFINED;
5905 } else {
5906 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5908 s->jnext = ind;
5909 vla_sp_restore();
5910 /* we accept this, but it is a mistake */
5911 block_after_label:
5912 nocode_wanted &= ~0x20000000;
5913 if (tok == '}') {
5914 tcc_warning("deprecated use of label at end of compound statement");
5915 } else {
5916 if (is_expr)
5917 vpop();
5918 block(bsym, csym, is_expr);
5920 } else {
5921 /* expression case */
5922 if (tok != ';') {
5923 if (is_expr) {
5924 vpop();
5925 gexpr();
5926 } else {
5927 gexpr();
5928 vpop();
5931 skip(';');
5936 #define EXPR_CONST 1
5937 #define EXPR_ANY 2
5939 static void parse_init_elem(int expr_type)
5941 int saved_global_expr;
5942 switch(expr_type) {
5943 case EXPR_CONST:
5944 /* compound literals must be allocated globally in this case */
5945 saved_global_expr = global_expr;
5946 global_expr = 1;
5947 expr_const1();
5948 global_expr = saved_global_expr;
5949 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5950 (compound literals). */
5951 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5952 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
5953 || vtop->sym->v < SYM_FIRST_ANOM))
5954 #ifdef TCC_TARGET_PE
5955 || (vtop->type.t & VT_IMPORT)
5956 #endif
5958 tcc_error("initializer element is not constant");
5959 break;
5960 case EXPR_ANY:
5961 expr_eq();
5962 break;
5966 /* t is the array or struct type. c is the array or struct
5967 address. cur_field is the pointer to the current
5968 value, for arrays the 'c' member contains the current start
5969 index and the 'r' contains the end index (in case of range init).
5970 'size_only' is true if only size info is needed (only used
5971 in arrays) */
5972 static void decl_designator(CType *type, Section *sec, unsigned long c,
5973 Sym **cur_field, int size_only)
5975 Sym *s, *f;
5976 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5977 CType type1;
5979 notfirst = 0;
5980 elem_size = 0;
5981 nb_elems = 1;
5982 if (gnu_ext && (l = is_label()) != 0)
5983 goto struct_field;
5984 while (tok == '[' || tok == '.') {
5985 if (tok == '[') {
5986 if (!(type->t & VT_ARRAY))
5987 expect("array type");
5988 s = type->ref;
5989 next();
5990 index = expr_const();
5991 if (index < 0 || (s->c >= 0 && index >= s->c))
5992 tcc_error("invalid index");
5993 if (tok == TOK_DOTS && gnu_ext) {
5994 next();
5995 index_last = expr_const();
5996 if (index_last < 0 ||
5997 (s->c >= 0 && index_last >= s->c) ||
5998 index_last < index)
5999 tcc_error("invalid index");
6000 } else {
6001 index_last = index;
6003 skip(']');
6004 if (!notfirst) {
6005 (*cur_field)->c = index;
6006 (*cur_field)->r = index_last;
6008 type = pointed_type(type);
6009 elem_size = type_size(type, &align);
6010 c += index * elem_size;
6011 /* NOTE: we only support ranges for last designator */
6012 nb_elems = index_last - index + 1;
6013 if (nb_elems != 1) {
6014 notfirst = 1;
6015 break;
6017 } else {
6018 next();
6019 l = tok;
6020 next();
6021 struct_field:
6022 if ((type->t & VT_BTYPE) != VT_STRUCT)
6023 expect("struct/union type");
6024 f = find_field(type, l);
6025 if (!f)
6026 expect("field");
6027 if (!notfirst)
6028 *cur_field = f;
6029 /* XXX: fix this mess by using explicit storage field */
6030 type1 = f->type;
6031 type1.t |= (type->t & ~VT_TYPE);
6032 type = &type1;
6033 c += f->c;
6035 notfirst = 1;
6037 if (notfirst) {
6038 if (tok == '=') {
6039 next();
6040 } else {
6041 if (!gnu_ext)
6042 expect("=");
6044 } else {
6045 if (type->t & VT_ARRAY) {
6046 index = (*cur_field)->c;
6047 if (type->ref->c >= 0 && index >= type->ref->c)
6048 tcc_error("index too large");
6049 type = pointed_type(type);
6050 c += index * type_size(type, &align);
6051 } else {
6052 f = *cur_field;
6053 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6054 *cur_field = f = f->next;
6055 if (!f)
6056 tcc_error("too many field init");
6057 /* XXX: fix this mess by using explicit storage field */
6058 type1 = f->type;
6059 type1.t |= (type->t & ~VT_TYPE);
6060 type = &type1;
6061 c += f->c;
6064 decl_initializer(type, sec, c, 0, size_only);
6066 /* XXX: make it more general */
6067 if (!size_only && nb_elems > 1) {
6068 unsigned long c_end;
6069 uint8_t *src, *dst;
6070 int i;
6072 if (!sec) {
6073 vset(type, VT_LOCAL|VT_LVAL, c);
6074 for (i = 1; i < nb_elems; i++) {
6075 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6076 vswap();
6077 vstore();
6079 vpop();
6080 } else {
6081 c_end = c + nb_elems * elem_size;
6082 if (c_end > sec->data_allocated)
6083 section_realloc(sec, c_end);
6084 src = sec->data + c;
6085 dst = src;
6086 for(i = 1; i < nb_elems; i++) {
6087 dst += elem_size;
6088 memcpy(dst, src, elem_size);
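/* Illustrative designated initializers handled above:

       struct P { int x, y; };
       struct P p = { .y = 2, .x = 1 };
       int t[8] = { [2] = 5, [4 ... 6] = 7 };   // GNU range, replicated by the copy loop above
*/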
6094 /* store a value or an expression directly in global data or in local array */
6095 static void init_putv(CType *type, Section *sec, unsigned long c)
6097 int bt, bit_pos, bit_size;
6098 void *ptr;
6099 unsigned long long bit_mask;
6100 CType dtype;
6102 dtype = *type;
6103 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6105 if (sec) {
6106 int size, align;
6107 /* XXX: not portable */
6108 /* XXX: generate error if incorrect relocation */
6109 gen_assign_cast(&dtype);
6110 bt = type->t & VT_BTYPE;
6111 size = type_size(type, &align);
6112 if (c + size > sec->data_allocated) {
6113 section_realloc(sec, c + size);
6115 ptr = sec->data + c;
6116 /* XXX: make code faster ? */
6117 if (!(type->t & VT_BITFIELD)) {
6118 bit_pos = 0;
6119 bit_size = PTR_SIZE * 8;
6120 bit_mask = -1LL;
6121 } else {
6122 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6123 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6124 bit_mask = (1LL << bit_size) - 1;
6126 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6127 vtop->sym->v >= SYM_FIRST_ANOM &&
6128 /* XXX This rejects compound literals like
6129 '(void *){ptr}'. The problem is that '&sym' is
6130 represented the same way, which would be ruled out
6131 by the SYM_FIRST_ANOM check above, but also '"string"'
6132 in 'char *p = "string"' is represented the same
6133 with the type being VT_PTR and the symbol being an
6134 anonymous one. That is, there's no difference in vtop
6135 between '(void *){x}' and '&(void *){x}'. Ignore
6136 pointer typed entities here. Hopefully no real code
6137 will ever use compound literals with scalar type. */
6138 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6139 /* These come from compound literals, memcpy stuff over. */
6140 Section *ssec;
6141 ElfW(Sym) *esym;
6142 ElfW_Rel *rel;
6143 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6144 ssec = tcc_state->sections[esym->st_shndx];
6145 memmove (ptr, ssec->data + esym->st_value, size);
6146 if (ssec->reloc) {
6147 /* We need to copy over all memory contents, and that
6148 includes relocations. Use the fact that relocs are
6149 created in order, so look from the end of relocs
6150 until we hit one before the copied region. */
6151 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6152 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6153 while (num_relocs--) {
6154 rel--;
6155 if (rel->r_offset >= esym->st_value + size)
6156 continue;
6157 if (rel->r_offset < esym->st_value)
6158 break;
6159 /* Note: if the same fields are initialized multiple
6160 times (possible with designators) then we possibly
6161 add multiple relocations for the same offset here.
6162 That would lead to wrong code, the last reloc needs
6163 to win. We clean this up later after the whole
6164 initializer is parsed. */
6165 put_elf_reloca(symtab_section, sec,
6166 c + rel->r_offset - esym->st_value,
6167 ELFW(R_TYPE)(rel->r_info),
6168 ELFW(R_SYM)(rel->r_info),
6169 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6170 rel->r_addend
6171 #else
6173 #endif
6177 } else {
6178 if ((vtop->r & VT_SYM) &&
6179 (bt == VT_BYTE ||
6180 bt == VT_SHORT ||
6181 bt == VT_DOUBLE ||
6182 bt == VT_LDOUBLE ||
6183 #if PTR_SIZE == 8
6184 (bt == VT_LLONG && bit_size != 64) ||
6185 bt == VT_INT
6186 #else
6187 bt == VT_LLONG ||
6188 (bt == VT_INT && bit_size != 32)
6189 #endif
6191 tcc_error("initializer element is not computable at load time");
6192 switch(bt) {
6193 /* XXX: when cross-compiling we assume that each type has the
6194 same representation on host and target, which is likely to
6195 be wrong in the case of long double */
6196 case VT_BOOL:
6197 vtop->c.i = (vtop->c.i != 0);
6198 case VT_BYTE:
6199 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6200 break;
6201 case VT_SHORT:
6202 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6203 break;
6204 case VT_DOUBLE:
6205 *(double *)ptr = vtop->c.d;
6206 break;
6207 case VT_LDOUBLE:
6208 if (sizeof(long double) == LDOUBLE_SIZE)
6209 *(long double *)ptr = vtop->c.ld;
6210 else if (sizeof(double) == LDOUBLE_SIZE)
6211 *(double *)ptr = vtop->c.ld;
6212 else
6213 tcc_error("can't cross compile long double constants");
6214 break;
6215 #if PTR_SIZE != 8
6216 case VT_LLONG:
6217 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6218 break;
6219 #else
6220 case VT_LLONG:
6221 #endif
6222 case VT_PTR:
6224 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6225 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6226 if (vtop->r & VT_SYM)
6227 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6228 else
6229 *(addr_t *)ptr |= val;
6230 #else
6231 if (vtop->r & VT_SYM)
6232 greloc(sec, vtop->sym, c, R_DATA_PTR);
6233 *(addr_t *)ptr |= val;
6234 #endif
6235 break;
6237 default:
6239 int val = (vtop->c.i & bit_mask) << bit_pos;
6240 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6241 if (vtop->r & VT_SYM)
6242 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6243 else
6244 *(int *)ptr |= val;
6245 #else
6246 if (vtop->r & VT_SYM)
6247 greloc(sec, vtop->sym, c, R_DATA_PTR);
6248 *(int *)ptr |= val;
6249 #endif
6250 break;
6254 vtop--;
6255 } else {
6256 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6257 vswap();
6258 vstore();
6259 vpop();
6263 /* put zeros for variable based init */
6264 static void init_putz(Section *sec, unsigned long c, int size)
6266 if (sec) {
6267 /* nothing to do because globals are already set to zero */
6268 } else {
6269 vpush_global_sym(&func_old_type, TOK_memset);
6270 vseti(VT_LOCAL, c);
6271 #ifdef TCC_TARGET_ARM
6272 vpushs(size);
6273 vpushi(0);
6274 #else
6275 vpushi(0);
6276 vpushs(size);
6277 #endif
6278 gfunc_call(3);
6282 /* 't' contains the type and storage info. 'c' is the offset of the
6283 object in section 'sec'. If 'sec' is NULL, it means stack based
6284 allocation. 'first' is true if array '{' must be read (multi
6285 dimension implicit array init handling). 'size_only' is true if
6286 size only evaluation is wanted (only for arrays). */
6287 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6288 int first, int size_only)
6290 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6291 int size1, align1;
6292 int have_elem;
6293 Sym *s, *f;
6294 Sym indexsym;
6295 CType *t1;
6297 /* If we are currently at a '}' or ',' we have read an initializer
6298 element in one of our callers, and not yet consumed it. */
6299 have_elem = tok == '}' || tok == ',';
6300 if (!have_elem && tok != '{' &&
6301 /* In case of strings we have special handling for arrays, so
6302 don't consume them as initializer value (which would commit them
6303 to some anonymous symbol). */
6304 tok != TOK_LSTR && tok != TOK_STR &&
6305 !size_only) {
6306 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6307 have_elem = 1;
6310 if (have_elem &&
6311 !(type->t & VT_ARRAY) &&
6312 /* Use i_c_parameter_t to strip toplevel qualifiers.
6313 The source type might have VT_CONSTANT set, which is
6314 of course assignable to non-const elements. */
6315 is_compatible_parameter_types(type, &vtop->type)) {
6316 init_putv(type, sec, c);
6317 } else if (type->t & VT_ARRAY) {
6318 s = type->ref;
6319 n = s->c;
6320 array_length = 0;
6321 t1 = pointed_type(type);
6322 size1 = type_size(t1, &align1);
6324 no_oblock = 1;
6325 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6326 tok == '{') {
6327 if (tok != '{')
6328 tcc_error("character array initializer must be a literal,"
6329 " optionally enclosed in braces");
6330 skip('{');
6331 no_oblock = 0;
6334 /* only parse strings here if correct type (otherwise: handle
6335 them as ((w)char *) expressions) */
6336 if ((tok == TOK_LSTR &&
6337 #ifdef TCC_TARGET_PE
6338 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6339 #else
6340 (t1->t & VT_BTYPE) == VT_INT
6341 #endif
6342 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6343 while (tok == TOK_STR || tok == TOK_LSTR) {
6344 int cstr_len, ch;
6346 /* compute maximum number of chars wanted */
6347 if (tok == TOK_STR)
6348 cstr_len = tokc.str.size;
6349 else
6350 cstr_len = tokc.str.size / sizeof(nwchar_t);
6351 cstr_len--;
6352 nb = cstr_len;
6353 if (n >= 0 && nb > (n - array_length))
6354 nb = n - array_length;
6355 if (!size_only) {
6356 if (cstr_len > nb)
6357 tcc_warning("initializer-string for array is too long");
6358 /* in order to go faster for the common case (char
6359 string in a global variable), we handle it
6360 specifically */
6361 if (sec && tok == TOK_STR && size1 == 1) {
6362 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6363 } else {
6364 for(i=0;i<nb;i++) {
6365 if (tok == TOK_STR)
6366 ch = ((unsigned char *)tokc.str.data)[i];
6367 else
6368 ch = ((nwchar_t *)tokc.str.data)[i];
6369 vpushi(ch);
6370 init_putv(t1, sec, c + (array_length + i) * size1);
6374 array_length += nb;
6375 next();
6377 /* only add trailing zero if enough storage (no
6378 warning in this case since it is standard) */
6379 if (n < 0 || array_length < n) {
6380 if (!size_only) {
6381 vpushi(0);
6382 init_putv(t1, sec, c + (array_length * size1));
6384 array_length++;
6386 } else {
6387 indexsym.c = 0;
6388 indexsym.r = 0;
6389 f = &indexsym;
6391 do_init_list:
6392 while (tok != '}' || have_elem) {
6393 decl_designator(type, sec, c, &f, size_only);
6394 have_elem = 0;
6395 index = f->c;
6396 /* must put zero in holes (note that doing it that way
6397 ensures that it even works with designators) */
6398 if (!size_only && array_length < index) {
6399 init_putz(sec, c + array_length * size1,
6400 (index - array_length) * size1);
6402 if (type->t & VT_ARRAY) {
6403 index = indexsym.c = ++indexsym.r;
6404 } else {
6405 index = index + type_size(&f->type, &align1);
6406 if (s->type.t == TOK_UNION)
6407 f = NULL;
6408 else
6409 f = f->next;
6411 if (index > array_length)
6412 array_length = index;
6414 if (type->t & VT_ARRAY) {
6415 /* special test for multi dimensional arrays (may not
6416 be strictly correct if designators are used at the
6417 same time) */
6418 if (no_oblock && index >= n)
6419 break;
6420 } else {
6421 if (no_oblock && f == NULL)
6422 break;
6424 if (tok == '}')
6425 break;
6426 skip(',');
6429 /* put zeros at the end */
6430 if (!size_only && array_length < n) {
6431 init_putz(sec, c + array_length * size1,
6432 (n - array_length) * size1);
6434 if (!no_oblock)
6435 skip('}');
6436 /* patch type size if needed, which happens only for array types */
6437 if (n < 0)
6438 s->c = array_length;
6439 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6440 size1 = 1;
6441 no_oblock = 1;
6442 if (first || tok == '{') {
6443 skip('{');
6444 no_oblock = 0;
6446 s = type->ref;
6447 f = s->next;
6448 array_length = 0;
6449 n = s->c;
6450 goto do_init_list;
6451 } else if (tok == '{') {
6452 next();
6453 decl_initializer(type, sec, c, first, size_only);
6454 skip('}');
6455 } else if (size_only) {
6456 /* If we supported only ISO C we wouldn't have to accept calling
6457 this on anything other than an array with size_only==1 (and even then
6458 only on the outermost level, so no recursion would be needed),
6459 because initializing a flex array member isn't supported.
6460 But GNU C supports it, so we need to recurse even into
6461 subfields of structs and arrays when size_only is set. */
6462 /* just skip expression */
6463 parlevel = parlevel1 = 0;
6464 while ((parlevel > 0 || parlevel1 > 0 ||
6465 (tok != '}' && tok != ',')) && tok != -1) {
6466 if (tok == '(')
6467 parlevel++;
6468 else if (tok == ')') {
6469 if (parlevel == 0 && parlevel1 == 0)
6470 break;
6471 parlevel--;
6473 else if (tok == '{')
6474 parlevel1++;
6475 else if (tok == '}') {
6476 if (parlevel == 0 && parlevel1 == 0)
6477 break;
6478 parlevel1--;
6480 next();
6482 } else {
6483 if (!have_elem) {
6484 /* This should happen only when we haven't parsed
6485 the init element above for fear of committing a
6486 string constant to memory too early. */
6487 if (tok != TOK_STR && tok != TOK_LSTR)
6488 expect("string constant");
6489 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6491 init_putv(type, sec, c);
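/* Illustrative initializers exercising the branches above:

       char s[] = "hi";                     // string literal, array branch
       int  m[2][2] = { {1, 2}, {3, 4} };   // nested braces recurse into decl_initializer()
       struct Q { int n; char d[]; };       // flexible array member, sized by the
                                            // size_only pass in decl_initializer_alloc()
*/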
6495 /* parse an initializer for type 't' if 'has_init' is non zero, and
6496 allocate space in local or global data space ('r' is either
6497 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6498 variable 'v' of scope 'scope' is declared before initializers
6499 are parsed. If 'v' is zero, then a reference to the new object
6500 is put in the value stack. If 'has_init' is 2, a special parsing
6501 is done to handle string constants. */
6502 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6503 int has_init, int v, int scope)
6505 int size, align, addr, data_offset;
6506 int level;
6507 ParseState saved_parse_state = {0};
6508 TokenString *init_str = NULL;
6509 Section *sec;
6510 Sym *flexible_array;
6512 flexible_array = NULL;
6513 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6514 Sym *field = type->ref->next;
6515 if (field) {
6516 while (field->next)
6517 field = field->next;
6518 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6519 flexible_array = field;
6523 size = type_size(type, &align);
6524 /* If unknown size, we must evaluate it before
6525 evaluating initializers because
6526 initializers can generate global data too
6527 (e.g. string pointers or ISOC99 compound
6528 literals). It also simplifies local
6529 initializers handling */
6530 if (size < 0 || (flexible_array && has_init)) {
6531 if (!has_init)
6532 tcc_error("unknown type size");
6533 /* get all init string */
6534 init_str = tok_str_alloc();
6535 if (has_init == 2) {
6536 /* only get strings */
6537 while (tok == TOK_STR || tok == TOK_LSTR) {
6538 tok_str_add_tok(init_str);
6539 next();
6541 } else {
6542 level = 0;
6543 while (level > 0 || (tok != ',' && tok != ';')) {
6544 if (tok < 0)
6545 tcc_error("unexpected end of file in initializer");
6546 tok_str_add_tok(init_str);
6547 if (tok == '{')
6548 level++;
6549 else if (tok == '}') {
6550 level--;
6551 if (level <= 0) {
6552 next();
6553 break;
6556 next();
6559 tok_str_add(init_str, -1);
6560 tok_str_add(init_str, 0);
6562 /* compute size */
6563 save_parse_state(&saved_parse_state);
6565 begin_macro(init_str, 1);
6566 next();
6567 decl_initializer(type, NULL, 0, 1, 1);
6568 /* prepare second initializer parsing */
6569 macro_ptr = init_str->str;
6570 next();
6572 /* if still unknown size, error */
6573 size = type_size(type, &align);
6574 if (size < 0)
6575 tcc_error("unknown type size");
6577 /* If there's a flex member and it was used in the initializer
6578 adjust size. */
6579 if (flexible_array &&
6580 flexible_array->type.ref->c > 0)
6581 size += flexible_array->type.ref->c
6582 * pointed_size(&flexible_array->type);
6583 /* take into account specified alignment if bigger */
6584 if (ad->a.aligned) {
6585 int speca = 1 << (ad->a.aligned - 1);
6586 if (speca > align)
6587 align = speca;
6588 } else if (ad->a.packed) {
6589 align = 1;
6591 if ((r & VT_VALMASK) == VT_LOCAL) {
6592 sec = NULL;
6593 #ifdef CONFIG_TCC_BCHECK
6594 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6595 loc--;
6597 #endif
6598 loc = (loc - size) & -align;
6599 addr = loc;
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
            addr_t *bounds_ptr;
            /* add padding between regions */
            loc--;
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym_push(v, type, r, addr);
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        Sym *sym = NULL;
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                patch_storage(sym, type);
                if (sym->type.t & VT_EXTERN) {
                    /* if the variable is extern, it was not allocated */
                    sym->type.t &= ~VT_EXTERN;
                    /* set array size if it was omitted in extern
                       declaration */
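                    /* e.g. 'extern int a[];' later completed by 'int a[10];' */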
                    if ((sym->type.t & VT_ARRAY) &&
                        sym->type.ref->c < 0 &&
                        type->ref->c >= 0)
                        sym->type.ref->c = type->ref->c;
                } else if (!has_init) {
                    /* we accept several definitions of the same
                       global variable. this is tricky, because we
                       must play with the SHN_COMMON type of the symbol */
                    /* no init data, we won't add more to the symbol */
                    update_storage(sym);
                    goto no_alloc;
                } else if (sym->c) {
                    ElfW(Sym) *esym;
                    esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
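                    /* only a second *initialized* definition, i.e. one whose
                       data already went into .data, is rejected below; plain
                       tentative re-definitions were diverted above */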
                    if (esym->st_shndx == data_section->sh_num)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                }
            }
        }

        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            if (has_init)
                sec = data_section;
            else if (tcc_state->nocommon)
                sec = bss_section;
        }

        if (sec) {
            data_offset = sec->data_offset;
            data_offset = (data_offset + align - 1) & -align;
            addr = data_offset;
            /* very important to increment global pointer at this time
               because initializers themselves can create new initializers */
            data_offset += size;
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (tcc_state->do_bounds_check)
                data_offset++;
#endif
            sec->data_offset = data_offset;
            /* allocate section space to put the data */
            if (sec->sh_type != SHT_NOBITS &&
                data_offset > sec->data_allocated)
                section_realloc(sec, data_offset);
            /* align section if needed */
            if (align > sec->sh_addralign)
                sec->sh_addralign = align;
        } else {
            addr = 0; /* avoid warning */
        }

        if (v) {
            if (scope != VT_CONST || !sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                sym->asm_label = ad->asm_label;
            }
            /* update symbol definition */
            if (sec) {
                put_extern_sym(sym, sec, addr, size);
            } else {
                put_extern_sym(sym, SECTION_COMMON, align, size);
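                /* no section: presumably emitted as a common (SHN_COMMON)
                   symbol; note the alignment is passed in the address slot */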
            }
        } else {
            /* push global reference */
            sym = get_sym_ref(type, sec, addr, size);
            vpushsym(type, sym);
            vtop->r |= r;
        }

#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (tcc_state->do_bounds_check) {
            addr_t *bounds_ptr;

            greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }

    if (type->t & VT_VLA) {
        int a;

        /* save current stack pointer */
        if (vlas_in_scope == 0) {
            if (vla_sp_root_loc == -1)
                vla_sp_root_loc = (loc -= PTR_SIZE);
            gen_vla_sp_save(vla_sp_root_loc);
        }

        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
        gen_vla_sp_save(addr);
        vla_sp_loc = addr;
        vlas_in_scope++;
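        /* e.g. 'int n = ...; char buf[n];' ends up here: the stack pointer is
           saved once per scope, then the VLA is allocated by adjusting the
           stack pointer at run time and the new stack pointer (the array's
           address) is stored at 'addr' */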
    } else if (has_init) {
        size_t oldreloc_offset = 0;
        if (sec && sec->reloc)
            oldreloc_offset = sec->reloc->data_offset;
        decl_initializer(type, sec, addr, 1, 0);
        if (sec && sec->reloc)
            squeeze_multi_relocs(sec, oldreloc_offset);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        restore_parse_state(&saved_parse_state);
    }
}

/* parse an old style function declaration list */
/* XXX: check multiple parameters */
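/* e.g. 'int f(a, b) int a; char *b; { ... }': the declarations between the
   parameter list and the opening '{' are parsed here and attached to the
   corresponding entries of the function's parameter list */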
static void func_decl_list(Sym *func_sym)
{
    AttributeDef ad;
    int v;
    Sym *s;
    CType btype, type;

    /* parse each declaration */
    while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
           tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
        if (!parse_btype(&btype, &ad))
            expect("declaration list");
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            tok == ';') {
            /* we accept no variable after */
        } else {
            for(;;) {
                type = btype;
                type_decl(&type, &ad, &v, TYPE_DIRECT);
                /* find parameter in function parameter list */
                s = func_sym->next;
                while (s != NULL) {
                    if ((s->v & ~SYM_FIELD) == v)
                        goto found;
                    s = s->next;
                }
                tcc_error("declaration for parameter '%s' but no such parameter",
                          get_tok_str(v, NULL));
            found:
                /* check that no storage specifier except 'register' was given */
                if (type.t & VT_STORAGE)
                    tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
                convert_parameter_type(&type);
                /* we can add the type (NOTE: it could be local to the function) */
                s->type = type;
                /* accept other parameters */
                if (tok == ',')
                    next();
                else
                    break;
            }
        }
        skip(';');
    }
}

/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    nocode_wanted = 0;
    ind = cur_text_section->data_offset;
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    /* Initialize VLA state */
    vla_sp_loc = -1;
    vla_sp_root_loc = -1;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(&sym->type);
    local_scope = 0;
    rsym = 0;
    block(NULL, NULL, 0);
    nocode_wanted = 0;
    gsym(rsym);
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL);
    /* reset local stack */
    local_scope = 0;
    sym_pop(&local_stack, NULL, 0);
    /* end of function */
    /* patch symbol size */
    ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
        ind - func_ind;
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    nocode_wanted = 1;
    check_vstack();
}

static void gen_inline_functions(TCCState *s)
{
    Sym *sym;
    int inline_generated, i, ln;
    struct InlineFunc *fn;

    ln = file->line_num;
    /* iterate while inline functions are referenced */
    for(;;) {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            if (sym && sym->c) {
                /* the function was used: generate its code and
                   convert it to a normal function */
                fn->sym = NULL;
                if (file)
                    pstrcpy(file->filename, sizeof file->filename, fn->filename);
                sym->type.t &= ~VT_INLINE;

                begin_macro(fn->func_str, 1);
                next();
                cur_text_section = text_section;
                gen_function(sym);
                end_macro();

                inline_generated = 1;
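                /* the body just generated may itself have referenced other
                   inline functions, so the outer loop makes another pass
                   until no further code is produced */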
            }
        }
        if (!inline_generated)
            break;
    }
    file->line_num = ln;
}

ST_FUNC void free_inline_functions(TCCState *s)
{
    int i;
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        if (fn->sym)
            tok_str_free(fn->func_str);
    }
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
}

/* 'l' is VT_LOCAL or VT_CONST to define default storage type */
static int decl0(int l, int is_for_loop_init)
{
    int v, has_init, r;
    CType type, btype;
    Sym *sym;
    AttributeDef ad;

    while (1) {
        if (!parse_btype(&btype, &ad)) {
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' */
            /* XXX: find more elegant solution */
            if (tok == ';') {
                next();
                continue;
            }
            if (l == VT_CONST &&
                (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                /* global asm block */
                asm_global_instr();
                continue;
            }
            /* special test for old K&R protos without explicit int
               type. Only accepted when defining global data */
            if (l == VT_LOCAL || tok < TOK_UIDENT)
                break;
            btype.t = VT_INT;
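            /* e.g. an old style definition 'f(a, b) { ... }' at file scope is
               accepted here with an implicit 'int' base type */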
        }
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            tok == ';') {
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                int v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
            }
            next();
            continue;
        }
        while (1) { /* iterate thru each declaration */
            type = btype;
            /* If the base type itself was an array type of unspecified
               size (like in 'typedef int arr[]; arr x = {1};') then
               we will overwrite the unknown size by the real one for
               this decl. We need to unshare the ref symbol holding
               that size. */
            if ((type.t & VT_ARRAY) && type.ref->c < 0) {
                type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
            }
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
                    tcc_error("function without file scope cannot be static");
                }
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->c == FUNC_OLD)
                    func_decl_list(sym);
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
                if (tok == '{')
                    expect(";");
            }

            if (ad.a.weak)
                type.t |= VT_WEAK;
#ifdef TCC_TARGET_PE
            if (ad.a.func_import || ad.a.func_export) {
                if (type.t & (VT_STATIC|VT_TYPEDEF))
                    tcc_error("cannot have dll linkage with static or typedef");
                if (ad.a.func_export)
                    type.t |= VT_EXPORT;
                else if ((type.t & VT_BTYPE) != VT_FUNC)
                    type.t |= VT_IMPORT|VT_EXTERN;
            }
#endif
            type.t |= ad.a.visibility << VT_VIS_SHIFT;

            if (tok == '{') {
                if (l == VT_LOCAL)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition */
                sym = type.ref;
                while ((sym = sym->next) != NULL)
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");

                /* XXX: cannot do better now: convert extern line to static inline */
                if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
                    type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                sym = sym_find(v);
                if (sym) {
                    Sym *ref;
                    if ((sym->type.t & VT_BTYPE) != VT_FUNC)
                        goto func_error1;

                    ref = sym->type.ref;

                    /* use func_call from prototype if not defined */
                    if (ref->a.func_call != FUNC_CDECL
                        && type.ref->a.func_call == FUNC_CDECL)
                        type.ref->a.func_call = ref->a.func_call;

                    /* use static from prototype */
                    if (sym->type.t & VT_STATIC)
                        type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                    /* If the definition has no visibility use the
                       one from prototype. */
                    if (!(type.t & VT_VIS_MASK))
                        type.t |= sym->type.t & VT_VIS_MASK;

                    /* apply other storage attributes from prototype */
                    type.t |= sym->type.t & (VT_EXPORT|VT_WEAK);

                    if (!is_compatible_types(&sym->type, &type)) {
                    func_error1:
                        tcc_error("incompatible types for redefinition of '%s'",
                                  get_tok_str(v, NULL));
                    }
                    if (ref->a.func_body)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                    /* if symbol is already defined, then put complete type */
                    sym->type = type;

                } else {
                    /* put function symbol */
                    sym = global_identifier_push(v, type.t, 0);
                    sym->type.ref = type.ref;
                }

                sym->type.ref->a.func_body = 1;
                sym->r = VT_SYM | VT_CONST;

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
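                /* e.g. 'static inline int sq(int x) { return x * x; }' is only
                   tokenized into an InlineFunc record here; gen_inline_functions()
                   compiles it later, and only if sq() was actually referenced */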
                if ((type.t & (VT_INLINE | VT_STATIC)) ==
                    (VT_INLINE | VT_STATIC)) {
                    int block_level;
                    struct InlineFunc *fn;
                    const char *filename;

                    filename = file ? file->filename : "";
                    fn = tcc_malloc(sizeof *fn + strlen(filename));
                    strcpy(fn->filename, filename);
                    fn->sym = sym;
                    fn->func_str = tok_str_alloc();

                    block_level = 0;
                    for(;;) {
                        int t;
                        if (tok == TOK_EOF)
                            tcc_error("unexpected end of file");
                        tok_str_add_tok(fn->func_str);
                        t = tok;
                        next();
                        if (t == '{') {
                            block_level++;
                        } else if (t == '}') {
                            block_level--;
                            if (block_level == 0)
                                break;
                        }
                    }
                    tok_str_add(fn->func_str, -1);
                    tok_str_add(fn->func_str, 0);
                    dynarray_add(&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);

                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                      get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
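                    /* e.g. 'typedef int pair[2];' only records the name and
                       type; no storage is allocated for a typedef */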
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->a = ad.a;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= lvalue_type(type.t);
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if ((type.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
                        ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
                         !has_init && l == VT_CONST && type.ref->c < 0)) {
                        /* external variable or function */
                        /* NOTE: as GCC, uninitialized global static
                           arrays of null size are considered as
                           extern */
                        sym = external_sym(v, &type, r);
                        sym->asm_label = ad.asm_label;
                        if (ad.alias_target) {
                            Section tsec;
                            ElfW(Sym) *esym;
                            Sym *alias_target;

                            alias_target = sym_find(ad.alias_target);
                            if (!alias_target || !alias_target->c)
                                tcc_error("unsupported forward __alias__ attribute");
                            esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
                            tsec.sh_num = esym->st_shndx;
                            put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
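                            /* e.g. 'void new_fn(void) __attribute__((alias("old_fn")));'
                               points the new symbol at old_fn's section, value and size */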
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
            ad.a.aligned = 0;
        }
    }
    return 0;
}

ST_FUNC void decl(int l)
{
    decl0(l, 0);
}

/* ------------------------------------------------------------------------- */