[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack location of the variable that holds the saved stack pointer, used when the stack pointer is modified by VLAs */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void unary_type(CType *type);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static void expr_type(CType *type);
90 static inline int64_t expr_const64(void);
91 ST_FUNC void vpush64(int ty, unsigned long long v);
92 ST_FUNC void vpush(CType *type);
93 ST_FUNC int gvtst(int inv, int t);
94 ST_FUNC int is_btype_size(int bt);
95 static void gen_inline_functions(TCCState *s);
97 ST_INLN int is_float(int t)
99 int bt;
100 bt = t & VT_BTYPE;
101 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
104 /* we use our own 'finite' function to avoid potential problems with
105 non-standard math libs */
106 /* XXX: endianness dependent */
107 ST_FUNC int ieee_finite(double d)
109 int p[4];
110 memcpy(p, &d, sizeof(double));
111 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
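/* For illustration (assuming the usual little-endian IEEE-754 layout):
   p[1] holds the sign bit, the 11-bit exponent and the top of the mantissa.
   OR-ing with 0x800fffff forces every non-exponent bit to 1, so the word is
   0xffffffff exactly when the exponent field is all ones (Inf or NaN); the
   +1 then wraps to 0 and the shift yields 0. For any finite value the carry
   stops inside the exponent field, bit 31 stays set and the result is 1. */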
114 ST_FUNC void test_lvalue(void)
116 if (!(vtop->r & VT_LVAL))
117 expect("lvalue");
120 ST_FUNC void check_vstack(void)
122 if (pvtop != vtop)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
129 #if 0
130 void pv (const char *lbl, int a, int b)
132 int i;
133 for (i = a; i < a + b; ++i) {
134 SValue *p = &vtop[-i];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
139 #endif
141 /* ------------------------------------------------------------------------- */
142 /* start of translation unit info */
143 ST_FUNC void tcc_debug_start(TCCState *s1)
145 if (s1->do_debug) {
146 char buf[512];
148 /* file info: full path + filename */
149 section_sym = put_elf_sym(symtab_section, 0, 0,
150 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
151 text_section->sh_num, NULL);
152 getcwd(buf, sizeof(buf));
153 #ifdef _WIN32
154 normalize_slashes(buf);
155 #endif
156 pstrcat(buf, sizeof(buf), "/");
157 put_stabs_r(buf, N_SO, 0, 0,
158 text_section->data_offset, text_section, section_sym);
159 put_stabs_r(file->filename, N_SO, 0, 0,
160 text_section->data_offset, text_section, section_sym);
161 last_ind = 0;
162 last_line_num = 0;
165 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
166 symbols can be safely used */
167 put_elf_sym(symtab_section, 0, 0,
168 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
169 SHN_ABS, file->filename);
172 /* put end of translation unit info */
173 ST_FUNC void tcc_debug_end(TCCState *s1)
175 if (!s1->do_debug)
176 return;
177 put_stabs_r(NULL, N_SO, 0, 0,
178 text_section->data_offset, text_section, section_sym);
182 /* generate line number info */
183 ST_FUNC void tcc_debug_line(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 if ((last_line_num != file->line_num || last_ind != ind)) {
188 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
189 last_ind = ind;
190 last_line_num = file->line_num;
194 /* put function symbol */
195 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
197 char buf[512];
199 if (!s1->do_debug)
200 return;
202 /* stabs info */
203 /* XXX: we put here a dummy type */
204 snprintf(buf, sizeof(buf), "%s:%c1",
205 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
206 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
207 cur_text_section, sym->c);
208 /* //gr gdb wants a line at the function */
209 put_stabn(N_SLINE, 0, file->line_num, 0);
211 last_ind = 0;
212 last_line_num = 0;
215 /* put function size */
216 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
218 if (!s1->do_debug)
219 return;
220 put_stabn(N_FUN, 0, 0, size);
223 /* ------------------------------------------------------------------------- */
224 ST_FUNC void tccgen_start(TCCState *s1)
226 cur_text_section = NULL;
227 funcname = "";
228 anon_sym = SYM_FIRST_ANOM;
229 section_sym = 0;
230 const_wanted = 0;
231 nocode_wanted = 1;
233 /* define some often used types */
234 int_type.t = VT_INT;
235 char_pointer_type.t = VT_BYTE;
236 mk_pointer(&char_pointer_type);
237 #if PTR_SIZE == 4
238 size_type.t = VT_INT;
239 #else
240 size_type.t = VT_LLONG;
241 #endif
242 func_old_type.t = VT_FUNC;
243 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
245 tcc_debug_start(s1);
247 #ifdef TCC_TARGET_ARM
248 arm_init(s1);
249 #endif
252 ST_FUNC void tccgen_end(TCCState *s1)
254 gen_inline_functions(s1);
255 check_vstack();
256 /* end of translation unit info */
257 tcc_debug_end(s1);
260 /* ------------------------------------------------------------------------- */
261 /* apply storage attributes to ELF symbol */
263 static void update_storage(Sym *sym)
265 int t;
266 ElfW(Sym) *esym;
268 if (0 == sym->c)
269 return;
271 t = sym->type.t;
272 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
274 if (t & VT_VIS_MASK)
275 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
276 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
278 if (t & VT_WEAK)
279 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
281 #ifdef TCC_TARGET_PE
282 if (t & VT_EXPORT)
283 esym->st_other |= ST_PE_EXPORT;
284 #endif
287 /* ------------------------------------------------------------------------- */
288 /* update sym->c so that it points to an external symbol in section
289 'section' with value 'value' */
291 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
292 addr_t value, unsigned long size,
293 int can_add_underscore)
295 int sym_type, sym_bind, sh_num, info, other, t;
296 ElfW(Sym) *esym;
297 const char *name;
298 char buf1[256];
299 #ifdef CONFIG_TCC_BCHECK
300 char buf[32];
301 #endif
303 if (section == NULL)
304 sh_num = SHN_UNDEF;
305 else if (section == SECTION_ABS)
306 sh_num = SHN_ABS;
307 else if (section == SECTION_COMMON)
308 sh_num = SHN_COMMON;
309 else
310 sh_num = section->sh_num;
312 if (!sym->c) {
313 name = get_tok_str(sym->v, NULL);
314 #ifdef CONFIG_TCC_BCHECK
315 if (tcc_state->do_bounds_check) {
316 /* XXX: avoid doing that for statics ? */
317 /* if bound checking is activated, we change some function
318 names by adding the "__bound" prefix */
319 switch(sym->v) {
320 #ifdef TCC_TARGET_PE
321 /* XXX: we rely only on malloc hooks */
322 case TOK_malloc:
323 case TOK_free:
324 case TOK_realloc:
325 case TOK_memalign:
326 case TOK_calloc:
327 #endif
328 case TOK_memcpy:
329 case TOK_memmove:
330 case TOK_memset:
331 case TOK_strlen:
332 case TOK_strcpy:
333 case TOK_alloca:
334 strcpy(buf, "__bound_");
335 strcat(buf, name);
336 name = buf;
337 break;
340 #endif
341 t = sym->type.t;
342 if ((t & VT_BTYPE) == VT_FUNC) {
343 sym_type = STT_FUNC;
344 } else if ((t & VT_BTYPE) == VT_VOID) {
345 sym_type = STT_NOTYPE;
346 } else {
347 sym_type = STT_OBJECT;
349 if (t & VT_STATIC)
350 sym_bind = STB_LOCAL;
351 else
352 sym_bind = STB_GLOBAL;
353 other = 0;
354 #ifdef TCC_TARGET_PE
355 if (sym_type == STT_FUNC && sym->type.ref) {
356 Sym *ref = sym->type.ref;
357 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
358 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
359 name = buf1;
360 other |= ST_PE_STDCALL;
361 can_add_underscore = 0;
364 if (t & VT_IMPORT)
365 other |= ST_PE_IMPORT;
366 #endif
367 if (tcc_state->leading_underscore && can_add_underscore) {
368 buf1[0] = '_';
369 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
370 name = buf1;
372 if (sym->asm_label)
373 name = get_tok_str(sym->asm_label, NULL);
374 info = ELFW(ST_INFO)(sym_bind, sym_type);
375 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
376 } else {
377 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
378 esym->st_value = value;
379 esym->st_size = size;
380 esym->st_shndx = sh_num;
382 update_storage(sym);
385 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
386 addr_t value, unsigned long size)
388 put_extern_sym2(sym, section, value, size, 1);
391 /* add a new relocation entry to symbol 'sym' in section 's' */
392 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
393 addr_t addend)
395 int c = 0;
397 if (nocode_wanted && s == cur_text_section)
398 return;
400 if (sym) {
401 if (0 == sym->c)
402 put_extern_sym(sym, NULL, 0, 0);
403 c = sym->c;
406 /* now we can add ELF relocation info */
407 put_elf_reloca(symtab_section, s, offset, type, c, addend);
410 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
412 greloca(s, sym, offset, type, 0);
415 /* ------------------------------------------------------------------------- */
416 /* symbol allocator */
417 static Sym *__sym_malloc(void)
419 Sym *sym_pool, *sym, *last_sym;
420 int i;
422 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
423 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
425 last_sym = sym_free_first;
426 sym = sym_pool;
427 for(i = 0; i < SYM_POOL_NB; i++) {
428 sym->next = last_sym;
429 last_sym = sym;
430 sym++;
432 sym_free_first = last_sym;
433 return last_sym;
436 static inline Sym *sym_malloc(void)
438 Sym *sym;
439 #ifndef SYM_DEBUG
440 sym = sym_free_first;
441 if (!sym)
442 sym = __sym_malloc();
443 sym_free_first = sym->next;
444 return sym;
445 #else
446 sym = tcc_malloc(sizeof(Sym));
447 return sym;
448 #endif
451 ST_INLN void sym_free(Sym *sym)
453 #ifndef SYM_DEBUG
454 sym->next = sym_free_first;
455 sym_free_first = sym;
456 #else
457 tcc_free(sym);
458 #endif
461 /* push, without hashing */
462 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
464 Sym *s;
466 s = sym_malloc();
467 s->scope = 0;
468 s->v = v;
469 s->type.t = t;
470 s->type.ref = NULL;
471 #ifdef _WIN64
472 s->d = NULL;
473 #endif
474 s->c = c;
475 s->next = NULL;
476 /* add in stack */
477 s->prev = *ps;
478 *ps = s;
479 return s;
482 /* find a symbol and return its associated structure. 's' is the top
483 of the symbol stack */
484 ST_FUNC Sym *sym_find2(Sym *s, int v)
486 while (s) {
487 if (s->v == v)
488 return s;
489 else if (s->v == -1)
490 return NULL;
491 s = s->prev;
493 return NULL;
496 /* structure lookup */
497 ST_INLN Sym *struct_find(int v)
499 v -= TOK_IDENT;
500 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
501 return NULL;
502 return table_ident[v]->sym_struct;
505 /* find an identifier */
506 ST_INLN Sym *sym_find(int v)
508 v -= TOK_IDENT;
509 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
510 return NULL;
511 return table_ident[v]->sym_identifier;
514 /* push a given symbol on the symbol stack */
515 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
517 Sym *s, **ps;
518 TokenSym *ts;
520 if (local_stack)
521 ps = &local_stack;
522 else
523 ps = &global_stack;
524 s = sym_push2(ps, v, type->t, c);
525 s->type.ref = type->ref;
526 s->r = r;
527 /* don't record fields or anonymous symbols */
528 /* XXX: simplify */
529 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
530 /* record symbol in token array */
531 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
532 if (v & SYM_STRUCT)
533 ps = &ts->sym_struct;
534 else
535 ps = &ts->sym_identifier;
536 s->prev_tok = *ps;
537 *ps = s;
538 s->scope = local_scope;
539 if (s->prev_tok && s->prev_tok->scope == s->scope)
540 tcc_error("redeclaration of '%s'",
541 get_tok_str(v & ~SYM_STRUCT, NULL));
543 return s;
546 /* push a global identifier */
547 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
549 Sym *s, **ps;
550 s = sym_push2(&global_stack, v, t, c);
551 /* don't record anonymous symbol */
552 if (v < SYM_FIRST_ANOM) {
553 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
554 /* modify the top most local identifier, so that
555 sym_identifier will point to 's' when popped */
556 while (*ps != NULL)
557 ps = &(*ps)->prev_tok;
558 s->prev_tok = NULL;
559 *ps = s;
561 return s;
564 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
565 pop them yet from the list, but do remove them from the token array. */
566 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
568 Sym *s, *ss, **ps;
569 TokenSym *ts;
570 int v;
572 s = *ptop;
573 while(s != b) {
574 ss = s->prev;
575 v = s->v;
576 /* remove symbol in token array */
577 /* XXX: simplify */
578 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
579 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
580 if (v & SYM_STRUCT)
581 ps = &ts->sym_struct;
582 else
583 ps = &ts->sym_identifier;
584 *ps = s->prev_tok;
586 if (!keep)
587 sym_free(s);
588 s = ss;
590 if (!keep)
591 *ptop = b;
594 /* ------------------------------------------------------------------------- */
596 static void vsetc(CType *type, int r, CValue *vc)
598 int v;
600 if (vtop >= vstack + (VSTACK_SIZE - 1))
601 tcc_error("memory full (vstack)");
602 /* cannot leave cpu flags live if other instructions are generated. Also
603 avoid leaving VT_JMP anywhere except on the top of the stack
604 because it would complicate the code generator.
606 Don't do this when nocode_wanted. vtop might come from
607 !nocode_wanted regions (see 88_codeopt.c) and transforming
608 it to a register without actually generating code is wrong
609 as their value might still be used for real. All values
610 we push under nocode_wanted will eventually be popped
611 again, so that the VT_CMP/VT_JMP value will be in vtop
612 when code is unsuppressed again.
614 Same logic below in vswap(); */
615 if (vtop >= vstack && !nocode_wanted) {
616 v = vtop->r & VT_VALMASK;
617 if (v == VT_CMP || (v & ~1) == VT_JMP)
618 gv(RC_INT);
621 vtop++;
622 vtop->type = *type;
623 vtop->r = r;
624 vtop->r2 = VT_CONST;
625 vtop->c = *vc;
626 vtop->sym = NULL;
629 ST_FUNC void vswap(void)
631 SValue tmp;
632 /* cannot vswap cpu flags. See comment at vsetc() above */
633 if (vtop >= vstack && !nocode_wanted) {
634 int v = vtop->r & VT_VALMASK;
635 if (v == VT_CMP || (v & ~1) == VT_JMP)
636 gv(RC_INT);
638 tmp = vtop[0];
639 vtop[0] = vtop[-1];
640 vtop[-1] = tmp;
643 /* pop stack value */
644 ST_FUNC void vpop(void)
646 int v;
647 v = vtop->r & VT_VALMASK;
648 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
649 /* for x86, we need to pop the FP stack */
650 if (v == TREG_ST0) {
651 o(0xd8dd); /* fstp %st(0) */
652 } else
653 #endif
654 if (v == VT_JMP || v == VT_JMPI) {
655 /* need to put correct jump if && or || without test */
656 gsym(vtop->c.i);
658 vtop--;
661 /* push constant of type "type" with an unspecified (don't-care) value */
662 ST_FUNC void vpush(CType *type)
664 CValue cval;
665 vsetc(type, VT_CONST, &cval);
668 /* push integer constant */
669 ST_FUNC void vpushi(int v)
671 CValue cval;
672 cval.i = v;
673 vsetc(&int_type, VT_CONST, &cval);
676 /* push a pointer sized constant */
677 static void vpushs(addr_t v)
679 CValue cval;
680 cval.i = v;
681 vsetc(&size_type, VT_CONST, &cval);
684 /* push arbitrary 64bit constant */
685 ST_FUNC void vpush64(int ty, unsigned long long v)
687 CValue cval;
688 CType ctype;
689 ctype.t = ty;
690 ctype.ref = NULL;
691 cval.i = v;
692 vsetc(&ctype, VT_CONST, &cval);
695 /* push long long constant */
696 static inline void vpushll(long long v)
698 vpush64(VT_LLONG, v);
701 ST_FUNC void vset(CType *type, int r, long v)
703 CValue cval;
705 cval.i = v;
706 vsetc(type, r, &cval);
709 static void vseti(int r, int v)
711 CType type;
712 type.t = VT_INT;
713 type.ref = 0;
714 vset(&type, r, v);
717 ST_FUNC void vpushv(SValue *v)
719 if (vtop >= vstack + (VSTACK_SIZE - 1))
720 tcc_error("memory full (vstack)");
721 vtop++;
722 *vtop = *v;
725 static void vdup(void)
727 vpushv(vtop);
730 /* rotate n first stack elements to the bottom
731 I1 ... In -> I2 ... In I1 [top is right] */
733 ST_FUNC void vrotb(int n)
735 int i;
736 SValue tmp;
738 tmp = vtop[-n + 1];
739 for(i=-n+1;i!=0;i++)
740 vtop[i] = vtop[i+1];
741 vtop[0] = tmp;
744 /* rotate the n elements before entry e towards the top
745 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
747 ST_FUNC void vrote(SValue *e, int n)
749 int i;
750 SValue tmp;
752 tmp = *e;
753 for(i = 0;i < n - 1; i++)
754 e[-i] = e[-i - 1];
755 e[-n + 1] = tmp;
758 /* rotate n first stack elements to the top
759 I1 ... In -> In I1 ... I(n-1) [top is right] */
761 ST_FUNC void vrott(int n)
763 vrote(vtop, n);
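/* Concrete example, top of stack written rightmost: with the top three
   entries ... A B C, vrotb(3) gives ... B C A (the deepest entry of the
   group moves to the top), while vrott(3) gives ... C A B (the top entry
   moves to the bottom of the group). */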
766 /* push a symbol value of TYPE */
767 static inline void vpushsym(CType *type, Sym *sym)
769 CValue cval;
770 cval.i = 0;
771 vsetc(type, VT_CONST | VT_SYM, &cval);
772 vtop->sym = sym;
775 /* Return a static symbol pointing to a section */
776 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
778 int v;
779 Sym *sym;
781 v = anon_sym++;
782 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
783 sym->type.ref = type->ref;
784 sym->r = VT_CONST | VT_SYM;
785 put_extern_sym(sym, sec, offset, size);
786 return sym;
789 /* push a reference to a section offset by adding a dummy symbol */
790 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
792 vpushsym(type, get_sym_ref(type, sec, offset, size));
795 /* define a new external reference to a symbol 'v' of type 'u' */
796 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
798 Sym *s;
800 s = sym_find(v);
801 if (!s) {
802 /* push forward reference */
803 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
804 s->type.ref = type->ref;
805 s->r = r | VT_CONST | VT_SYM;
807 return s;
810 /* Merge some storage attributes. */
811 static void patch_storage(Sym *sym, CType *type)
813 int t;
814 if (!is_compatible_types(&sym->type, type))
815 tcc_error("incompatible types for redefinition of '%s'",
816 get_tok_str(sym->v, NULL));
817 t = type->t;
818 #ifdef TCC_TARGET_PE
819 if ((sym->type.t ^ t) & VT_IMPORT)
820 tcc_error("incompatible dll linkage for redefinition of '%s'",
821 get_tok_str(sym->v, NULL));
822 #endif
823 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
824 if (t & VT_VIS_MASK) {
825 int vis = sym->type.t & VT_VIS_MASK;
826 int vis2 = t & VT_VIS_MASK;
827 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
828 vis = vis2;
829 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
830 vis = (vis < vis2) ? vis : vis2;
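/* In the ELF encoding STV_DEFAULT(0) < STV_INTERNAL(1) < STV_HIDDEN(2) <
   STV_PROTECTED(3), so taking the minimum of two non-default visibilities
   keeps the more restrictive one. */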
831 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
835 /* define a new external reference to a symbol 'v' */
836 static Sym *external_sym(int v, CType *type, int r)
838 Sym *s;
839 s = sym_find(v);
840 if (!s) {
841 /* push forward reference */
842 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
843 s->type.t |= VT_EXTERN;
844 } else {
845 if (s->type.ref == func_old_type.ref) {
846 s->type.ref = type->ref;
847 s->r = r | VT_CONST | VT_SYM;
848 s->type.t |= VT_EXTERN;
850 patch_storage(s, type);
851 update_storage(s);
853 return s;
856 /* push a reference to global symbol v */
857 ST_FUNC void vpush_global_sym(CType *type, int v)
859 vpushsym(type, external_global_sym(v, type, 0));
862 /* save registers up to (vtop - n) stack entry */
863 ST_FUNC void save_regs(int n)
865 SValue *p, *p1;
866 for(p = vstack, p1 = vtop - n; p <= p1; p++)
867 save_reg(p->r);
870 /* save r to the memory stack, and mark it as being free */
871 ST_FUNC void save_reg(int r)
873 save_reg_upstack(r, 0);
876 /* save r to the memory stack, and mark it as being free,
877 if seen up to (vtop - n) stack entry */
878 ST_FUNC void save_reg_upstack(int r, int n)
880 int l, saved, size, align;
881 SValue *p, *p1, sv;
882 CType *type;
884 if ((r &= VT_VALMASK) >= VT_CONST)
885 return;
886 if (nocode_wanted)
887 return;
889 /* modify all stack values */
890 saved = 0;
891 l = 0;
892 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
893 if ((p->r & VT_VALMASK) == r ||
894 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
895 /* must save value on stack if not already done */
896 if (!saved) {
897 /* NOTE: must reload 'r' because r might be equal to r2 */
898 r = p->r & VT_VALMASK;
899 /* store register in the stack */
900 type = &p->type;
901 if ((p->r & VT_LVAL) ||
902 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
903 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
904 type = &char_pointer_type;
905 #else
906 type = &int_type;
907 #endif
908 size = type_size(type, &align);
909 loc = (loc - size) & -align;
910 sv.type.t = type->t;
911 sv.r = VT_LOCAL | VT_LVAL;
912 sv.c.i = loc;
913 store(r, &sv);
914 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
915 /* x86 specific: need to pop fp register ST0 if saved */
916 if (r == TREG_ST0) {
917 o(0xd8dd); /* fstp %st(0) */
919 #endif
920 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
921 /* special long long case */
922 if ((type->t & VT_BTYPE) == VT_LLONG) {
923 sv.c.i += 4;
924 store(p->r2, &sv);
926 #endif
927 l = loc;
928 saved = 1;
930 /* mark that stack entry as being saved on the stack */
931 if (p->r & VT_LVAL) {
932 /* also clear the bounded flag because the
933 relocation address of the function was stored in
934 p->c.i */
935 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
936 } else {
937 p->r = lvalue_type(p->type.t) | VT_LOCAL;
939 p->r2 = VT_CONST;
940 p->c.i = l;
945 #ifdef TCC_TARGET_ARM
946 /* find a register of class 'rc2' with at most one reference on stack.
947 * If none, call get_reg(rc) */
948 ST_FUNC int get_reg_ex(int rc, int rc2)
950 int r;
951 SValue *p;
953 for(r=0;r<NB_REGS;r++) {
954 if (reg_classes[r] & rc2) {
955 int n;
956 n=0;
957 for(p = vstack; p <= vtop; p++) {
958 if ((p->r & VT_VALMASK) == r ||
959 (p->r2 & VT_VALMASK) == r)
960 n++;
962 if (n <= 1)
963 return r;
966 return get_reg(rc);
968 #endif
970 /* find a free register of class 'rc'. If none, save one register */
971 ST_FUNC int get_reg(int rc)
973 int r;
974 SValue *p;
976 /* find a free register */
977 for(r=0;r<NB_REGS;r++) {
978 if (reg_classes[r] & rc) {
979 if (nocode_wanted)
980 return r;
981 for(p=vstack;p<=vtop;p++) {
982 if ((p->r & VT_VALMASK) == r ||
983 (p->r2 & VT_VALMASK) == r)
984 goto notfound;
986 return r;
988 notfound: ;
991 /* no register left : free the first one on the stack (VERY
992 IMPORTANT to start from the bottom to ensure that we don't
993 spill registers used in gen_opi()) */
994 for(p=vstack;p<=vtop;p++) {
995 /* look at second register (if long long) */
996 r = p->r2 & VT_VALMASK;
997 if (r < VT_CONST && (reg_classes[r] & rc))
998 goto save_found;
999 r = p->r & VT_VALMASK;
1000 if (r < VT_CONST && (reg_classes[r] & rc)) {
1001 save_found:
1002 save_reg(r);
1003 return r;
1006 /* Should never come here */
1007 return -1;
1010 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1011 if needed */
1012 static void move_reg(int r, int s, int t)
1014 SValue sv;
1016 if (r != s) {
1017 save_reg(r);
1018 sv.type.t = t;
1019 sv.type.ref = NULL;
1020 sv.r = s;
1021 sv.c.i = 0;
1022 load(r, &sv);
1026 /* get address of vtop (vtop MUST BE an lvalue) */
1027 ST_FUNC void gaddrof(void)
1029 if (vtop->r & VT_REF)
1030 gv(RC_INT);
1031 vtop->r &= ~VT_LVAL;
1032 /* tricky: if saved lvalue, then we can go back to lvalue */
1033 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1034 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1039 #ifdef CONFIG_TCC_BCHECK
1040 /* generate lvalue bound code */
1041 static void gbound(void)
1043 int lval_type;
1044 CType type1;
1046 vtop->r &= ~VT_MUSTBOUND;
1047 /* if lvalue, then use checking code before dereferencing */
1048 if (vtop->r & VT_LVAL) {
1049 /* if not VT_BOUNDED value, then make one */
1050 if (!(vtop->r & VT_BOUNDED)) {
1051 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1052 /* must save type because we must set it to int to get pointer */
1053 type1 = vtop->type;
1054 vtop->type.t = VT_PTR;
1055 gaddrof();
1056 vpushi(0);
1057 gen_bounded_ptr_add();
1058 vtop->r |= lval_type;
1059 vtop->type = type1;
1061 /* then check for dereferencing */
1062 gen_bounded_ptr_deref();
1065 #endif
1067 /* store vtop in a register belonging to class 'rc'. lvalues are
1068 converted to values. Cannot be used if the value cannot be
1069 converted to a register value (such as structures). */
1070 ST_FUNC int gv(int rc)
1072 int r, bit_pos, bit_size, size, align, i;
1073 int rc2;
1075 /* NOTE: get_reg can modify vstack[] */
1076 if (vtop->type.t & VT_BITFIELD) {
1077 CType type;
1078 int bits = 32;
1079 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1080 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1081 /* remove bit field info to avoid loops */
1082 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1083 /* cast to int to propagate signedness in following ops */
1084 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1085 type.t = VT_LLONG;
1086 bits = 64;
1087 } else
1088 type.t = VT_INT;
1089 if((vtop->type.t & VT_UNSIGNED) ||
1090 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1091 type.t |= VT_UNSIGNED;
1092 gen_cast(&type);
1093 /* generate shifts */
1094 vpushi(bits - (bit_pos + bit_size));
1095 gen_op(TOK_SHL);
1096 vpushi(bits - bit_size);
1097 /* NOTE: transformed to SHR if unsigned */
1098 gen_op(TOK_SAR);
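/* e.g. a 5-bit field at bit position 3 in a 32-bit word: shift left by
   32 - (3 + 5) = 24 to bring the field to the top bits, then shift right
   by 32 - 5 = 27 (arithmetic if signed, logical if unsigned) to leave the
   sign/zero-extended field in the low bits. */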
1099 r = gv(rc);
1100 } else {
1101 if (is_float(vtop->type.t) &&
1102 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1103 Sym *sym;
1104 int *ptr;
1105 unsigned long offset;
1106 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1107 CValue check;
1108 #endif
1110 /* XXX: unify with initializers handling ? */
1111 /* CPUs usually cannot use float constants directly, so we store them
1112 in the data segment */
1113 size = type_size(&vtop->type, &align);
1114 offset = (data_section->data_offset + align - 1) & -align;
1115 data_section->data_offset = offset;
1116 /* XXX: not portable yet */
1117 #if defined(__i386__) || defined(__x86_64__)
1118 /* Zero pad x87 tenbyte long doubles */
1119 if (size == LDOUBLE_SIZE) {
1120 vtop->c.tab[2] &= 0xffff;
1121 #if LDOUBLE_SIZE == 16
1122 vtop->c.tab[3] = 0;
1123 #endif
1125 #endif
1126 ptr = section_ptr_add(data_section, size);
1127 size = size >> 2;
1128 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1129 check.d = 1;
1130 if(check.tab[0])
1131 for(i=0;i<size;i++)
1132 ptr[i] = vtop->c.tab[size-1-i];
1133 else
1134 #endif
1135 for(i=0;i<size;i++)
1136 ptr[i] = vtop->c.tab[i];
1137 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1138 vtop->r |= VT_LVAL | VT_SYM;
1139 vtop->sym = sym;
1140 vtop->c.i = 0;
1142 #ifdef CONFIG_TCC_BCHECK
1143 if (vtop->r & VT_MUSTBOUND)
1144 gbound();
1145 #endif
1147 r = vtop->r & VT_VALMASK;
1148 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1149 #ifndef TCC_TARGET_ARM64
1150 if (rc == RC_IRET)
1151 rc2 = RC_LRET;
1152 #ifdef TCC_TARGET_X86_64
1153 else if (rc == RC_FRET)
1154 rc2 = RC_QRET;
1155 #endif
1156 #endif
1157 /* need to reload if:
1158 - constant
1159 - lvalue (need to dereference pointer)
1160 - already a register, but not in the right class */
1161 if (r >= VT_CONST
1162 || (vtop->r & VT_LVAL)
1163 || !(reg_classes[r] & rc)
1164 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1165 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1166 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1167 #else
1168 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1169 #endif
1172 r = get_reg(rc);
1173 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1174 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1175 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1176 #else
1177 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1178 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1179 unsigned long long ll;
1180 #endif
1181 int r2, original_type;
1182 original_type = vtop->type.t;
1183 /* two register type load : expand to two words
1184 temporarily */
1185 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1186 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1187 /* load constant */
1188 ll = vtop->c.i;
1189 vtop->c.i = ll; /* first word */
1190 load(r, vtop);
1191 vtop->r = r; /* save register value */
1192 vpushi(ll >> 32); /* second word */
1193 } else
1194 #endif
1195 if (vtop->r & VT_LVAL) {
1196 /* We do not want to modify the long long
1197 pointer here, so the safest (and least
1198 efficient) way is to save all the other registers
1199 on the stack. XXX: totally inefficient. */
1200 #if 0
1201 save_regs(1);
1202 #else
1203 /* lvalue_save: save only if used further down the stack */
1204 save_reg_upstack(vtop->r, 1);
1205 #endif
1206 /* load from memory */
1207 vtop->type.t = load_type;
1208 load(r, vtop);
1209 vdup();
1210 vtop[-1].r = r; /* save register value */
1211 /* increment pointer to get second word */
1212 vtop->type.t = addr_type;
1213 gaddrof();
1214 vpushi(load_size);
1215 gen_op('+');
1216 vtop->r |= VT_LVAL;
1217 vtop->type.t = load_type;
1218 } else {
1219 /* move registers */
1220 load(r, vtop);
1221 vdup();
1222 vtop[-1].r = r; /* save register value */
1223 vtop->r = vtop[-1].r2;
1225 /* Allocate second register. Here we rely on the fact that
1226 get_reg() tries first to free r2 of an SValue. */
1227 r2 = get_reg(rc2);
1228 load(r2, vtop);
1229 vpop();
1230 /* write second register */
1231 vtop->r2 = r2;
1232 vtop->type.t = original_type;
1233 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1234 int t1, t;
1235 /* lvalue of scalar type : need to use lvalue type
1236 because of possible cast */
1237 t = vtop->type.t;
1238 t1 = t;
1239 /* compute memory access type */
1240 if (vtop->r & VT_REF)
1241 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1242 t = VT_PTR;
1243 #else
1244 t = VT_INT;
1245 #endif
1246 else if (vtop->r & VT_LVAL_BYTE)
1247 t = VT_BYTE;
1248 else if (vtop->r & VT_LVAL_SHORT)
1249 t = VT_SHORT;
1250 if (vtop->r & VT_LVAL_UNSIGNED)
1251 t |= VT_UNSIGNED;
1252 vtop->type.t = t;
1253 load(r, vtop);
1254 /* restore wanted type */
1255 vtop->type.t = t1;
1256 } else {
1257 /* one register type load */
1258 load(r, vtop);
1261 vtop->r = r;
1262 #ifdef TCC_TARGET_C67
1263 /* uses register pairs for doubles */
1264 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1265 vtop->r2 = r+1;
1266 #endif
1268 return r;
1271 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1272 ST_FUNC void gv2(int rc1, int rc2)
1274 int v;
1276 /* generate more generic register first. But VT_JMP or VT_CMP
1277 values must be generated first in all cases to avoid possible
1278 reload errors */
1279 v = vtop[0].r & VT_VALMASK;
1280 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1281 vswap();
1282 gv(rc1);
1283 vswap();
1284 gv(rc2);
1285 /* test if reload is needed for first register */
1286 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1287 vswap();
1288 gv(rc1);
1289 vswap();
1291 } else {
1292 gv(rc2);
1293 vswap();
1294 gv(rc1);
1295 vswap();
1296 /* test if reload is needed for first register */
1297 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1298 gv(rc2);
1303 #ifndef TCC_TARGET_ARM64
1304 /* wrapper around RC_FRET to return a register by type */
1305 static int rc_fret(int t)
1307 #ifdef TCC_TARGET_X86_64
1308 if (t == VT_LDOUBLE) {
1309 return RC_ST0;
1311 #endif
1312 return RC_FRET;
1314 #endif
1316 /* wrapper around REG_FRET to return a register by type */
1317 static int reg_fret(int t)
1319 #ifdef TCC_TARGET_X86_64
1320 if (t == VT_LDOUBLE) {
1321 return TREG_ST0;
1323 #endif
1324 return REG_FRET;
1327 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1328 /* expand 64bit on stack in two ints */
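/* After lexpand() the single 64-bit entry is represented by two VT_INT
   entries: vtop[-1] holds the low 32 bits and vtop[0] the high 32 bits
   (for lvalues the high word is addressed at offset +4, which assumes a
   little-endian layout). */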
1329 static void lexpand(void)
1331 int u, v;
1332 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1333 v = vtop->r & (VT_VALMASK | VT_LVAL);
1334 if (v == VT_CONST) {
1335 vdup();
1336 vtop[0].c.i >>= 32;
1337 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1338 vdup();
1339 vtop[0].c.i += 4;
1340 } else {
1341 gv(RC_INT);
1342 vdup();
1343 vtop[0].r = vtop[-1].r2;
1344 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1346 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1348 #endif
1350 #ifdef TCC_TARGET_ARM
1351 /* expand long long on stack */
1352 ST_FUNC void lexpand_nr(void)
1354 int u,v;
1356 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1357 vdup();
1358 vtop->r2 = VT_CONST;
1359 vtop->type.t = VT_INT | u;
1360 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1361 if (v == VT_CONST) {
1362 vtop[-1].c.i = vtop->c.i;
1363 vtop->c.i = vtop->c.i >> 32;
1364 vtop->r = VT_CONST;
1365 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1366 vtop->c.i += 4;
1367 vtop->r = vtop[-1].r;
1368 } else if (v > VT_CONST) {
1369 vtop--;
1370 lexpand();
1371 } else
1372 vtop->r = vtop[-1].r2;
1373 vtop[-1].r2 = VT_CONST;
1374 vtop[-1].type.t = VT_INT | u;
1376 #endif
1378 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1379 /* build a long long from two ints */
1380 static void lbuild(int t)
1382 gv2(RC_INT, RC_INT);
1383 vtop[-1].r2 = vtop[0].r;
1384 vtop[-1].type.t = t;
1385 vpop();
1387 #endif
1389 /* convert stack entry to register and duplicate its value in another
1390 register */
1391 static void gv_dup(void)
1393 int rc, t, r, r1;
1394 SValue sv;
1396 t = vtop->type.t;
1397 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1398 if ((t & VT_BTYPE) == VT_LLONG) {
1399 lexpand();
1400 gv_dup();
1401 vswap();
1402 vrotb(3);
1403 gv_dup();
1404 vrotb(4);
1405 /* stack: H L L1 H1 */
1406 lbuild(t);
1407 vrotb(3);
1408 vrotb(3);
1409 vswap();
1410 lbuild(t);
1411 vswap();
1412 } else
1413 #endif
1415 /* duplicate value */
1416 rc = RC_INT;
1417 sv.type.t = VT_INT;
1418 if (is_float(t)) {
1419 rc = RC_FLOAT;
1420 #ifdef TCC_TARGET_X86_64
1421 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1422 rc = RC_ST0;
1424 #endif
1425 sv.type.t = t;
1427 r = gv(rc);
1428 r1 = get_reg(rc);
1429 sv.r = r;
1430 sv.c.i = 0;
1431 load(r1, &sv); /* move r to r1 */
1432 vdup();
1433 /* duplicates value */
1434 if (r != r1)
1435 vtop->r = r1;
1439 /* Generate value test
1441 * Generate a test for any value (jump, comparison and integers) */
1442 ST_FUNC int gvtst(int inv, int t)
1444 int v = vtop->r & VT_VALMASK;
1445 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1446 vpushi(0);
1447 gen_op(TOK_NE);
1449 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1450 /* constant jmp optimization */
1451 if ((vtop->c.i != 0) != inv)
1452 t = gjmp(t);
1453 vtop--;
1454 return t;
1456 return gtst(inv, t);
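/* Roughly: gvtst(inv, t) turns the value on top of the stack into a
   forward jump that is taken when the value is true if inv == 0, or when
   it is false if inv == 1; the new jump is chained onto the existing jump
   list 't' and the head of the chain is returned. */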
1459 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1460 /* generate CPU independent (unsigned) long long operations */
1461 static void gen_opl(int op)
1463 int t, a, b, op1, c, i;
1464 int func;
1465 unsigned short reg_iret = REG_IRET;
1466 unsigned short reg_lret = REG_LRET;
1467 SValue tmp;
1469 switch(op) {
1470 case '/':
1471 case TOK_PDIV:
1472 func = TOK___divdi3;
1473 goto gen_func;
1474 case TOK_UDIV:
1475 func = TOK___udivdi3;
1476 goto gen_func;
1477 case '%':
1478 func = TOK___moddi3;
1479 goto gen_mod_func;
1480 case TOK_UMOD:
1481 func = TOK___umoddi3;
1482 gen_mod_func:
1483 #ifdef TCC_ARM_EABI
1484 reg_iret = TREG_R2;
1485 reg_lret = TREG_R3;
1486 #endif
1487 gen_func:
1488 /* call generic long long function */
1489 vpush_global_sym(&func_old_type, func);
1490 vrott(3);
1491 gfunc_call(2);
1492 vpushi(0);
1493 vtop->r = reg_iret;
1494 vtop->r2 = reg_lret;
1495 break;
1496 case '^':
1497 case '&':
1498 case '|':
1499 case '*':
1500 case '+':
1501 case '-':
1502 //pv("gen_opl A",0,2);
1503 t = vtop->type.t;
1504 vswap();
1505 lexpand();
1506 vrotb(3);
1507 lexpand();
1508 /* stack: L1 H1 L2 H2 */
1509 tmp = vtop[0];
1510 vtop[0] = vtop[-3];
1511 vtop[-3] = tmp;
1512 tmp = vtop[-2];
1513 vtop[-2] = vtop[-3];
1514 vtop[-3] = tmp;
1515 vswap();
1516 /* stack: H1 H2 L1 L2 */
1517 //pv("gen_opl B",0,4);
1518 if (op == '*') {
1519 vpushv(vtop - 1);
1520 vpushv(vtop - 1);
1521 gen_op(TOK_UMULL);
1522 lexpand();
1523 /* stack: H1 H2 L1 L2 ML MH */
1524 for(i=0;i<4;i++)
1525 vrotb(6);
1526 /* stack: ML MH H1 H2 L1 L2 */
1527 tmp = vtop[0];
1528 vtop[0] = vtop[-2];
1529 vtop[-2] = tmp;
1530 /* stack: ML MH H1 L2 H2 L1 */
1531 gen_op('*');
1532 vrotb(3);
1533 vrotb(3);
1534 gen_op('*');
1535 /* stack: ML MH M1 M2 */
1536 gen_op('+');
1537 gen_op('+');
1538 } else if (op == '+' || op == '-') {
1539 /* XXX: add non carry method too (for MIPS or alpha) */
1540 if (op == '+')
1541 op1 = TOK_ADDC1;
1542 else
1543 op1 = TOK_SUBC1;
1544 gen_op(op1);
1545 /* stack: H1 H2 (L1 op L2) */
1546 vrotb(3);
1547 vrotb(3);
1548 gen_op(op1 + 1); /* TOK_xxxC2 */
1549 } else {
1550 gen_op(op);
1551 /* stack: H1 H2 (L1 op L2) */
1552 vrotb(3);
1553 vrotb(3);
1554 /* stack: (L1 op L2) H1 H2 */
1555 gen_op(op);
1556 /* stack: (L1 op L2) (H1 op H2) */
1558 /* stack: L H */
1559 lbuild(t);
1560 break;
1561 case TOK_SAR:
1562 case TOK_SHR:
1563 case TOK_SHL:
1564 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1565 t = vtop[-1].type.t;
1566 vswap();
1567 lexpand();
1568 vrotb(3);
1569 /* stack: L H shift */
1570 c = (int)vtop->c.i;
1571 /* constant: simpler */
1572 /* NOTE: all comments are for SHL. The other cases are
1573 done by swapping words */
1574 vpop();
1575 if (op != TOK_SHL)
1576 vswap();
1577 if (c >= 32) {
1578 /* stack: L H */
1579 vpop();
1580 if (c > 32) {
1581 vpushi(c - 32);
1582 gen_op(op);
1584 if (op != TOK_SAR) {
1585 vpushi(0);
1586 } else {
1587 gv_dup();
1588 vpushi(31);
1589 gen_op(TOK_SAR);
1591 vswap();
1592 } else {
1593 vswap();
1594 gv_dup();
1595 /* stack: H L L */
1596 vpushi(c);
1597 gen_op(op);
1598 vswap();
1599 vpushi(32 - c);
1600 if (op == TOK_SHL)
1601 gen_op(TOK_SHR);
1602 else
1603 gen_op(TOK_SHL);
1604 vrotb(3);
1605 /* stack: L L H */
1606 vpushi(c);
1607 if (op == TOK_SHL)
1608 gen_op(TOK_SHL);
1609 else
1610 gen_op(TOK_SHR);
1611 gen_op('|');
1613 if (op != TOK_SHL)
1614 vswap();
1615 lbuild(t);
1616 } else {
1617 /* XXX: should provide a faster fallback on x86 ? */
1618 switch(op) {
1619 case TOK_SAR:
1620 func = TOK___ashrdi3;
1621 goto gen_func;
1622 case TOK_SHR:
1623 func = TOK___lshrdi3;
1624 goto gen_func;
1625 case TOK_SHL:
1626 func = TOK___ashldi3;
1627 goto gen_func;
1630 break;
1631 default:
1632 /* compare operations */
1633 t = vtop->type.t;
1634 vswap();
1635 lexpand();
1636 vrotb(3);
1637 lexpand();
1638 /* stack: L1 H1 L2 H2 */
1639 tmp = vtop[-1];
1640 vtop[-1] = vtop[-2];
1641 vtop[-2] = tmp;
1642 /* stack: L1 L2 H1 H2 */
1643 /* compare high */
1644 op1 = op;
1645 /* when values are equal, we need to compare low words. since
1646 the jump is inverted, we invert the test too. */
1647 if (op1 == TOK_LT)
1648 op1 = TOK_LE;
1649 else if (op1 == TOK_GT)
1650 op1 = TOK_GE;
1651 else if (op1 == TOK_ULT)
1652 op1 = TOK_ULE;
1653 else if (op1 == TOK_UGT)
1654 op1 = TOK_UGE;
1655 a = 0;
1656 b = 0;
1657 gen_op(op1);
1658 if (op == TOK_NE) {
1659 b = gvtst(0, 0);
1660 } else {
1661 a = gvtst(1, 0);
1662 if (op != TOK_EQ) {
1663 /* generate non equal test */
1664 vpushi(TOK_NE);
1665 vtop->r = VT_CMP;
1666 b = gvtst(0, 0);
1669 /* compare low. Always unsigned */
1670 op1 = op;
1671 if (op1 == TOK_LT)
1672 op1 = TOK_ULT;
1673 else if (op1 == TOK_LE)
1674 op1 = TOK_ULE;
1675 else if (op1 == TOK_GT)
1676 op1 = TOK_UGT;
1677 else if (op1 == TOK_GE)
1678 op1 = TOK_UGE;
1679 gen_op(op1);
1680 a = gvtst(1, a);
1681 gsym(b);
1682 vseti(VT_JMPI, a);
1683 break;
1686 #endif
1688 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1690 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1691 return (a ^ b) >> 63 ? -x : x;
1694 static int gen_opic_lt(uint64_t a, uint64_t b)
1696 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
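/* Both helpers work on the raw 64-bit patterns: gen_opic_sdiv() negates
   negative operands, divides unsigned and restores the sign from a ^ b,
   i.e. two's-complement signed division; gen_opic_lt() flips the sign bit
   of both operands, which maps signed ordering onto unsigned ordering
   (e.g. -1 becomes 0x7fff...ffff and 0 becomes 0x8000...0000). */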
1699 /* handle integer constant optimizations and various
1700 machine-independent optimizations */
1701 static void gen_opic(int op)
1703 SValue *v1 = vtop - 1;
1704 SValue *v2 = vtop;
1705 int t1 = v1->type.t & VT_BTYPE;
1706 int t2 = v2->type.t & VT_BTYPE;
1707 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1708 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1709 uint64_t l1 = c1 ? v1->c.i : 0;
1710 uint64_t l2 = c2 ? v2->c.i : 0;
1711 int shm = (t1 == VT_LLONG) ? 63 : 31;
1713 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1714 l1 = ((uint32_t)l1 |
1715 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1716 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1717 l2 = ((uint32_t)l2 |
1718 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1720 if (c1 && c2) {
1721 switch(op) {
1722 case '+': l1 += l2; break;
1723 case '-': l1 -= l2; break;
1724 case '&': l1 &= l2; break;
1725 case '^': l1 ^= l2; break;
1726 case '|': l1 |= l2; break;
1727 case '*': l1 *= l2; break;
1729 case TOK_PDIV:
1730 case '/':
1731 case '%':
1732 case TOK_UDIV:
1733 case TOK_UMOD:
1734 /* if division by zero, generate explicit division */
1735 if (l2 == 0) {
1736 if (const_wanted)
1737 tcc_error("division by zero in constant");
1738 goto general_case;
1740 switch(op) {
1741 default: l1 = gen_opic_sdiv(l1, l2); break;
1742 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1743 case TOK_UDIV: l1 = l1 / l2; break;
1744 case TOK_UMOD: l1 = l1 % l2; break;
1746 break;
1747 case TOK_SHL: l1 <<= (l2 & shm); break;
1748 case TOK_SHR: l1 >>= (l2 & shm); break;
1749 case TOK_SAR:
1750 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1751 break;
1752 /* tests */
1753 case TOK_ULT: l1 = l1 < l2; break;
1754 case TOK_UGE: l1 = l1 >= l2; break;
1755 case TOK_EQ: l1 = l1 == l2; break;
1756 case TOK_NE: l1 = l1 != l2; break;
1757 case TOK_ULE: l1 = l1 <= l2; break;
1758 case TOK_UGT: l1 = l1 > l2; break;
1759 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1760 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1761 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1762 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1763 /* logical */
1764 case TOK_LAND: l1 = l1 && l2; break;
1765 case TOK_LOR: l1 = l1 || l2; break;
1766 default:
1767 goto general_case;
1769 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1770 l1 = ((uint32_t)l1 |
1771 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1772 v1->c.i = l1;
1773 vtop--;
1774 } else {
1775 /* if commutative ops, put c2 as constant */
1776 if (c1 && (op == '+' || op == '&' || op == '^' ||
1777 op == '|' || op == '*')) {
1778 vswap();
1779 c2 = c1; //c = c1, c1 = c2, c2 = c;
1780 l2 = l1; //l = l1, l1 = l2, l2 = l;
1782 if (!const_wanted &&
1783 c1 && ((l1 == 0 &&
1784 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1785 (l1 == -1 && op == TOK_SAR))) {
1786 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1787 vtop--;
1788 } else if (!const_wanted &&
1789 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1790 (l2 == -1 && op == '|') ||
1791 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1792 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1793 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1794 if (l2 == 1)
1795 vtop->c.i = 0;
1796 vswap();
1797 vtop--;
1798 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1799 op == TOK_PDIV) &&
1800 l2 == 1) ||
1801 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1802 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1803 l2 == 0) ||
1804 (op == '&' &&
1805 l2 == -1))) {
1806 /* filter out NOP operations like x*1, x-0, x&-1... */
1807 vtop--;
1808 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1809 /* try to use shifts instead of muls or divs */
1810 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
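/* (l2 & (l2 - 1)) == 0 means l2 is a power of two; the loop below
   computes n = log2(l2), so e.g. x * 8 becomes x << 3 and an unsigned
   (or exact pointer-difference) division by 8 becomes a right shift by 3. */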
1811 int n = -1;
1812 while (l2) {
1813 l2 >>= 1;
1814 n++;
1816 vtop->c.i = n;
1817 if (op == '*')
1818 op = TOK_SHL;
1819 else if (op == TOK_PDIV)
1820 op = TOK_SAR;
1821 else
1822 op = TOK_SHR;
1824 goto general_case;
1825 } else if (c2 && (op == '+' || op == '-') &&
1826 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1827 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1828 /* symbol + constant case */
1829 if (op == '-')
1830 l2 = -l2;
1831 l2 += vtop[-1].c.i;
1832 /* The backends can't always deal with addends to symbols
1833 larger than +-1<<31. Don't construct such. */
1834 if ((int)l2 != l2)
1835 goto general_case;
1836 vtop--;
1837 vtop->c.i = l2;
1838 } else {
1839 general_case:
1840 /* call low level op generator */
1841 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1842 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1843 gen_opl(op);
1844 else
1845 gen_opi(op);
1850 /* generate a floating point operation with constant propagation */
1851 static void gen_opif(int op)
1853 int c1, c2;
1854 SValue *v1, *v2;
1855 long double f1, f2;
1857 v1 = vtop - 1;
1858 v2 = vtop;
1859 /* currently, we cannot do computations with forward symbols */
1860 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1861 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1862 if (c1 && c2) {
1863 if (v1->type.t == VT_FLOAT) {
1864 f1 = v1->c.f;
1865 f2 = v2->c.f;
1866 } else if (v1->type.t == VT_DOUBLE) {
1867 f1 = v1->c.d;
1868 f2 = v2->c.d;
1869 } else {
1870 f1 = v1->c.ld;
1871 f2 = v2->c.ld;
1874 /* NOTE: we only do constant propagation if the operands are finite
1875 numbers (not NaN or infinity) (ANSI spec) */
1876 if (!ieee_finite(f1) || !ieee_finite(f2))
1877 goto general_case;
1879 switch(op) {
1880 case '+': f1 += f2; break;
1881 case '-': f1 -= f2; break;
1882 case '*': f1 *= f2; break;
1883 case '/':
1884 if (f2 == 0.0) {
1885 if (const_wanted)
1886 tcc_error("division by zero in constant");
1887 goto general_case;
1889 f1 /= f2;
1890 break;
1891 /* XXX: also handles tests ? */
1892 default:
1893 goto general_case;
1895 /* XXX: overflow test ? */
1896 if (v1->type.t == VT_FLOAT) {
1897 v1->c.f = f1;
1898 } else if (v1->type.t == VT_DOUBLE) {
1899 v1->c.d = f1;
1900 } else {
1901 v1->c.ld = f1;
1903 vtop--;
1904 } else {
1905 general_case:
1906 gen_opf(op);
1910 static int pointed_size(CType *type)
1912 int align;
1913 return type_size(pointed_type(type), &align);
1916 static void vla_runtime_pointed_size(CType *type)
1918 int align;
1919 vla_runtime_type_size(pointed_type(type), &align);
1922 static inline int is_null_pointer(SValue *p)
1924 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1925 return 0;
1926 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1927 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1928 ((p->type.t & VT_BTYPE) == VT_PTR &&
1929 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1932 static inline int is_integer_btype(int bt)
1934 return (bt == VT_BYTE || bt == VT_SHORT ||
1935 bt == VT_INT || bt == VT_LLONG);
1938 /* check types for comparison or subtraction of pointers */
1939 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1941 CType *type1, *type2, tmp_type1, tmp_type2;
1942 int bt1, bt2;
1944 /* null pointers are accepted for all comparisons, as in gcc */
1945 if (is_null_pointer(p1) || is_null_pointer(p2))
1946 return;
1947 type1 = &p1->type;
1948 type2 = &p2->type;
1949 bt1 = type1->t & VT_BTYPE;
1950 bt2 = type2->t & VT_BTYPE;
1951 /* accept comparison between pointer and integer with a warning */
1952 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1953 if (op != TOK_LOR && op != TOK_LAND )
1954 tcc_warning("comparison between pointer and integer");
1955 return;
1958 /* both must be pointers or implicit function pointers */
1959 if (bt1 == VT_PTR) {
1960 type1 = pointed_type(type1);
1961 } else if (bt1 != VT_FUNC)
1962 goto invalid_operands;
1964 if (bt2 == VT_PTR) {
1965 type2 = pointed_type(type2);
1966 } else if (bt2 != VT_FUNC) {
1967 invalid_operands:
1968 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1970 if ((type1->t & VT_BTYPE) == VT_VOID ||
1971 (type2->t & VT_BTYPE) == VT_VOID)
1972 return;
1973 tmp_type1 = *type1;
1974 tmp_type2 = *type2;
1975 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1976 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1977 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1978 /* gcc-like error if '-' is used */
1979 if (op == '-')
1980 goto invalid_operands;
1981 else
1982 tcc_warning("comparison of distinct pointer types lacks a cast");
1986 /* generic gen_op: handles types problems */
1987 ST_FUNC void gen_op(int op)
1989 int u, t1, t2, bt1, bt2, t;
1990 CType type1;
1992 redo:
1993 t1 = vtop[-1].type.t;
1994 t2 = vtop[0].type.t;
1995 bt1 = t1 & VT_BTYPE;
1996 bt2 = t2 & VT_BTYPE;
1998 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1999 tcc_error("operation on a struct");
2000 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2001 if (bt2 == VT_FUNC) {
2002 mk_pointer(&vtop->type);
2003 gaddrof();
2005 if (bt1 == VT_FUNC) {
2006 vswap();
2007 mk_pointer(&vtop->type);
2008 gaddrof();
2009 vswap();
2011 goto redo;
2012 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2013 /* at least one operand is a pointer */
2014 /* relational op: both operands must be pointers */
2015 if (op >= TOK_ULT && op <= TOK_LOR) {
2016 check_comparison_pointer_types(vtop - 1, vtop, op);
2017 /* pointers are handled as unsigned */
2018 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2019 t = VT_LLONG | VT_UNSIGNED;
2020 #else
2021 t = VT_INT | VT_UNSIGNED;
2022 #endif
2023 goto std_op;
2025 /* if both pointers, then it must be the '-' op */
2026 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2027 if (op != '-')
2028 tcc_error("cannot use pointers here");
2029 check_comparison_pointer_types(vtop - 1, vtop, op);
2030 /* XXX: check that types are compatible */
2031 if (vtop[-1].type.t & VT_VLA) {
2032 vla_runtime_pointed_size(&vtop[-1].type);
2033 } else {
2034 vpushi(pointed_size(&vtop[-1].type));
2036 vrott(3);
2037 gen_opic(op);
2038 /* set to integer type */
2039 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2040 vtop->type.t = VT_LLONG;
2041 #else
2042 vtop->type.t = VT_INT;
2043 #endif
2044 vswap();
2045 gen_op(TOK_PDIV);
2046 } else {
2047 /* exactly one pointer : must be '+' or '-'. */
2048 if (op != '-' && op != '+')
2049 tcc_error("cannot use pointers here");
2050 /* Put pointer as first operand */
2051 if (bt2 == VT_PTR) {
2052 vswap();
2053 t = t1, t1 = t2, t2 = t;
2055 #if PTR_SIZE == 4
2056 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2057 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2058 gen_cast(&int_type);
2059 #endif
2060 type1 = vtop[-1].type;
2061 type1.t &= ~VT_ARRAY;
2062 if (vtop[-1].type.t & VT_VLA)
2063 vla_runtime_pointed_size(&vtop[-1].type);
2064 else {
2065 u = pointed_size(&vtop[-1].type);
2066 if (u < 0)
2067 tcc_error("unknown array element size");
2068 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2069 vpushll(u);
2070 #else
2071 /* XXX: cast to int ? (long long case) */
2072 vpushi(u);
2073 #endif
2075 gen_op('*');
2076 #if 0
2077 /* #ifdef CONFIG_TCC_BCHECK
2078 The main reason for removing this code:
2079 #include <stdio.h>
2080 int main ()
2082 int v[10];
2083 int i = 10;
2084 int j = 9;
2085 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2086 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2088 When this code is enabled, the output looks like
2089 v+i-j = 0xfffffffe
2090 v+(i-j) = 0xbff84000 */
2092 /* if evaluating constant expression, no code should be
2093 generated, so no bound check */
2094 if (tcc_state->do_bounds_check && !const_wanted) {
2095 /* if bounded pointers, we generate a special code to
2096 test bounds */
2097 if (op == '-') {
2098 vpushi(0);
2099 vswap();
2100 gen_op('-');
2102 gen_bounded_ptr_add();
2103 } else
2104 #endif
2106 gen_opic(op);
2108 /* restore the type in case gen_opic() swapped the operands */
2109 vtop->type = type1;
2111 } else if (is_float(bt1) || is_float(bt2)) {
2112 /* compute bigger type and do implicit casts */
2113 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2114 t = VT_LDOUBLE;
2115 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2116 t = VT_DOUBLE;
2117 } else {
2118 t = VT_FLOAT;
2120 /* floats can only be used for a few operations */
2121 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2122 (op < TOK_ULT || op > TOK_GT))
2123 tcc_error("invalid operands for binary operation");
2124 goto std_op;
2125 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2126 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2127 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2128 t |= VT_UNSIGNED;
2129 goto std_op;
2130 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2131 /* cast to biggest op */
2132 t = VT_LLONG;
2133 /* convert to unsigned if it does not fit in a long long */
2134 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2135 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2136 t |= VT_UNSIGNED;
2137 goto std_op;
2138 } else {
2139 /* integer operations */
2140 t = VT_INT;
2141 /* convert to unsigned if it does not fit in an integer */
2142 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2143 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2144 t |= VT_UNSIGNED;
2145 std_op:
2146 /* XXX: currently, some unsigned operations are explicit, so
2147 we modify them here */
2148 if (t & VT_UNSIGNED) {
2149 if (op == TOK_SAR)
2150 op = TOK_SHR;
2151 else if (op == '/')
2152 op = TOK_UDIV;
2153 else if (op == '%')
2154 op = TOK_UMOD;
2155 else if (op == TOK_LT)
2156 op = TOK_ULT;
2157 else if (op == TOK_GT)
2158 op = TOK_UGT;
2159 else if (op == TOK_LE)
2160 op = TOK_ULE;
2161 else if (op == TOK_GE)
2162 op = TOK_UGE;
2164 vswap();
2165 type1.t = t;
2166 gen_cast(&type1);
2167 vswap();
2168 /* special case for shifts and long long: we keep the shift as
2169 an integer */
2170 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2171 type1.t = VT_INT;
2172 gen_cast(&type1);
2173 if (is_float(t))
2174 gen_opif(op);
2175 else
2176 gen_opic(op);
2177 if (op >= TOK_ULT && op <= TOK_GT) {
2178 /* relational op: the result is an int */
2179 vtop->type.t = VT_INT;
2180 } else {
2181 vtop->type.t = t;
2184 // Make sure that we have converted to an rvalue:
2185 if (vtop->r & VT_LVAL)
2186 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2189 #ifndef TCC_TARGET_ARM
2190 /* generic itof for unsigned long long case */
2191 static void gen_cvt_itof1(int t)
2193 #ifdef TCC_TARGET_ARM64
2194 gen_cvt_itof(t);
2195 #else
2196 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2197 (VT_LLONG | VT_UNSIGNED)) {
2199 if (t == VT_FLOAT)
2200 vpush_global_sym(&func_old_type, TOK___floatundisf);
2201 #if LDOUBLE_SIZE != 8
2202 else if (t == VT_LDOUBLE)
2203 vpush_global_sym(&func_old_type, TOK___floatundixf);
2204 #endif
2205 else
2206 vpush_global_sym(&func_old_type, TOK___floatundidf);
2207 vrott(2);
2208 gfunc_call(1);
2209 vpushi(0);
2210 vtop->r = reg_fret(t);
2211 } else {
2212 gen_cvt_itof(t);
2214 #endif
2216 #endif
2218 /* generic ftoi for unsigned long long case */
2219 static void gen_cvt_ftoi1(int t)
2221 #ifdef TCC_TARGET_ARM64
2222 gen_cvt_ftoi(t);
2223 #else
2224 int st;
2226 if (t == (VT_LLONG | VT_UNSIGNED)) {
2227 /* not handled natively */
2228 st = vtop->type.t & VT_BTYPE;
2229 if (st == VT_FLOAT)
2230 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2231 #if LDOUBLE_SIZE != 8
2232 else if (st == VT_LDOUBLE)
2233 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2234 #endif
2235 else
2236 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2237 vrott(2);
2238 gfunc_call(1);
2239 vpushi(0);
2240 vtop->r = REG_IRET;
2241 vtop->r2 = REG_LRET;
2242 } else {
2243 gen_cvt_ftoi(t);
2245 #endif
2248 /* force char or short cast */
2249 static void force_charshort_cast(int t)
2251 int bits, dbt;
2252 dbt = t & VT_BTYPE;
2253 /* XXX: add optimization if lvalue : just change type and offset */
2254 if (dbt == VT_BYTE)
2255 bits = 8;
2256 else
2257 bits = 16;
2258 if (t & VT_UNSIGNED) {
2259 vpushi((1 << bits) - 1);
2260 gen_op('&');
2261 } else {
2262 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2263 bits = 64 - bits;
2264 else
2265 bits = 32 - bits;
2266 vpushi(bits);
2267 gen_op(TOK_SHL);
2268 /* the result must be signed, otherwise the SAR would be turned
2269 into an SHL. This was not the case when "t" was a signed short
2270 and the last value on the stack was an unsigned int */
2271 vtop->type.t &= ~VT_UNSIGNED;
2272 vpushi(bits);
2273 gen_op(TOK_SAR);
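/* Illustration (not part of the original source): for a signed char
   cast of an int value the path above shifts left then arithmetic-
   shifts right, e.g. (0x1ff << 24) >> 24 == -1 on the usual
   two's-complement targets, matching (signed char)0x1ff; the unsigned
   path instead just masks with 0xff. */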
2277 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2278 static void gen_cast(CType *type)
2280 int sbt, dbt, sf, df, c, p;
2282 /* special delayed cast for char/short */
2283 /* XXX: in some cases (multiple cascaded casts), it may still
2284 be incorrect */
2285 if (vtop->r & VT_MUSTCAST) {
2286 vtop->r &= ~VT_MUSTCAST;
2287 force_charshort_cast(vtop->type.t);
2290 /* bitfields first get cast to ints */
2291 if (vtop->type.t & VT_BITFIELD) {
2292 gv(RC_INT);
2295 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2296 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2298 if (sbt != dbt) {
2299 sf = is_float(sbt);
2300 df = is_float(dbt);
2301 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2302 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2303 if (c) {
2304 /* constant case: we can do it now */
2305 /* XXX: in ISOC, cannot do it if error in convert */
2306 if (sbt == VT_FLOAT)
2307 vtop->c.ld = vtop->c.f;
2308 else if (sbt == VT_DOUBLE)
2309 vtop->c.ld = vtop->c.d;
2311 if (df) {
2312 if ((sbt & VT_BTYPE) == VT_LLONG) {
2313 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2314 vtop->c.ld = vtop->c.i;
2315 else
2316 vtop->c.ld = -(long double)-vtop->c.i;
2317 } else if(!sf) {
2318 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2319 vtop->c.ld = (uint32_t)vtop->c.i;
2320 else
2321 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2324 if (dbt == VT_FLOAT)
2325 vtop->c.f = (float)vtop->c.ld;
2326 else if (dbt == VT_DOUBLE)
2327 vtop->c.d = (double)vtop->c.ld;
2328 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2329 vtop->c.i = vtop->c.ld;
2330 } else if (sf && dbt == VT_BOOL) {
2331 vtop->c.i = (vtop->c.ld != 0);
2332 } else {
2333 if(sf)
2334 vtop->c.i = vtop->c.ld;
2335 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2336 ;
2337 else if (sbt & VT_UNSIGNED)
2338 vtop->c.i = (uint32_t)vtop->c.i;
2339 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2340 else if (sbt == VT_PTR)
2341 ;
2342 #endif
2343 else if (sbt != VT_LLONG)
2344 vtop->c.i = ((uint32_t)vtop->c.i |
2345 -(vtop->c.i & 0x80000000));
2347 if (dbt == (VT_LLONG|VT_UNSIGNED))
2348 ;
2349 else if (dbt == VT_BOOL)
2350 vtop->c.i = (vtop->c.i != 0);
2351 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2352 else if (dbt == VT_PTR)
2353 ;
2354 #endif
2355 else if (dbt != VT_LLONG) {
2356 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2357 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2358 0xffffffff);
2359 vtop->c.i &= m;
2360 if (!(dbt & VT_UNSIGNED))
2361 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2364 } else if (p && dbt == VT_BOOL) {
2365 vtop->r = VT_CONST;
2366 vtop->c.i = 1;
2367 } else {
2368 /* non constant case: generate code */
2369 if (sf && df) {
2370 /* convert from fp to fp */
2371 gen_cvt_ftof(dbt);
2372 } else if (df) {
2373 /* convert int to fp */
2374 gen_cvt_itof1(dbt);
2375 } else if (sf) {
2376 /* convert fp to int */
2377 if (dbt == VT_BOOL) {
2378 vpushi(0);
2379 gen_op(TOK_NE);
2380 } else {
2381 /* we handle char/short/etc... with generic code */
2382 if (dbt != (VT_INT | VT_UNSIGNED) &&
2383 dbt != (VT_LLONG | VT_UNSIGNED) &&
2384 dbt != VT_LLONG)
2385 dbt = VT_INT;
2386 gen_cvt_ftoi1(dbt);
2387 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2388 /* additional cast for char/short... */
2389 vtop->type.t = dbt;
2390 gen_cast(type);
2393 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2394 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2395 if ((sbt & VT_BTYPE) != VT_LLONG) {
2396 /* scalar to long long */
2397 /* machine independent conversion */
2398 gv(RC_INT);
2399 /* generate high word */
2400 if (sbt == (VT_INT | VT_UNSIGNED)) {
2401 vpushi(0);
2402 gv(RC_INT);
2403 } else {
2404 if (sbt == VT_PTR) {
2405 /* cast from pointer to int before we apply
2406 shift operation, which pointers don't support*/
2407 gen_cast(&int_type);
2409 gv_dup();
2410 vpushi(31);
2411 gen_op(TOK_SAR);
2413 /* patch second register */
2414 vtop[-1].r2 = vtop->r;
2415 vpop();
2417 #else
2418 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2419 (dbt & VT_BTYPE) == VT_PTR ||
2420 (dbt & VT_BTYPE) == VT_FUNC) {
2421 if ((sbt & VT_BTYPE) != VT_LLONG &&
2422 (sbt & VT_BTYPE) != VT_PTR &&
2423 (sbt & VT_BTYPE) != VT_FUNC) {
2424 /* need to convert from 32bit to 64bit */
2425 gv(RC_INT);
2426 if (sbt != (VT_INT | VT_UNSIGNED)) {
2427 #if defined(TCC_TARGET_ARM64)
2428 gen_cvt_sxtw();
2429 #elif defined(TCC_TARGET_X86_64)
2430 int r = gv(RC_INT);
2431 /* x86_64 specific: movslq */
2432 o(0x6348);
2433 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2434 #else
2435 #error
2436 #endif
2439 #endif
2440 } else if (dbt == VT_BOOL) {
2441 /* scalar to bool */
2442 vpushi(0);
2443 gen_op(TOK_NE);
2444 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2445 (dbt & VT_BTYPE) == VT_SHORT) {
2446 if (sbt == VT_PTR) {
2447 vtop->type.t = VT_INT;
2448 tcc_warning("nonportable conversion from pointer to char/short");
2450 force_charshort_cast(dbt);
2451 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2452 } else if ((dbt & VT_BTYPE) == VT_INT) {
2453 /* scalar to int */
2454 if ((sbt & VT_BTYPE) == VT_LLONG) {
2455 /* from long long: just take low order word */
2456 lexpand();
2457 vpop();
2459 /* if lvalue and single word type, nothing to do because
2460 the lvalue already contains the real type size (see
2461 VT_LVAL_xxx constants) */
2462 #endif
2465 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2466 /* if we are casting between pointer types,
2467 we must update the VT_LVAL_xxx size */
2468 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2469 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2471 vtop->type = *type;
2474 /* return type size as known at compile time. Put alignment at 'a' */
2475 ST_FUNC int type_size(CType *type, int *a)
2477 Sym *s;
2478 int bt;
2480 bt = type->t & VT_BTYPE;
2481 if (bt == VT_STRUCT) {
2482 /* struct/union */
2483 s = type->ref;
2484 *a = s->r;
2485 return s->c;
2486 } else if (bt == VT_PTR) {
2487 if (type->t & VT_ARRAY) {
2488 int ts;
2490 s = type->ref;
2491 ts = type_size(&s->type, a);
2493 if (ts < 0 && s->c < 0)
2494 ts = -ts;
2496 return ts * s->c;
2497 } else {
2498 *a = PTR_SIZE;
2499 return PTR_SIZE;
2501 } else if (bt == VT_LDOUBLE) {
2502 *a = LDOUBLE_ALIGN;
2503 return LDOUBLE_SIZE;
2504 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2505 #ifdef TCC_TARGET_I386
2506 #ifdef TCC_TARGET_PE
2507 *a = 8;
2508 #else
2509 *a = 4;
2510 #endif
2511 #elif defined(TCC_TARGET_ARM)
2512 #ifdef TCC_ARM_EABI
2513 *a = 8;
2514 #else
2515 *a = 4;
2516 #endif
2517 #else
2518 *a = 8;
2519 #endif
2520 return 8;
2521 } else if (bt == VT_INT || bt == VT_FLOAT) {
2522 *a = 4;
2523 return 4;
2524 } else if (bt == VT_SHORT) {
2525 *a = 2;
2526 return 2;
2527 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2528 *a = 8;
2529 return 16;
2530 } else if (bt == VT_ENUM) {
2531 *a = 4;
2532 /* Enums might be incomplete, so don't just return '4' here. */
2533 return type->ref->c;
2534 } else {
2535 /* char, void, function, _Bool */
2536 *a = 1;
2537 return 1;
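/* Illustrative values (hypothetical example, assuming a typical 32/64-bit
   target): for 'struct { char c; int i; }' this returns 8 with *a == 4,
   for 'short' it returns 2 with *a == 2, and for 'long double' it
   returns LDOUBLE_SIZE with *a == LDOUBLE_ALIGN. */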
2541 /* push the type size as known at run time on top of the value stack.
2542 Put alignment at 'a' */
2543 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2545 if (type->t & VT_VLA) {
2546 type_size(&type->ref->type, a);
2547 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2548 } else {
2549 vpushi(type_size(type, a));
2553 static void vla_sp_restore(void) {
2554 if (vlas_in_scope) {
2555 gen_vla_sp_restore(vla_sp_loc);
2559 static void vla_sp_restore_root(void) {
2560 if (vlas_in_scope) {
2561 gen_vla_sp_restore(vla_sp_root_loc);
2565 /* return the pointed type of t */
2566 static inline CType *pointed_type(CType *type)
2568 return &type->ref->type;
2571 /* modify type so that it is a pointer to type. */
2572 ST_FUNC void mk_pointer(CType *type)
2574 Sym *s;
2575 s = sym_push(SYM_FIELD, type, 0, -1);
2576 type->t = VT_PTR | (type->t & ~VT_TYPE);
2577 type->ref = s;
2580 /* compare function types. OLD functions match any new functions */
2581 static int is_compatible_func(CType *type1, CType *type2)
2583 Sym *s1, *s2;
2585 s1 = type1->ref;
2586 s2 = type2->ref;
2587 if (!is_compatible_types(&s1->type, &s2->type))
2588 return 0;
2589 /* check func_call */
2590 if (s1->a.func_call != s2->a.func_call)
2591 return 0;
2592 /* XXX: not complete */
2593 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2594 return 1;
2595 if (s1->c != s2->c)
2596 return 0;
2597 while (s1 != NULL) {
2598 if (s2 == NULL)
2599 return 0;
2600 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2601 return 0;
2602 s1 = s1->next;
2603 s2 = s2->next;
2605 if (s2)
2606 return 0;
2607 return 1;
2610 /* return true if type1 and type2 are the same. If unqualified is
2611 true, qualifiers on the types are ignored.
2613 - enums are not checked, as with gcc's __builtin_types_compatible_p ()
2614 */
2615 static int compare_types(CType *type1, CType *type2, int unqualified)
2617 int bt1, t1, t2;
2619 t1 = type1->t & VT_TYPE;
2620 t2 = type2->t & VT_TYPE;
2621 if (unqualified) {
2622 /* strip qualifiers before comparing */
2623 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2624 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2626 /* Default vs. explicit signedness only matters for char */
2627 if ((t1 & VT_BTYPE) != VT_BYTE) {
2628 t1 &= ~VT_DEFSIGN;
2629 t2 &= ~VT_DEFSIGN;
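/* Illustrative note (not part of the original source): as a result,
   'char' and 'signed char' remain distinct types here, while 'int' and
   'signed int' compare equal, which matches ISO C. */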
2631 /* An enum is compatible with (unsigned) int. Ideally we would
2632 store the enums signedness in type->ref.a.<some_bit> and
2633 only accept unsigned enums with unsigned int and vice versa.
2634 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2635 from pointer target types, so we can't add it here either. */
2636 if ((t1 & VT_BTYPE) == VT_ENUM) {
2637 t1 = VT_INT;
2638 if (type1->ref->a.unsigned_enum)
2639 t1 |= VT_UNSIGNED;
2641 if ((t2 & VT_BTYPE) == VT_ENUM) {
2642 t2 = VT_INT;
2643 if (type2->ref->a.unsigned_enum)
2644 t2 |= VT_UNSIGNED;
2646 /* XXX: bitfields ? */
2647 if (t1 != t2)
2648 return 0;
2649 /* test more complicated cases */
2650 bt1 = t1 & VT_BTYPE;
2651 if (bt1 == VT_PTR) {
2652 type1 = pointed_type(type1);
2653 type2 = pointed_type(type2);
2654 return is_compatible_types(type1, type2);
2655 } else if (bt1 == VT_STRUCT) {
2656 return (type1->ref == type2->ref);
2657 } else if (bt1 == VT_FUNC) {
2658 return is_compatible_func(type1, type2);
2659 } else {
2660 return 1;
2664 /* return true if type1 and type2 are exactly the same (including
2665 qualifiers).
2666 */
2667 static int is_compatible_types(CType *type1, CType *type2)
2669 return compare_types(type1,type2,0);
2672 /* return true if type1 and type2 are the same (ignoring qualifiers).
2673 */
2674 static int is_compatible_parameter_types(CType *type1, CType *type2)
2676 return compare_types(type1,type2,1);
2679 /* print a type. If 'varstr' is not NULL, then the variable is also
2680 printed in the type */
2681 /* XXX: union */
2682 /* XXX: add array and function pointers */
2683 static void type_to_str(char *buf, int buf_size,
2684 CType *type, const char *varstr)
2686 int bt, v, t;
2687 Sym *s, *sa;
2688 char buf1[256];
2689 const char *tstr;
2691 t = type->t & VT_TYPE;
2692 bt = t & VT_BTYPE;
2693 buf[0] = '\0';
2694 if (t & VT_CONSTANT)
2695 pstrcat(buf, buf_size, "const ");
2696 if (t & VT_VOLATILE)
2697 pstrcat(buf, buf_size, "volatile ");
2698 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2699 pstrcat(buf, buf_size, "unsigned ");
2700 else if (t & VT_DEFSIGN)
2701 pstrcat(buf, buf_size, "signed ");
2702 switch(bt) {
2703 case VT_VOID:
2704 tstr = "void";
2705 goto add_tstr;
2706 case VT_BOOL:
2707 tstr = "_Bool";
2708 goto add_tstr;
2709 case VT_BYTE:
2710 tstr = "char";
2711 goto add_tstr;
2712 case VT_SHORT:
2713 tstr = "short";
2714 goto add_tstr;
2715 case VT_INT:
2716 tstr = "int";
2717 goto add_tstr;
2718 case VT_LONG:
2719 tstr = "long";
2720 goto add_tstr;
2721 case VT_LLONG:
2722 tstr = "long long";
2723 goto add_tstr;
2724 case VT_FLOAT:
2725 tstr = "float";
2726 goto add_tstr;
2727 case VT_DOUBLE:
2728 tstr = "double";
2729 goto add_tstr;
2730 case VT_LDOUBLE:
2731 tstr = "long double";
2732 add_tstr:
2733 pstrcat(buf, buf_size, tstr);
2734 break;
2735 case VT_ENUM:
2736 case VT_STRUCT:
2737 if (bt == VT_STRUCT)
2738 tstr = "struct ";
2739 else
2740 tstr = "enum ";
2741 pstrcat(buf, buf_size, tstr);
2742 v = type->ref->v & ~SYM_STRUCT;
2743 if (v >= SYM_FIRST_ANOM)
2744 pstrcat(buf, buf_size, "<anonymous>");
2745 else
2746 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2747 break;
2748 case VT_FUNC:
2749 s = type->ref;
2750 type_to_str(buf, buf_size, &s->type, varstr);
2751 pstrcat(buf, buf_size, "(");
2752 sa = s->next;
2753 while (sa != NULL) {
2754 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2755 pstrcat(buf, buf_size, buf1);
2756 sa = sa->next;
2757 if (sa)
2758 pstrcat(buf, buf_size, ", ");
2760 pstrcat(buf, buf_size, ")");
2761 goto no_var;
2762 case VT_PTR:
2763 s = type->ref;
2764 if (t & VT_ARRAY) {
2765 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2766 type_to_str(buf, buf_size, &s->type, buf1);
2767 goto no_var;
2769 pstrcpy(buf1, sizeof(buf1), "*");
2770 if (t & VT_CONSTANT)
2771 pstrcat(buf1, buf_size, "const ");
2772 if (t & VT_VOLATILE)
2773 pstrcat(buf1, buf_size, "volatile ");
2774 if (varstr)
2775 pstrcat(buf1, sizeof(buf1), varstr);
2776 type_to_str(buf, buf_size, &s->type, buf1);
2777 goto no_var;
2779 if (varstr) {
2780 pstrcat(buf, buf_size, " ");
2781 pstrcat(buf, buf_size, varstr);
2783 no_var: ;
2786 /* verify type compatibility to store vtop in 'dt' type, and generate
2787 casts if needed. */
2788 static void gen_assign_cast(CType *dt)
2790 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2791 char buf1[256], buf2[256];
2792 int dbt, sbt;
2794 st = &vtop->type; /* source type */
2795 dbt = dt->t & VT_BTYPE;
2796 sbt = st->t & VT_BTYPE;
2797 if (sbt == VT_VOID || dbt == VT_VOID) {
2798 if (sbt == VT_VOID && dbt == VT_VOID)
2799 ; /*
2800 It is Ok if both are void
2801 A test program:
2802 void func1() {}
2803 void func2() {
2804 return func1();
2805 }
2806 gcc accepts this program
2807 */
2808 else
2809 tcc_error("cannot cast from/to void");
2811 if (dt->t & VT_CONSTANT)
2812 tcc_warning("assignment of read-only location");
2813 switch(dbt) {
2814 case VT_PTR:
2815 /* special cases for pointers */
2816 /* '0' can also be a pointer */
2817 if (is_null_pointer(vtop))
2818 goto type_ok;
2819 /* accept implicit pointer to integer cast with warning */
2820 if (is_integer_btype(sbt)) {
2821 tcc_warning("assignment makes pointer from integer without a cast");
2822 goto type_ok;
2824 type1 = pointed_type(dt);
2825 /* a function is implicitly a function pointer */
2826 if (sbt == VT_FUNC) {
2827 if ((type1->t & VT_BTYPE) != VT_VOID &&
2828 !is_compatible_types(pointed_type(dt), st))
2829 tcc_warning("assignment from incompatible pointer type");
2830 goto type_ok;
2832 if (sbt != VT_PTR)
2833 goto error;
2834 type2 = pointed_type(st);
2835 if ((type1->t & VT_BTYPE) == VT_VOID ||
2836 (type2->t & VT_BTYPE) == VT_VOID) {
2837 /* void * can match anything */
2838 } else {
2839 /* exact type match, except for qualifiers */
2840 tmp_type1 = *type1;
2841 tmp_type2 = *type2;
2842 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2843 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2844 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2845 /* Like GCC, don't warn by default for mere changes
2846 in pointer target signedness. Do warn for different
2847 base types, though, in particular for unsigned enums
2848 and signed int targets. */
2849 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2850 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2851 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2852 ;
2853 else
2854 tcc_warning("assignment from incompatible pointer type");
2857 /* check const and volatile */
2858 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2859 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2860 tcc_warning("assignment discards qualifiers from pointer target type");
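/* Illustrative cases (hypothetical examples, not from the original
   source): with 'int *ip;', 'unsigned int *up = ip;' differs only in
   target signedness and is accepted silently, 'short *sp = ip;' has a
   different base type and gets the incompatible-pointer warning, and
   'int *p = (const int *)ip;' triggers the qualifier warning above. */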
2861 break;
2862 case VT_BYTE:
2863 case VT_SHORT:
2864 case VT_INT:
2865 case VT_LLONG:
2866 if (sbt == VT_PTR || sbt == VT_FUNC) {
2867 tcc_warning("assignment makes integer from pointer without a cast");
2868 } else if (sbt == VT_STRUCT) {
2869 goto case_VT_STRUCT;
2871 /* XXX: more tests */
2872 break;
2873 case VT_STRUCT:
2874 case_VT_STRUCT:
2875 tmp_type1 = *dt;
2876 tmp_type2 = *st;
2877 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2878 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2879 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2880 error:
2881 type_to_str(buf1, sizeof(buf1), st, NULL);
2882 type_to_str(buf2, sizeof(buf2), dt, NULL);
2883 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2885 break;
2887 type_ok:
2888 gen_cast(dt);
2891 /* store vtop in lvalue pushed on stack */
2892 ST_FUNC void vstore(void)
2894 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2896 ft = vtop[-1].type.t;
2897 sbt = vtop->type.t & VT_BTYPE;
2898 dbt = ft & VT_BTYPE;
2899 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2900 (sbt == VT_INT && dbt == VT_SHORT))
2901 && !(vtop->type.t & VT_BITFIELD)) {
2902 /* optimize char/short casts */
2903 delayed_cast = VT_MUSTCAST;
2904 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2905 ((1 << VT_STRUCT_SHIFT) - 1));
2906 /* XXX: factorize */
2907 if (ft & VT_CONSTANT)
2908 tcc_warning("assignment of read-only location");
2909 } else {
2910 delayed_cast = 0;
2911 if (!(ft & VT_BITFIELD))
2912 gen_assign_cast(&vtop[-1].type);
2915 if (sbt == VT_STRUCT) {
2916 /* if structure, only generate pointer */
2917 /* structure assignment : generate memcpy */
2918 /* XXX: optimize if small size */
2919 size = type_size(&vtop->type, &align);
2921 /* destination */
2922 vswap();
2923 vtop->type.t = VT_PTR;
2924 gaddrof();
2926 /* address of memcpy() */
2927 #ifdef TCC_ARM_EABI
2928 if(!(align & 7))
2929 vpush_global_sym(&func_old_type, TOK_memcpy8);
2930 else if(!(align & 3))
2931 vpush_global_sym(&func_old_type, TOK_memcpy4);
2932 else
2933 #endif
2934 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2935 vpush_global_sym(&func_old_type, TOK_memmove);
2937 vswap();
2938 /* source */
2939 vpushv(vtop - 2);
2940 vtop->type.t = VT_PTR;
2941 gaddrof();
2942 /* type size */
2943 vpushi(size);
2944 gfunc_call(3);
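/* Illustration (not part of the original source): for
   'struct S a, b; a = b;' the three pushed arguments are &a, &b and
   sizeof(struct S), i.e. the assignment is compiled as a call to
   memmove(&a, &b, sizeof(struct S)) (or memcpy4/memcpy8 on ARM EABI
   when the alignment allows it). */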
2946 /* leave source on stack */
2947 } else if (ft & VT_BITFIELD) {
2948 /* bitfield store handling */
2950 /* save lvalue as expression result (example: s.b = s.a = n;) */
2951 vdup(), vtop[-1] = vtop[-2];
2953 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2954 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2955 /* remove bit field info to avoid loops */
2956 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2958 if((ft & VT_BTYPE) == VT_BOOL) {
2959 gen_cast(&vtop[-1].type);
2960 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2963 /* duplicate destination */
2964 vdup();
2965 vtop[-1] = vtop[-2];
2967 /* mask and shift source */
2968 if((ft & VT_BTYPE) != VT_BOOL) {
2969 if((ft & VT_BTYPE) == VT_LLONG) {
2970 vpushll((1ULL << bit_size) - 1ULL);
2971 } else {
2972 vpushi((1 << bit_size) - 1);
2974 gen_op('&');
2976 vpushi(bit_pos);
2977 gen_op(TOK_SHL);
2978 /* load destination, mask and or with source */
2979 vswap();
2980 if((ft & VT_BTYPE) == VT_LLONG) {
2981 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2982 } else {
2983 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2985 gen_op('&');
2986 gen_op('|');
2987 /* store result */
2988 vstore();
2989 /* ... and discard */
2990 vpop();
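/* Illustration (hypothetical example): for
   'struct { unsigned f:3; } s; s.f = v;' with bit_pos == 0 and
   bit_size == 3 the code above emits the equivalent of
   unit = (unit & ~0x7) | (v & 0x7);
   where 'unit' stands for the underlying storage word; other bit
   positions simply shift both masks by bit_pos. */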
2992 } else {
2993 #ifdef CONFIG_TCC_BCHECK
2994 /* bound check case */
2995 if (vtop[-1].r & VT_MUSTBOUND) {
2996 vswap();
2997 gbound();
2998 vswap();
3000 #endif
3001 rc = RC_INT;
3002 if (is_float(ft)) {
3003 rc = RC_FLOAT;
3004 #ifdef TCC_TARGET_X86_64
3005 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3006 rc = RC_ST0;
3007 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3008 rc = RC_FRET;
3010 #endif
3012 r = gv(rc); /* generate value */
3013 /* if lvalue was saved on stack, must read it */
3014 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3015 SValue sv;
3016 t = get_reg(RC_INT);
3017 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3018 sv.type.t = VT_PTR;
3019 #else
3020 sv.type.t = VT_INT;
3021 #endif
3022 sv.r = VT_LOCAL | VT_LVAL;
3023 sv.c.i = vtop[-1].c.i;
3024 load(t, &sv);
3025 vtop[-1].r = t | VT_LVAL;
3027 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3028 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3029 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3030 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3031 #else
3032 if ((ft & VT_BTYPE) == VT_LLONG) {
3033 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3034 #endif
3035 vtop[-1].type.t = load_type;
3036 store(r, vtop - 1);
3037 vswap();
3038 /* convert to int to increment easily */
3039 vtop->type.t = addr_type;
3040 gaddrof();
3041 vpushi(load_size);
3042 gen_op('+');
3043 vtop->r |= VT_LVAL;
3044 vswap();
3045 vtop[-1].type.t = load_type;
3046 /* XXX: it works because r2 is spilled last ! */
3047 store(vtop->r2, vtop - 1);
3048 } else {
3049 store(r, vtop - 1);
3052 vswap();
3053 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3054 vtop->r |= delayed_cast;
3058 /* handle pre/post increment and decrement; 'post' is true for the
postfix form, 'c' is the token ++ or -- */
3059 ST_FUNC void inc(int post, int c)
3061 test_lvalue();
3062 vdup(); /* save lvalue */
3063 if (post) {
3064 gv_dup(); /* duplicate value */
3065 vrotb(3);
3066 vrotb(3);
3068 /* add constant */
3069 vpushi(c - TOK_MID);
3070 gen_op('+');
3071 vstore(); /* store value */
3072 if (post)
3073 vpop(); /* if post op, return saved value */
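/* Example (illustrative, not from the original source): for 'i++' the
   old value of i is duplicated first and left as the expression result
   while i itself gets old + 1 stored back; 'c - TOK_MID' evaluates to
   +1 for TOK_INC and -1 for TOK_DEC (assumption based on the token
   ordering in tcc.h). */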
3076 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3078 /* read the string */
3079 if (tok != TOK_STR)
3080 expect(msg);
3081 cstr_new(astr);
3082 while (tok == TOK_STR) {
3083 /* XXX: add \0 handling too ? */
3084 cstr_cat(astr, tokc.str.data, -1);
3085 next();
3087 cstr_ccat(astr, '\0');
3090 /* If I is >= 1 and a power of two, returns log2(i)+1.
3091 If I is 0 returns 0. */
3092 static int exact_log2p1(int i)
3094 int ret;
3095 if (!i)
3096 return 0;
3097 for (ret = 1; i >= 1 << 8; ret += 8)
3098 i >>= 8;
3099 if (i >= 1 << 4)
3100 ret += 4, i >>= 4;
3101 if (i >= 1 << 2)
3102 ret += 2, i >>= 2;
3103 if (i >= 1 << 1)
3104 ret++;
3105 return ret;
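/* Illustrative values matching the loop above:
   exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(0) == 0. */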
3108 /* Parse GNUC __attribute__ extension. Currently, the following
3109 extensions are recognized:
3110 - aligned(n) : set data/function alignment.
3111 - packed : force data alignment to 1
3112 - section(x) : generate data/code in this section.
3113 - unused : currently ignored, but may be used someday.
3114 - regparm(n) : pass function parameters in registers (i386 only)
3115 */
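/* For illustration, a declaration using attributes from the list above
   (hypothetical example, not from the original source):
   static int counter __attribute__((aligned(16), section(".mydata"), unused));
*/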
3116 static void parse_attribute(AttributeDef *ad)
3118 int t, n;
3119 CString astr;
3121 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3122 next();
3123 skip('(');
3124 skip('(');
3125 while (tok != ')') {
3126 if (tok < TOK_IDENT)
3127 expect("attribute name");
3128 t = tok;
3129 next();
3130 switch(t) {
3131 case TOK_SECTION1:
3132 case TOK_SECTION2:
3133 skip('(');
3134 parse_mult_str(&astr, "section name");
3135 ad->section = find_section(tcc_state, (char *)astr.data);
3136 skip(')');
3137 cstr_free(&astr);
3138 break;
3139 case TOK_ALIAS1:
3140 case TOK_ALIAS2:
3141 skip('(');
3142 parse_mult_str(&astr, "alias(\"target\")");
3143 ad->alias_target = /* save string as token, for later */
3144 tok_alloc((char*)astr.data, astr.size-1)->tok;
3145 skip(')');
3146 cstr_free(&astr);
3147 break;
3148 case TOK_VISIBILITY1:
3149 case TOK_VISIBILITY2:
3150 skip('(');
3151 parse_mult_str(&astr,
3152 "visibility(\"default|hidden|internal|protected\")");
3153 if (!strcmp (astr.data, "default"))
3154 ad->a.visibility = STV_DEFAULT;
3155 else if (!strcmp (astr.data, "hidden"))
3156 ad->a.visibility = STV_HIDDEN;
3157 else if (!strcmp (astr.data, "internal"))
3158 ad->a.visibility = STV_INTERNAL;
3159 else if (!strcmp (astr.data, "protected"))
3160 ad->a.visibility = STV_PROTECTED;
3161 else
3162 expect("visibility(\"default|hidden|internal|protected\")");
3163 skip(')');
3164 cstr_free(&astr);
3165 break;
3166 case TOK_ALIGNED1:
3167 case TOK_ALIGNED2:
3168 if (tok == '(') {
3169 next();
3170 n = expr_const();
3171 if (n <= 0 || (n & (n - 1)) != 0)
3172 tcc_error("alignment must be a positive power of two");
3173 skip(')');
3174 } else {
3175 n = MAX_ALIGN;
3177 ad->a.aligned = exact_log2p1(n);
3178 if (n != 1 << (ad->a.aligned - 1))
3179 tcc_error("alignment of %d is larger than implemented", n);
3180 break;
3181 case TOK_PACKED1:
3182 case TOK_PACKED2:
3183 ad->a.packed = 1;
3184 break;
3185 case TOK_WEAK1:
3186 case TOK_WEAK2:
3187 ad->a.weak = 1;
3188 break;
3189 case TOK_UNUSED1:
3190 case TOK_UNUSED2:
3191 /* currently, no need to handle it because tcc does not
3192 track unused objects */
3193 break;
3194 case TOK_NORETURN1:
3195 case TOK_NORETURN2:
3196 /* currently ignored: tcc does not use noreturn
3197 information for code generation */
3198 break;
3199 case TOK_CDECL1:
3200 case TOK_CDECL2:
3201 case TOK_CDECL3:
3202 ad->a.func_call = FUNC_CDECL;
3203 break;
3204 case TOK_STDCALL1:
3205 case TOK_STDCALL2:
3206 case TOK_STDCALL3:
3207 ad->a.func_call = FUNC_STDCALL;
3208 break;
3209 #ifdef TCC_TARGET_I386
3210 case TOK_REGPARM1:
3211 case TOK_REGPARM2:
3212 skip('(');
3213 n = expr_const();
3214 if (n > 3)
3215 n = 3;
3216 else if (n < 0)
3217 n = 0;
3218 if (n > 0)
3219 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3220 skip(')');
3221 break;
3222 case TOK_FASTCALL1:
3223 case TOK_FASTCALL2:
3224 case TOK_FASTCALL3:
3225 ad->a.func_call = FUNC_FASTCALLW;
3226 break;
3227 #endif
3228 case TOK_MODE:
3229 skip('(');
3230 switch(tok) {
3231 case TOK_MODE_DI:
3232 ad->a.mode = VT_LLONG + 1;
3233 break;
3234 case TOK_MODE_QI:
3235 ad->a.mode = VT_BYTE + 1;
3236 break;
3237 case TOK_MODE_HI:
3238 ad->a.mode = VT_SHORT + 1;
3239 break;
3240 case TOK_MODE_SI:
3241 case TOK_MODE_word:
3242 ad->a.mode = VT_INT + 1;
3243 break;
3244 default:
3245 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3246 break;
3248 next();
3249 skip(')');
3250 break;
3251 case TOK_DLLEXPORT:
3252 ad->a.func_export = 1;
3253 break;
3254 case TOK_DLLIMPORT:
3255 ad->a.func_import = 1;
3256 break;
3257 default:
3258 if (tcc_state->warn_unsupported)
3259 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3260 /* skip parameters */
3261 if (tok == '(') {
3262 int parenthesis = 0;
3263 do {
3264 if (tok == '(')
3265 parenthesis++;
3266 else if (tok == ')')
3267 parenthesis--;
3268 next();
3269 } while (parenthesis && tok != -1);
3271 break;
3273 if (tok != ',')
3274 break;
3275 next();
3277 skip(')');
3278 skip(')');
3282 static Sym * find_field (CType *type, int v)
3284 Sym *s = type->ref;
3285 v |= SYM_FIELD;
3286 while ((s = s->next) != NULL) {
3287 if ((s->v & SYM_FIELD) &&
3288 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3289 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3290 Sym *ret = find_field (&s->type, v);
3291 if (ret)
3292 return ret;
3294 if (s->v == v)
3295 break;
3297 return s;
3300 static void struct_add_offset (Sym *s, int offset)
3302 while ((s = s->next) != NULL) {
3303 if ((s->v & SYM_FIELD) &&
3304 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3305 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3306 struct_add_offset(s->type.ref, offset);
3307 } else
3308 s->c += offset;
3312 static void struct_layout(CType *type, AttributeDef *ad)
3314 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3315 int pcc = !tcc_state->ms_bitfields;
3316 Sym *f;
3317 if (ad->a.aligned)
3318 maxalign = 1 << (ad->a.aligned - 1);
3319 else
3320 maxalign = 1;
3321 offset = 0;
3322 c = 0;
3323 bit_pos = 0;
3324 prevbt = VT_STRUCT; /* make it never match */
3325 prev_bit_size = 0;
3326 for (f = type->ref->next; f; f = f->next) {
3327 int typealign, bit_size;
3328 int size = type_size(&f->type, &typealign);
3329 if (f->type.t & VT_BITFIELD)
3330 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3331 else
3332 bit_size = -1;
3333 if (bit_size == 0 && pcc) {
3334 /* Zero-width bit-fields in PCC mode aren't affected
3335 by any packing (attribute or pragma). */
3336 align = typealign;
3337 } else if (f->r > 1) {
3338 align = f->r;
3339 } else if (ad->a.packed || f->r == 1) {
3340 align = 1;
3341 /* Packed fields or packed records don't let the base type
3342 influence the record's type alignment. */
3343 typealign = 1;
3344 } else {
3345 align = typealign;
3347 if (type->ref->type.t != TOK_STRUCT) {
3348 if (pcc && bit_size >= 0)
3349 size = (bit_size + 7) >> 3;
3350 /* Bit position is already zero from our caller. */
3351 offset = 0;
3352 if (size > c)
3353 c = size;
3354 } else if (bit_size < 0) {
3355 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3356 prevbt = VT_STRUCT;
3357 prev_bit_size = 0;
3358 c = (c + addbytes + align - 1) & -align;
3359 offset = c;
3360 if (size > 0)
3361 c += size;
3362 bit_pos = 0;
3363 } else {
3364 /* A bit-field. Layout is more complicated. There are two
3365 options TCC implements: PCC compatible and MS compatible
3366 (PCC compatible is what GCC uses for almost all targets).
3367 In PCC layout the overall size of the struct (in c) is
3368 _excluding_ the current run of bit-fields (that is,
3369 there's at least additional bit_pos bits after c). In
3370 MS layout c does include the current run of bit-fields.
3372 This matters for calculating the natural alignment buckets
3373 in PCC mode. */
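/* Illustration (hypothetical example, assuming a typical x86 target):
   for  struct S { char a:4; short b:4; };
   PCC layout packs b right behind a, giving sizeof(struct S) == 2,
   while MS layout starts a new short-sized run for b, giving
   sizeof(struct S) == 4. */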
3375 /* 'align' will be used to influence records alignment,
3376 so it's the max of specified and type alignment, except
3377 in certain cases that depend on the mode. */
3378 if (align < typealign)
3379 align = typealign;
3380 if (pcc) {
3381 /* In PCC layout a non-packed bit-field is placed adjacent
3382 to the preceding bit-fields, except if it would overflow
3383 its container (depending on base type) or it's a zero-width
3384 bit-field. Packed non-zero-width bit-fields always are
3385 placed adjacent. */
3386 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3387 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3388 if (bit_size == 0 ||
3389 ((typealign != 1 || size == 1) &&
3390 (ofs2 / (typealign * 8)) > (size/typealign))) {
3391 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3392 bit_pos = 0;
3393 } else while (bit_pos + bit_size > size * 8) {
3394 c += size;
3395 bit_pos -= size * 8;
3397 offset = c;
3398 /* In PCC layout named bit-fields influence the alignment
3399 of the containing struct using the base types alignment,
3400 except for packed fields (which here have correct
3401 align/typealign). */
3402 if ((f->v & SYM_FIRST_ANOM))
3403 align = 1;
3404 } else {
3405 bt = f->type.t & VT_BTYPE;
3406 if ((bit_pos + bit_size > size * 8) ||
3407 (bit_size > 0) == (bt != prevbt)) {
3408 c = (c + typealign - 1) & -typealign;
3409 offset = c;
3410 bit_pos = 0;
3411 /* In MS bitfield mode a bit-field run always uses
3412 at least as many bits as the underlying type.
3413 To start a new run it's also required that this
3414 or the last bit-field had non-zero width. */
3415 if (bit_size || prev_bit_size)
3416 c += size;
3418 /* In MS layout the records alignment is normally
3419 influenced by the field, except for a zero-width
3420 field at the start of a run (but by further zero-width
3421 fields it is again). */
3422 if (bit_size == 0 && prevbt != bt)
3423 align = 1;
3424 prevbt = bt;
3425 prev_bit_size = bit_size;
3427 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3428 | (bit_pos << VT_STRUCT_SHIFT);
3429 bit_pos += bit_size;
3430 if (pcc && bit_pos >= size * 8) {
3431 c += size;
3432 bit_pos -= size * 8;
3435 if (align > maxalign)
3436 maxalign = align;
3437 #if 0
3438 printf("set field %s offset=%d c=%d",
3439 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3440 if (f->type.t & VT_BITFIELD) {
3441 printf(" pos=%d size=%d",
3442 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3443 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3445 printf("\n");
3446 #endif
3448 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3449 Sym *ass;
3450 /* An anonymous struct/union. Adjust member offsets
3451 to reflect the real offset of our containing struct.
3452 Also set the offset of this anon member inside
3453 the outer struct to be zero. Via this it
3454 works when accessing the field offset directly
3455 (from base object), as well as when recursing
3456 members in initializer handling. */
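/* e.g. for 'struct S { struct { int x; }; int y; };' the anonymous
   member keeps offset 0 inside S and x stays reachable directly as
   s.x (illustrative example, not from the original source). */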
3457 int v2 = f->type.ref->v;
3458 if (!(v2 & SYM_FIELD) &&
3459 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3460 Sym **pps;
3461 /* This happens only with MS extensions. The
3462 anon member has a named struct type, so it
3463 potentially is shared with other references.
3464 We need to unshare members so we can modify
3465 them. */
3466 ass = f->type.ref;
3467 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3468 &f->type.ref->type, 0,
3469 f->type.ref->c);
3470 pps = &f->type.ref->next;
3471 while ((ass = ass->next) != NULL) {
3472 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3473 pps = &((*pps)->next);
3475 *pps = NULL;
3477 struct_add_offset(f->type.ref, offset);
3478 f->c = 0;
3479 } else {
3480 f->c = offset;
3483 f->r = 0;
3485 /* store size and alignment */
3486 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3487 + maxalign - 1) & -maxalign;
3488 type->ref->r = maxalign;
3491 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3492 static void struct_decl(CType *type, AttributeDef *ad, int u)
3494 int a, v, size, align, flexible, alignoverride;
3495 long c;
3496 int bit_size, bsize, bt;
3497 Sym *s, *ss, **ps;
3498 AttributeDef ad1;
3499 CType type1, btype;
3501 a = tok; /* save decl type */
3502 next();
3503 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3504 parse_attribute(ad);
3505 if (tok != '{') {
3506 v = tok;
3507 next();
3508 /* struct already defined ? return it */
3509 if (v < TOK_IDENT)
3510 expect("struct/union/enum name");
3511 s = struct_find(v);
3512 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3513 if (s->type.t != a)
3514 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3515 goto do_decl;
3517 } else {
3518 v = anon_sym++;
3520 /* Record the original enum/struct/union token. */
3521 type1.t = a;
3522 type1.ref = NULL;
3523 /* we put an undefined size for struct/union */
3524 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3525 s->r = 0; /* default alignment is zero as gcc */
3526 /* put struct/union/enum name in type */
3527 do_decl:
3528 type->t = u;
3529 type->ref = s;
3531 if (tok == '{') {
3532 next();
3533 if (s->c != -1)
3534 tcc_error("struct/union/enum already defined");
3535 /* cannot be empty */
3536 c = 0;
3537 /* empty enums are not allowed */
3538 if (a == TOK_ENUM) {
3539 int seen_neg = 0;
3540 int seen_wide = 0;
3541 for(;;) {
3542 CType *t = &int_type;
3543 v = tok;
3544 if (v < TOK_UIDENT)
3545 expect("identifier");
3546 ss = sym_find(v);
3547 if (ss && !local_stack)
3548 tcc_error("redefinition of enumerator '%s'",
3549 get_tok_str(v, NULL));
3550 next();
3551 if (tok == '=') {
3552 next();
3553 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3554 c = expr_const64();
3555 #else
3556 /* We really want to support long long enums
3557 on i386 as well, but the Sym structure only
3558 holds a 'long' for associated constants,
3559 and enlarging it would bump its size (no
3560 available padding). So punt for now. */
3561 c = expr_const();
3562 #endif
3564 if (c < 0)
3565 seen_neg = 1;
3566 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3567 seen_wide = 1, t = &size_type;
3568 /* enum symbols have static storage */
3569 ss = sym_push(v, t, VT_CONST, c);
3570 ss->type.t |= VT_STATIC;
3571 if (tok != ',')
3572 break;
3573 next();
3574 c++;
3575 /* NOTE: we accept a trailing comma */
3576 if (tok == '}')
3577 break;
3579 if (!seen_neg)
3580 s->a.unsigned_enum = 1;
3581 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3582 skip('}');
3583 } else {
3584 ps = &s->next;
3585 flexible = 0;
3586 while (tok != '}') {
3587 if (!parse_btype(&btype, &ad1)) {
3588 skip(';');
3589 continue;
3591 while (1) {
3592 if (flexible)
3593 tcc_error("flexible array member '%s' not at the end of struct",
3594 get_tok_str(v, NULL));
3595 bit_size = -1;
3596 v = 0;
3597 type1 = btype;
3598 if (tok != ':') {
3599 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3600 if (v == 0) {
3601 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3602 expect("identifier");
3603 else {
3604 int v = btype.ref->v;
3605 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3606 if (tcc_state->ms_extensions == 0)
3607 expect("identifier");
3611 if (type_size(&type1, &align) < 0) {
3612 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3613 flexible = 1;
3614 else
3615 tcc_error("field '%s' has incomplete type",
3616 get_tok_str(v, NULL));
3618 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3619 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3620 tcc_error("invalid type for '%s'",
3621 get_tok_str(v, NULL));
3623 if (tok == ':') {
3624 next();
3625 bit_size = expr_const();
3626 /* XXX: handle v = 0 case for messages */
3627 if (bit_size < 0)
3628 tcc_error("negative width in bit-field '%s'",
3629 get_tok_str(v, NULL));
3630 if (v && bit_size == 0)
3631 tcc_error("zero width for bit-field '%s'",
3632 get_tok_str(v, NULL));
3633 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3634 parse_attribute(&ad1);
3636 size = type_size(&type1, &align);
3637 /* Only remember non-default alignment. */
3638 alignoverride = 0;
3639 if (ad1.a.aligned) {
3640 int speca = 1 << (ad1.a.aligned - 1);
3641 alignoverride = speca;
3642 } else if (ad1.a.packed || ad->a.packed) {
3643 alignoverride = 1;
3644 } else if (*tcc_state->pack_stack_ptr) {
3645 if (align > *tcc_state->pack_stack_ptr)
3646 alignoverride = *tcc_state->pack_stack_ptr;
3648 if (bit_size >= 0) {
3649 bt = type1.t & VT_BTYPE;
3650 if (bt != VT_INT &&
3651 bt != VT_BYTE &&
3652 bt != VT_SHORT &&
3653 bt != VT_BOOL &&
3654 bt != VT_ENUM &&
3655 bt != VT_LLONG)
3656 tcc_error("bitfields must have scalar type");
3657 bsize = size * 8;
3658 if (bit_size > bsize) {
3659 tcc_error("width of '%s' exceeds its type",
3660 get_tok_str(v, NULL));
3661 } else if (bit_size == bsize) {
3662 /* no need for bit fields */
3664 } else {
3665 type1.t |= VT_BITFIELD |
3666 (0 << VT_STRUCT_SHIFT) |
3667 (bit_size << (VT_STRUCT_SHIFT + 6));
3670 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3671 /* Remember we've seen a real field to check
3672 for placement of flexible array member. */
3673 c = 1;
3675 /* If member is a struct or bit-field, enforce
3676 placing into the struct (as anonymous). */
3677 if (v == 0 &&
3678 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3679 bit_size >= 0)) {
3680 v = anon_sym++;
3682 if (v) {
3683 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3684 *ps = ss;
3685 ps = &ss->next;
3687 if (tok == ';' || tok == TOK_EOF)
3688 break;
3689 skip(',');
3691 skip(';');
3693 skip('}');
3694 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3695 parse_attribute(ad);
3696 struct_layout(type, ad);
3701 /* return 1 if basic type is a type size (short, long, long long) */
3702 ST_FUNC int is_btype_size(int bt)
3704 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3707 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3708 are added to the element type, copied because it could be a typedef. */
3709 static void parse_btype_qualify(CType *type, int qualifiers)
3711 while (type->t & VT_ARRAY) {
3712 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3713 type = &type->ref->type;
3715 type->t |= qualifiers;
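/* Example (illustrative): with 'typedef int A[4]; const A a;' the
   qualifier is pushed down to the element type, so 'a' behaves like
   'const int a[4]' while the typedef itself stays unmodified (hence
   the copy of the array sym above). */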
3718 /* return 0 if no type declaration. Otherwise, return the basic type
3719 and skip it.
3720 */
3721 static int parse_btype(CType *type, AttributeDef *ad)
3723 int t, u, bt_size, complete, type_found, typespec_found, g;
3724 Sym *s;
3725 CType type1;
3727 memset(ad, 0, sizeof(AttributeDef));
3728 complete = 0;
3729 type_found = 0;
3730 typespec_found = 0;
3731 t = 0;
3732 while(1) {
3733 switch(tok) {
3734 case TOK_EXTENSION:
3735 /* currently we simply ignore __extension__ */
3736 next();
3737 continue;
3739 /* basic types */
3740 case TOK_CHAR:
3741 u = VT_BYTE;
3742 basic_type:
3743 next();
3744 basic_type1:
3745 if (complete)
3746 tcc_error("too many basic types");
3747 t |= u;
3748 bt_size = is_btype_size (u & VT_BTYPE);
3749 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3750 complete = 1;
3751 typespec_found = 1;
3752 break;
3753 case TOK_VOID:
3754 u = VT_VOID;
3755 goto basic_type;
3756 case TOK_SHORT:
3757 u = VT_SHORT;
3758 goto basic_type;
3759 case TOK_INT:
3760 u = VT_INT;
3761 goto basic_type;
3762 case TOK_LONG:
3763 next();
3764 if ((t & VT_BTYPE) == VT_DOUBLE) {
3765 #ifndef TCC_TARGET_PE
3766 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3767 #endif
3768 } else if ((t & VT_BTYPE) == VT_LONG) {
3769 t = (t & ~VT_BTYPE) | VT_LLONG;
3770 } else {
3771 u = VT_LONG;
3772 goto basic_type1;
3774 break;
3775 #ifdef TCC_TARGET_ARM64
3776 case TOK_UINT128:
3777 /* GCC's __uint128_t appears in some Linux header files. Make it a
3778 synonym for long double to get the size and alignment right. */
3779 u = VT_LDOUBLE;
3780 goto basic_type;
3781 #endif
3782 case TOK_BOOL:
3783 u = VT_BOOL;
3784 goto basic_type;
3785 case TOK_FLOAT:
3786 u = VT_FLOAT;
3787 goto basic_type;
3788 case TOK_DOUBLE:
3789 next();
3790 if ((t & VT_BTYPE) == VT_LONG) {
3791 #ifdef TCC_TARGET_PE
3792 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3793 #else
3794 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3795 #endif
3796 } else {
3797 u = VT_DOUBLE;
3798 goto basic_type1;
3800 break;
3801 case TOK_ENUM:
3802 struct_decl(&type1, ad, VT_ENUM);
3803 basic_type2:
3804 u = type1.t;
3805 type->ref = type1.ref;
3806 goto basic_type1;
3807 case TOK_STRUCT:
3808 case TOK_UNION:
3809 struct_decl(&type1, ad, VT_STRUCT);
3810 goto basic_type2;
3812 /* type modifiers */
3813 case TOK_CONST1:
3814 case TOK_CONST2:
3815 case TOK_CONST3:
3816 type->t = t;
3817 parse_btype_qualify(type, VT_CONSTANT);
3818 t = type->t;
3819 next();
3820 break;
3821 case TOK_VOLATILE1:
3822 case TOK_VOLATILE2:
3823 case TOK_VOLATILE3:
3824 type->t = t;
3825 parse_btype_qualify(type, VT_VOLATILE);
3826 t = type->t;
3827 next();
3828 break;
3829 case TOK_SIGNED1:
3830 case TOK_SIGNED2:
3831 case TOK_SIGNED3:
3832 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3833 tcc_error("signed and unsigned modifier");
3834 typespec_found = 1;
3835 t |= VT_DEFSIGN;
3836 next();
3837 break;
3838 case TOK_REGISTER:
3839 case TOK_AUTO:
3840 case TOK_RESTRICT1:
3841 case TOK_RESTRICT2:
3842 case TOK_RESTRICT3:
3843 next();
3844 break;
3845 case TOK_UNSIGNED:
3846 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3847 tcc_error("signed and unsigned modifier");
3848 t |= VT_DEFSIGN | VT_UNSIGNED;
3849 next();
3850 typespec_found = 1;
3851 break;
3853 /* storage */
3854 case TOK_EXTERN:
3855 g = VT_EXTERN;
3856 goto storage;
3857 case TOK_STATIC:
3858 g = VT_STATIC;
3859 goto storage;
3860 case TOK_TYPEDEF:
3861 g = VT_TYPEDEF;
3862 goto storage;
3863 storage:
3864 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3865 tcc_error("multiple storage classes");
3866 t |= g;
3867 next();
3868 break;
3869 case TOK_INLINE1:
3870 case TOK_INLINE2:
3871 case TOK_INLINE3:
3872 t |= VT_INLINE;
3873 next();
3874 break;
3876 /* GNUC attribute */
3877 case TOK_ATTRIBUTE1:
3878 case TOK_ATTRIBUTE2:
3879 parse_attribute(ad);
3880 if (ad->a.mode) {
3881 u = ad->a.mode -1;
3882 t = (t & ~VT_BTYPE) | u;
3884 break;
3885 /* GNUC typeof */
3886 case TOK_TYPEOF1:
3887 case TOK_TYPEOF2:
3888 case TOK_TYPEOF3:
3889 next();
3890 parse_expr_type(&type1);
3891 /* remove all storage modifiers except typedef */
3892 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3893 goto basic_type2;
3894 default:
3895 if (typespec_found)
3896 goto the_end;
3897 s = sym_find(tok);
3898 if (!s || !(s->type.t & VT_TYPEDEF))
3899 goto the_end;
3901 type->t = ((s->type.t & ~VT_TYPEDEF) |
3902 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3903 type->ref = s->type.ref;
3904 if (t & (VT_CONSTANT | VT_VOLATILE))
3905 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3906 t = type->t;
3908 if (s->r) {
3909 /* get attributes from typedef */
3910 if (0 == ad->a.aligned)
3911 ad->a.aligned = s->a.aligned;
3912 if (0 == ad->a.func_call)
3913 ad->a.func_call = s->a.func_call;
3914 ad->a.packed |= s->a.packed;
3916 next();
3917 typespec_found = 1;
3918 break;
3920 type_found = 1;
3922 the_end:
3923 if (tcc_state->char_is_unsigned) {
3924 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3925 t |= VT_UNSIGNED;
3928 /* long is never used as type */
3929 if ((t & VT_BTYPE) == VT_LONG)
3930 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3931 defined TCC_TARGET_PE
3932 t = (t & ~VT_BTYPE) | VT_INT;
3933 #else
3934 t = (t & ~VT_BTYPE) | VT_LLONG;
3935 #endif
3936 type->t = t;
3937 return type_found;
3940 /* convert a function parameter type (array to pointer and function to
3941 function pointer) */
3942 static inline void convert_parameter_type(CType *pt)
3944 /* remove const and volatile qualifiers (XXX: const could be used
3945 to indicate a const function parameter) */
3946 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3947 /* array must be transformed to pointer according to ANSI C */
3948 pt->t &= ~VT_ARRAY;
3949 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3950 mk_pointer(pt);
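/* Example (illustrative): a parameter declared as 'int v[10]' is
   adjusted to 'int *v', and a parameter declared as 'int f(int)' is
   adjusted to 'int (*f)(int)', as required for function parameters. */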
3954 ST_FUNC void parse_asm_str(CString *astr)
3956 skip('(');
3957 parse_mult_str(astr, "string constant");
3960 /* Parse an asm label and return the token */
3961 static int asm_label_instr(void)
3963 int v;
3964 CString astr;
3966 next();
3967 parse_asm_str(&astr);
3968 skip(')');
3969 #ifdef ASM_DEBUG
3970 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3971 #endif
3972 v = tok_alloc(astr.data, astr.size - 1)->tok;
3973 cstr_free(&astr);
3974 return v;
3977 static void post_type(CType *type, AttributeDef *ad, int storage)
3979 int n, l, t1, arg_size, align;
3980 Sym **plast, *s, *first;
3981 AttributeDef ad1;
3982 CType pt;
3984 if (tok == '(') {
3985 /* function declaration */
3986 next();
3987 l = 0;
3988 first = NULL;
3989 plast = &first;
3990 arg_size = 0;
3991 if (tok != ')') {
3992 for(;;) {
3993 /* read param name and compute offset */
3994 if (l != FUNC_OLD) {
3995 if (!parse_btype(&pt, &ad1)) {
3996 if (l) {
3997 tcc_error("invalid type");
3998 } else {
3999 l = FUNC_OLD;
4000 goto old_proto;
4003 l = FUNC_NEW;
4004 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4005 break;
4006 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4007 if ((pt.t & VT_BTYPE) == VT_VOID)
4008 tcc_error("parameter declared as void");
4009 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4010 } else {
4011 old_proto:
4012 n = tok;
4013 if (n < TOK_UIDENT)
4014 expect("identifier");
4015 pt.t = VT_INT;
4016 next();
4018 convert_parameter_type(&pt);
4019 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4020 *plast = s;
4021 plast = &s->next;
4022 if (tok == ')')
4023 break;
4024 skip(',');
4025 if (l == FUNC_NEW && tok == TOK_DOTS) {
4026 l = FUNC_ELLIPSIS;
4027 next();
4028 break;
4032 /* if no parameters, then old type prototype */
4033 if (l == 0)
4034 l = FUNC_OLD;
4035 skip(')');
4036 /* NOTE: const is ignored in returned type as it has a special
4037 meaning in gcc / C++ */
4038 type->t &= ~VT_CONSTANT;
4039 /* some ancient pre-K&R C allows a function to return an array
4040 and the array brackets to be put after the arguments, such
4041 that "int c()[]" means something like "int[] c()" */
4042 if (tok == '[') {
4043 next();
4044 skip(']'); /* only handle simple "[]" */
4045 type->t |= VT_PTR;
4047 /* we push an anonymous symbol which will contain the function prototype */
4048 ad->a.func_args = arg_size;
4049 s = sym_push(SYM_FIELD, type, 0, l);
4050 s->a = ad->a;
4051 s->next = first;
4052 type->t = VT_FUNC;
4053 type->ref = s;
4054 } else if (tok == '[') {
4055 int saved_nocode_wanted = nocode_wanted;
4056 /* array definition */
4057 next();
4058 if (tok == TOK_RESTRICT1)
4059 next();
4060 n = -1;
4061 t1 = 0;
4062 if (tok != ']') {
4063 if (!local_stack || (storage & VT_STATIC))
4064 vpushi(expr_const());
4065 else {
4066 /* A VLA length (which can only happen with local_stack && !VT_STATIC)
4067 must always be evaluated, even under nocode_wanted,
4068 so that its size slot is initialized (e.g. under sizeof
4069 or typeof). */
4070 nocode_wanted = 0;
4071 gexpr();
4073 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4074 n = vtop->c.i;
4075 if (n < 0)
4076 tcc_error("invalid array size");
4077 } else {
4078 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4079 tcc_error("size of variable length array should be an integer");
4080 t1 = VT_VLA;
4083 skip(']');
4084 /* parse next post type */
4085 post_type(type, ad, storage);
4086 if (type->t == VT_FUNC)
4087 tcc_error("declaration of an array of functions");
4088 t1 |= type->t & VT_VLA;
4090 if (t1 & VT_VLA) {
4091 loc -= type_size(&int_type, &align);
4092 loc &= -align;
4093 n = loc;
4095 vla_runtime_type_size(type, &align);
4096 gen_op('*');
4097 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4098 vswap();
4099 vstore();
4101 if (n != -1)
4102 vpop();
4103 nocode_wanted = saved_nocode_wanted;
4105 /* we push an anonymous symbol which will contain the array
4106 element type */
4107 s = sym_push(SYM_FIELD, type, 0, n);
4108 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4109 type->ref = s;
4113 /* Parse a type declaration (except basic type), and return the type
4114 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4115 expected. 'type' should contain the basic type. 'ad' is the
4116 attribute definition of the basic type. It can be modified by
4117 type_decl().
4118 */
4119 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4121 Sym *s;
4122 CType type1, *type2;
4123 int qualifiers, storage;
4125 while (tok == '*') {
4126 qualifiers = 0;
4127 redo:
4128 next();
4129 switch(tok) {
4130 case TOK_CONST1:
4131 case TOK_CONST2:
4132 case TOK_CONST3:
4133 qualifiers |= VT_CONSTANT;
4134 goto redo;
4135 case TOK_VOLATILE1:
4136 case TOK_VOLATILE2:
4137 case TOK_VOLATILE3:
4138 qualifiers |= VT_VOLATILE;
4139 goto redo;
4140 case TOK_RESTRICT1:
4141 case TOK_RESTRICT2:
4142 case TOK_RESTRICT3:
4143 goto redo;
4144 /* XXX: clarify attribute handling */
4145 case TOK_ATTRIBUTE1:
4146 case TOK_ATTRIBUTE2:
4147 parse_attribute(ad);
4148 break;
4150 mk_pointer(type);
4151 type->t |= qualifiers;
4154 /* recursive type */
4155 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4156 type1.t = 0; /* XXX: same as int */
4157 if (tok == '(') {
4158 next();
4159 /* XXX: this is not correct to modify 'ad' at this point, but
4160 the syntax is not clear */
4161 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4162 parse_attribute(ad);
4163 type_decl(&type1, ad, v, td);
4164 skip(')');
4165 } else {
4166 /* type identifier */
4167 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4168 *v = tok;
4169 next();
4170 } else {
4171 if (!(td & TYPE_ABSTRACT))
4172 expect("identifier");
4173 *v = 0;
4176 storage = type->t & VT_STORAGE;
4177 type->t &= ~VT_STORAGE;
4178 post_type(type, ad, storage);
4179 type->t |= storage;
4180 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4181 parse_attribute(ad);
4183 if (!type1.t)
4184 return;
4185 /* append type at the end of type1 */
4186 type2 = &type1;
4187 for(;;) {
4188 s = type2->ref;
4189 type2 = &s->type;
4190 if (!type2->t) {
4191 *type2 = *type;
4192 break;
4195 *type = type1;
4196 type->t |= storage;
4199 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4200 ST_FUNC int lvalue_type(int t)
4202 int bt, r;
4203 r = VT_LVAL;
4204 bt = t & VT_BTYPE;
4205 if (bt == VT_BYTE || bt == VT_BOOL)
4206 r |= VT_LVAL_BYTE;
4207 else if (bt == VT_SHORT)
4208 r |= VT_LVAL_SHORT;
4209 else
4210 return r;
4211 if (t & VT_UNSIGNED)
4212 r |= VT_LVAL_UNSIGNED;
4213 return r;
4216 /* indirection with full error checking and bound check */
4217 ST_FUNC void indir(void)
4219 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4220 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4221 return;
4222 expect("pointer");
4224 if (vtop->r & VT_LVAL)
4225 gv(RC_INT);
4226 vtop->type = *pointed_type(&vtop->type);
4227 /* Arrays and functions are never lvalues */
4228 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4229 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4230 vtop->r |= lvalue_type(vtop->type.t);
4231 /* if bound checking, the referenced pointer must be checked */
4232 #ifdef CONFIG_TCC_BCHECK
4233 if (tcc_state->do_bounds_check)
4234 vtop->r |= VT_MUSTBOUND;
4235 #endif
4239 /* pass a parameter to a function and do type checking and casting */
4240 static void gfunc_param_typed(Sym *func, Sym *arg)
4242 int func_type;
4243 CType type;
4245 func_type = func->c;
4246 if (func_type == FUNC_OLD ||
4247 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4248 /* default casting : only need to convert float to double */
4249 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4250 type.t = VT_DOUBLE;
4251 gen_cast(&type);
4252 } else if (vtop->type.t & VT_BITFIELD) {
4253 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4254 type.ref = vtop->type.ref;
4255 gen_cast(&type);
4257 } else if (arg == NULL) {
4258 tcc_error("too many arguments to function");
4259 } else {
4260 type = arg->type;
4261 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4262 gen_assign_cast(&type);
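/* Example (illustrative, not from the original source): in
   'printf("%f", 1.0f)' the float argument matches the "..." part and
   goes through the default promotion above, so it is passed as a
   double, whereas a prototyped 'void f(float)' keeps it as float via
   gen_assign_cast(). */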
4266 /* parse an expression of the form '(type)' or '(expr)' and return its
4267 type */
4268 static void parse_expr_type(CType *type)
4270 int n;
4271 AttributeDef ad;
4273 skip('(');
4274 if (parse_btype(type, &ad)) {
4275 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4276 } else {
4277 expr_type(type);
4279 skip(')');
4282 static void parse_type(CType *type)
4284 AttributeDef ad;
4285 int n;
4287 if (!parse_btype(type, &ad)) {
4288 expect("type");
4290 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4293 static void vpush_tokc(int t)
4295 CType type;
4296 type.t = t;
4297 type.ref = 0;
4298 vsetc(&type, VT_CONST, &tokc);
4301 ST_FUNC void unary(void)
4303 int n, t, align, size, r, sizeof_caller;
4304 CType type;
4305 Sym *s;
4306 AttributeDef ad;
4308 sizeof_caller = in_sizeof;
4309 in_sizeof = 0;
4310 /* XXX: GCC 2.95.3 does not generate a table although it should be
4311 better here */
4312 tok_next:
4313 switch(tok) {
4314 case TOK_EXTENSION:
4315 next();
4316 goto tok_next;
4317 case TOK_CINT:
4318 case TOK_CCHAR:
4319 case TOK_LCHAR:
4320 vpushi(tokc.i);
4321 next();
4322 break;
4323 case TOK_CUINT:
4324 vpush_tokc(VT_INT | VT_UNSIGNED);
4325 next();
4326 break;
4327 case TOK_CLLONG:
4328 vpush_tokc(VT_LLONG);
4329 next();
4330 break;
4331 case TOK_CULLONG:
4332 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4333 next();
4334 break;
4335 case TOK_CFLOAT:
4336 vpush_tokc(VT_FLOAT);
4337 next();
4338 break;
4339 case TOK_CDOUBLE:
4340 vpush_tokc(VT_DOUBLE);
4341 next();
4342 break;
4343 case TOK_CLDOUBLE:
4344 vpush_tokc(VT_LDOUBLE);
4345 next();
4346 break;
4347 case TOK___FUNCTION__:
4348 if (!gnu_ext)
4349 goto tok_identifier;
4350 /* fall thru */
4351 case TOK___FUNC__:
4353 void *ptr;
4354 int len;
4355 /* special function name identifier */
4356 len = strlen(funcname) + 1;
4357 /* generate char[len] type */
4358 type.t = VT_BYTE;
4359 mk_pointer(&type);
4360 type.t |= VT_ARRAY;
4361 type.ref->c = len;
4362 vpush_ref(&type, data_section, data_section->data_offset, len);
4363 ptr = section_ptr_add(data_section, len);
4364 memcpy(ptr, funcname, len);
4365 next();
4367 break;
4368 case TOK_LSTR:
4369 #ifdef TCC_TARGET_PE
4370 t = VT_SHORT | VT_UNSIGNED;
4371 #else
4372 t = VT_INT;
4373 #endif
4374 goto str_init;
4375 case TOK_STR:
4376 /* string parsing */
4377 t = VT_BYTE;
4378 str_init:
4379 if (tcc_state->warn_write_strings)
4380 t |= VT_CONSTANT;
4381 type.t = t;
4382 mk_pointer(&type);
4383 type.t |= VT_ARRAY;
4384 memset(&ad, 0, sizeof(AttributeDef));
4385 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4386 break;
4387 case '(':
4388 next();
4389 /* cast ? */
4390 if (parse_btype(&type, &ad)) {
4391 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4392 skip(')');
4393 /* check ISOC99 compound literal */
4394 if (tok == '{') {
4395 /* data is allocated locally by default */
4396 if (global_expr)
4397 r = VT_CONST;
4398 else
4399 r = VT_LOCAL;
4400 /* all except arrays are lvalues */
4401 if (!(type.t & VT_ARRAY))
4402 r |= lvalue_type(type.t);
4403 memset(&ad, 0, sizeof(AttributeDef));
4404 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4405 } else {
4406 if (sizeof_caller) {
4407 vpush(&type);
4408 return;
4410 unary();
4411 gen_cast(&type);
4413 } else if (tok == '{') {
4414 int saved_nocode_wanted = nocode_wanted;
4415 if (const_wanted)
4416 tcc_error("expected constant");
4417 /* save all registers */
4418 save_regs(0);
4419 /* statement expression: we do not accept break/continue
4420 inside as GCC does. We do retain the nocode_wanted state,
4421 as statement expressions can't ever be entered from the
4422 outside, so any reactivation of code emission (from labels
4423 or loop heads) can be disabled again after the end of it. */
4424 block(NULL, NULL, 1);
4425 nocode_wanted = saved_nocode_wanted;
4426 skip(')');
4427 } else {
4428 gexpr();
4429 skip(')');
4431 break;
4432 case '*':
4433 next();
4434 unary();
4435 indir();
4436 break;
4437 case '&':
4438 next();
4439 unary();
4440 /* function names must be treated as function pointers,
4441 except for unary '&' and sizeof. Since we consider that
4442 functions are not lvalues, we only have to handle them
4443 here and in function calls. */
4444 /* arrays can also be used although they are not lvalues */
4445 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4446 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4447 test_lvalue();
4448 mk_pointer(&vtop->type);
4449 gaddrof();
4450 break;
4451 case '!':
4452 next();
4453 unary();
4454 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4455 CType boolean;
4456 boolean.t = VT_BOOL;
4457 gen_cast(&boolean);
4458 vtop->c.i = !vtop->c.i;
4459 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4460 vtop->c.i ^= 1;
4461 else {
4462 save_regs(1);
4463 vseti(VT_JMP, gvtst(1, 0));
4465 break;
4466 case '~':
4467 next();
4468 unary();
4469 vpushi(-1);
4470 gen_op('^');
4471 break;
4472 case '+':
4473 next();
4474 unary();
4475 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4476 tcc_error("pointer not accepted for unary plus");
4477 /* In order to force a cast, we add zero, except for floating point
4478 where we really need a no-op (otherwise -0.0 would be transformed
4479 into +0.0). */
4480 if (!is_float(vtop->type.t)) {
4481 vpushi(0);
4482 gen_op('+');
4484 break;
4485 case TOK_SIZEOF:
4486 case TOK_ALIGNOF1:
4487 case TOK_ALIGNOF2:
4488 t = tok;
4489 next();
4490 in_sizeof++;
4491 unary_type(&type); // also resets in_sizeof to 0 (done at the top of unary())
4492 size = type_size(&type, &align);
4493 if (t == TOK_SIZEOF) {
4494 if (!(type.t & VT_VLA)) {
4495 if (size < 0)
4496 tcc_error("sizeof applied to an incomplete type");
4497 vpushs(size);
4498 } else {
4499 vla_runtime_type_size(&type, &align);
4501 } else {
4502 vpushs(align);
4504 vtop->type.t |= VT_UNSIGNED;
4505 break;
4507 case TOK_builtin_expect:
4509 /* __builtin_expect is a no-op for now */
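/* both arguments are parsed normally, then the expected value is
   discarded with vpop() so that only the first expression's value is
   left on the value stack: __builtin_expect(x, 1) behaves like (x) */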
4510 next();
4511 skip('(');
4512 expr_eq();
4513 skip(',');
4514 expr_eq();
4515 vpop();
4516 skip(')');
4518 break;
4519 case TOK_builtin_types_compatible_p:
4521 CType type1, type2;
4522 next();
4523 skip('(');
4524 parse_type(&type1);
4525 skip(',');
4526 parse_type(&type2);
4527 skip(')');
4528 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4529 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4530 vpushi(is_compatible_types(&type1, &type2));
4532 break;
4533 case TOK_builtin_choose_expr:
4535 int64_t c;
4536 next();
4537 skip('(');
4538 c = expr_const64();
4539 skip(',');
4540 if (!c) {
4541 nocode_wanted++;
4543 expr_eq();
4544 if (!c) {
4545 vpop();
4546 nocode_wanted--;
4548 skip(',');
4549 if (c) {
4550 nocode_wanted++;
4552 expr_eq();
4553 if (c) {
4554 vpop();
4555 nocode_wanted--;
4557 skip(')');
4559 break;
4560 case TOK_builtin_constant_p:
4562 int res;
4563 next();
4564 skip('(');
4565 nocode_wanted++;
4566 expr_eq();
4567 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4568 vpop();
4569 nocode_wanted--;
4570 skip(')');
4571 vpushi(res);
4573 break;
4574 case TOK_builtin_frame_address:
4575 case TOK_builtin_return_address:
4577 int tok1 = tok;
4578 int level;
4579 CType type;
4580 next();
4581 skip('(');
4582 if (tok != TOK_CINT) {
4583 tcc_error("%s only takes positive integers",
4584 tok1 == TOK_builtin_return_address ?
4585 "__builtin_return_address" :
4586 "__builtin_frame_address");
4588 level = (uint32_t)tokc.i;
4589 next();
4590 skip(')');
4591 type.t = VT_VOID;
4592 mk_pointer(&type);
4593 vset(&type, VT_LOCAL, 0); /* local frame */
4594 while (level--) {
4595 mk_pointer(&vtop->type);
4596 indir(); /* -> parent frame */
4598 if (tok1 == TOK_builtin_return_address) {
4599 // assume return address is just above frame pointer on stack
4600 vpushi(PTR_SIZE);
4601 gen_op('+');
4602 mk_pointer(&vtop->type);
4603 indir();
4606 break;
4607 #ifdef TCC_TARGET_X86_64
4608 #ifdef TCC_TARGET_PE
4609 case TOK_builtin_va_start:
4611 next();
4612 skip('(');
4613 expr_eq();
4614 skip(',');
4615 expr_eq();
4616 skip(')');
4617 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4618 tcc_error("__builtin_va_start expects a local variable");
4619 vtop->r &= ~(VT_LVAL | VT_REF);
4620 vtop->type = char_pointer_type;
4621 vtop->c.i += 8;
4622 vstore();
4624 break;
4625 #else
4626 case TOK_builtin_va_arg_types:
4628 CType type;
4629 next();
4630 skip('(');
4631 parse_type(&type);
4632 skip(')');
4633 vpushi(classify_x86_64_va_arg(&type));
4635 break;
4636 #endif
4637 #endif
4639 #ifdef TCC_TARGET_ARM64
4640 case TOK___va_start: {
4641 next();
4642 skip('(');
4643 expr_eq();
4644 skip(',');
4645 expr_eq();
4646 skip(')');
4647 //xx check types
4648 gen_va_start();
4649 vpushi(0);
4650 vtop->type.t = VT_VOID;
4651 break;
4653 case TOK___va_arg: {
4654 CType type;
4655 next();
4656 skip('(');
4657 expr_eq();
4658 skip(',');
4659 parse_type(&type);
4660 skip(')');
4661 //xx check types
4662 gen_va_arg(&type);
4663 vtop->type = type;
4664 break;
4666 case TOK___arm64_clear_cache: {
4667 next();
4668 skip('(');
4669 expr_eq();
4670 skip(',');
4671 expr_eq();
4672 skip(')');
4673 gen_clear_cache();
4674 vpushi(0);
4675 vtop->type.t = VT_VOID;
4676 break;
4678 #endif
4679 /* pre operations */
4680 case TOK_INC:
4681 case TOK_DEC:
4682 t = tok;
4683 next();
4684 unary();
4685 inc(0, t);
4686 break;
4687 case '-':
4688 next();
4689 unary();
4690 t = vtop->type.t & VT_BTYPE;
4691 if (is_float(t)) {
4692 /* In IEEE negate(x) isn't subtract(0,x), but rather
4693 subtract(-0, x). */
4694 vpush(&vtop->type);
4695 if (t == VT_FLOAT)
4696 vtop->c.f = -1.0 * 0.0;
4697 else if (t == VT_DOUBLE)
4698 vtop->c.d = -1.0 * 0.0;
4699 else
4700 vtop->c.ld = -1.0 * 0.0;
4701 } else
4702 vpushi(0);
4703 vswap();
4704 gen_op('-');
4705 break;
4706 case TOK_LAND:
4707 if (!gnu_ext)
4708 goto tok_identifier;
4709 next();
4710 /* allow taking the address of a label */
4711 if (tok < TOK_UIDENT)
4712 expect("label identifier");
4713 s = label_find(tok);
4714 if (!s) {
4715 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4716 } else {
4717 if (s->r == LABEL_DECLARED)
4718 s->r = LABEL_FORWARD;
4720 if (!s->type.t) {
4721 s->type.t = VT_VOID;
4722 mk_pointer(&s->type);
4723 s->type.t |= VT_STATIC;
4725 vpushsym(&s->type, s);
4726 next();
4727 break;
4729 // special qnan, snan and infinity values
4730 case TOK___NAN__:
4731 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4732 next();
4733 break;
4734 case TOK___SNAN__:
4735 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4736 next();
4737 break;
4738 case TOK___INF__:
4739 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4740 next();
4741 break;
4743 default:
4744 tok_identifier:
4745 t = tok;
4746 next();
4747 if (t < TOK_UIDENT)
4748 expect("identifier");
4749 s = sym_find(t);
4750 if (!s) {
4751 const char *name = get_tok_str(t, NULL);
4752 if (tok != '(')
4753 tcc_error("'%s' undeclared", name);
4754 /* for simple function calls, we tolerate undeclared
4755 external reference to int() function */
4756 if (tcc_state->warn_implicit_function_declaration
4757 #ifdef TCC_TARGET_PE
4758 /* people must be warned about using undeclared WINAPI functions
4759 (which usually start with an uppercase letter) */
4760 || (name[0] >= 'A' && name[0] <= 'Z')
4761 #endif
4763 tcc_warning("implicit declaration of function '%s'", name);
4764 s = external_global_sym(t, &func_old_type, 0);
4767 r = s->r;
4768 /* A symbol that has a register is a local register variable,
4769 which starts out as VT_LOCAL value. */
4770 if ((r & VT_VALMASK) < VT_CONST)
4771 r = (r & ~VT_VALMASK) | VT_LOCAL;
4773 vset(&s->type, r, s->c);
4774 /* Point to s as backpointer (even without r&VT_SYM).
4775 Will be used by at least the x86 inline asm parser for
4776 regvars. */
4777 vtop->sym = s;
4778 if (vtop->r & VT_SYM) {
4779 vtop->c.i = 0;
4781 break;
4784 /* post operations */
4785 while (1) {
4786 if (tok == TOK_INC || tok == TOK_DEC) {
4787 inc(1, tok);
4788 next();
4789 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4790 int qualifiers;
4791 /* field */
4792 if (tok == TOK_ARROW)
4793 indir();
4794 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4795 test_lvalue();
4796 gaddrof();
4797 /* expect pointer on structure */
4798 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4799 expect("struct or union");
4800 if (tok == TOK_CDOUBLE)
4801 expect("field name");
4802 next();
4803 if (tok == TOK_CINT || tok == TOK_CUINT)
4804 expect("field name");
4805 s = find_field(&vtop->type, tok);
4806 if (!s)
4807 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4808 /* add field offset to pointer */
4809 vtop->type = char_pointer_type; /* change type to 'char *' */
4810 vpushi(s->c);
4811 gen_op('+');
4812 /* change type to field type, and set to lvalue */
4813 vtop->type = s->type;
4814 vtop->type.t |= qualifiers;
4815 /* an array is never an lvalue */
4816 if (!(vtop->type.t & VT_ARRAY)) {
4817 vtop->r |= lvalue_type(vtop->type.t);
4818 #ifdef CONFIG_TCC_BCHECK
4819 /* if bound checking, the referenced pointer must be checked */
4820 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4821 vtop->r |= VT_MUSTBOUND;
4822 #endif
4824 next();
4825 } else if (tok == '[') {
4826 next();
4827 gexpr();
4828 gen_op('+');
4829 indir();
4830 skip(']');
4831 } else if (tok == '(') {
4832 SValue ret;
4833 Sym *sa;
4834 int nb_args, ret_nregs, ret_align, regsize, variadic;
4836 /* function call */
4837 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4838 /* pointer test (no array accepted) */
4839 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4840 vtop->type = *pointed_type(&vtop->type);
4841 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4842 goto error_func;
4843 } else {
4844 error_func:
4845 expect("function pointer");
4847 } else {
4848 vtop->r &= ~VT_LVAL; /* no lvalue */
4850 /* get return type */
4851 s = vtop->type.ref;
4852 next();
4853 sa = s->next; /* first parameter */
4854 nb_args = regsize = 0;
4855 ret.r2 = VT_CONST;
4856 /* compute first implicit argument if a structure is returned */
4857 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4858 variadic = (s->c == FUNC_ELLIPSIS);
4859 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4860 &ret_align, &regsize);
4861 if (!ret_nregs) {
4862 /* get some space for the returned structure */
4863 size = type_size(&s->type, &align);
4864 #ifdef TCC_TARGET_ARM64
4865 /* On arm64, a small struct is returned in registers.
4866 It is much easier to write it to memory if we know
4867 that we are allowed to write some extra bytes, so
4868 round the allocated space up to a power of 2: */
4869 if (size < 16)
4870 while (size & (size - 1))
4871 size = (size | (size - 1)) + 1;
4872 #endif
4873 loc = (loc - size) & -align;
4874 ret.type = s->type;
4875 ret.r = VT_LOCAL | VT_LVAL;
4876 /* pass it as 'int' to avoid structure arg passing
4877 problems */
4878 vseti(VT_LOCAL, loc);
4879 ret.c = vtop->c;
4880 nb_args++;
4882 } else {
4883 ret_nregs = 1;
4884 ret.type = s->type;
4887 if (ret_nregs) {
4888 /* return in register */
4889 if (is_float(ret.type.t)) {
4890 ret.r = reg_fret(ret.type.t);
4891 #ifdef TCC_TARGET_X86_64
4892 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4893 ret.r2 = REG_QRET;
4894 #endif
4895 } else {
4896 #ifndef TCC_TARGET_ARM64
4897 #ifdef TCC_TARGET_X86_64
4898 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4899 #else
4900 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4901 #endif
4902 ret.r2 = REG_LRET;
4903 #endif
4904 ret.r = REG_IRET;
4906 ret.c.i = 0;
4908 if (tok != ')') {
4909 for(;;) {
4910 expr_eq();
4911 gfunc_param_typed(s, sa);
4912 nb_args++;
4913 if (sa)
4914 sa = sa->next;
4915 if (tok == ')')
4916 break;
4917 skip(',');
4920 if (sa)
4921 tcc_error("too few arguments to function");
4922 skip(')');
4923 gfunc_call(nb_args);
4925 /* return value */
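/* push one SValue per register holding the return value; if the value
   is not returned in registers (ret_nregs == 0), a single SValue
   referring to the memory slot that receives the struct is pushed */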
4926 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4927 vsetc(&ret.type, r, &ret.c);
4928 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4931 /* handle packed struct return */
4932 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4933 int addr, offset;
4935 size = type_size(&s->type, &align);
4936 /* We're writing whole regs often, make sure there's enough
4937 space. Assume register size is power of 2. */
4938 if (regsize > align)
4939 align = regsize;
4940 loc = (loc - size) & -align;
4941 addr = loc;
4942 offset = 0;
4943 for (;;) {
4944 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4945 vswap();
4946 vstore();
4947 vtop--;
4948 if (--ret_nregs == 0)
4949 break;
4950 offset += regsize;
4952 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4954 } else {
4955 break;
4960 ST_FUNC void expr_prod(void)
4962 int t;
4964 unary();
4965 while (tok == '*' || tok == '/' || tok == '%') {
4966 t = tok;
4967 next();
4968 unary();
4969 gen_op(t);
4973 ST_FUNC void expr_sum(void)
4975 int t;
4977 expr_prod();
4978 while (tok == '+' || tok == '-') {
4979 t = tok;
4980 next();
4981 expr_prod();
4982 gen_op(t);
4986 static void expr_shift(void)
4988 int t;
4990 expr_sum();
4991 while (tok == TOK_SHL || tok == TOK_SAR) {
4992 t = tok;
4993 next();
4994 expr_sum();
4995 gen_op(t);
4999 static void expr_cmp(void)
5001 int t;
5003 expr_shift();
5004 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5005 tok == TOK_ULT || tok == TOK_UGE) {
5006 t = tok;
5007 next();
5008 expr_shift();
5009 gen_op(t);
5013 static void expr_cmpeq(void)
5015 int t;
5017 expr_cmp();
5018 while (tok == TOK_EQ || tok == TOK_NE) {
5019 t = tok;
5020 next();
5021 expr_cmp();
5022 gen_op(t);
5026 static void expr_and(void)
5028 expr_cmpeq();
5029 while (tok == '&') {
5030 next();
5031 expr_cmpeq();
5032 gen_op('&');
5036 static void expr_xor(void)
5038 expr_and();
5039 while (tok == '^') {
5040 next();
5041 expr_and();
5042 gen_op('^');
5046 static void expr_or(void)
5048 expr_xor();
5049 while (tok == '|') {
5050 next();
5051 expr_xor();
5052 gen_op('|');
5056 static void expr_land(void)
5058 expr_or();
5059 if (tok == TOK_LAND) {
5060 int t = 0;
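/* t accumulates the pending forward jumps of the short-circuit chain;
   constant operands are folded immediately, and once a constant 0 is
   seen the remaining operands are parsed with nocode_wanted set so no
   code is emitted for them */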
5061 for(;;) {
5062 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5063 CType ctb;
5064 ctb.t = VT_BOOL;
5065 gen_cast(&ctb);
5066 if (vtop->c.i) {
5067 vpop();
5068 } else {
5069 nocode_wanted++;
5070 while (tok == TOK_LAND) {
5071 next();
5072 expr_or();
5073 vpop();
5075 nocode_wanted--;
5076 if (t)
5077 gsym(t);
5078 gen_cast(&int_type);
5079 break;
5081 } else {
5082 if (!t)
5083 save_regs(1);
5084 t = gvtst(1, t);
5086 if (tok != TOK_LAND) {
5087 if (t)
5088 vseti(VT_JMPI, t);
5089 else
5090 vpushi(1);
5091 break;
5093 next();
5094 expr_or();
5099 static void expr_lor(void)
5101 expr_land();
5102 if (tok == TOK_LOR) {
5103 int t = 0;
5104 for(;;) {
5105 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5106 CType ctb;
5107 ctb.t = VT_BOOL;
5108 gen_cast(&ctb);
5109 if (!vtop->c.i) {
5110 vpop();
5111 } else {
5112 nocode_wanted++;
5113 while (tok == TOK_LOR) {
5114 next();
5115 expr_land();
5116 vpop();
5118 nocode_wanted--;
5119 if (t)
5120 gsym(t);
5121 gen_cast(&int_type);
5122 break;
5124 } else {
5125 if (!t)
5126 save_regs(1);
5127 t = gvtst(0, t);
5129 if (tok != TOK_LOR) {
5130 if (t)
5131 vseti(VT_JMP, t);
5132 else
5133 vpushi(0);
5134 break;
5136 next();
5137 expr_land();
5142 /* Assuming vtop is a value used in a conditional context
5143 (i.e. compared with zero) return 0 if it's false, 1 if
5144 true and -1 if it can't be statically determined. */
5145 static int condition_3way(void)
5147 int c = -1;
5148 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5149 (!(vtop->r & VT_SYM) ||
5150 !(vtop->sym->type.t & VT_WEAK))) {
5151 CType boolean;
5152 boolean.t = VT_BOOL;
5153 vdup();
5154 gen_cast(&boolean);
5155 c = vtop->c.i;
5156 vpop();
5158 return c;
5161 static void expr_cond(void)
5163 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5164 SValue sv;
5165 CType type, type1, type2;
5167 expr_lor();
5168 if (tok == '?') {
5169 next();
5170 c = condition_3way();
5171 g = (tok == ':' && gnu_ext);
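/* c is the statically known truth value of the condition
   (-1: unknown, 0: false, 1: true); g marks the GNU 'x ?: y'
   extension where the tested value itself is the true result */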
5172 if (c < 0) {
5173 /* needed to avoid having different registers saved in
5174 each branch */
5175 if (is_float(vtop->type.t)) {
5176 rc = RC_FLOAT;
5177 #ifdef TCC_TARGET_X86_64
5178 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5179 rc = RC_ST0;
5181 #endif
5182 } else
5183 rc = RC_INT;
5184 gv(rc);
5185 save_regs(1);
5186 if (g)
5187 gv_dup();
5188 tt = gvtst(1, 0);
5190 } else {
5191 if (!g)
5192 vpop();
5193 tt = 0;
5196 if (1) {
5197 if (c == 0)
5198 nocode_wanted++;
5199 if (!g)
5200 gexpr();
5202 type1 = vtop->type;
5203 sv = *vtop; /* save value to handle it later */
5204 vtop--; /* no vpop so that FP stack is not flushed */
5205 skip(':');
5207 u = 0;
5208 if (c < 0)
5209 u = gjmp(0);
5210 gsym(tt);
5212 if (c == 0)
5213 nocode_wanted--;
5214 if (c == 1)
5215 nocode_wanted++;
5216 expr_cond();
5217 if (c == 1)
5218 nocode_wanted--;
5220 type2 = vtop->type;
5221 t1 = type1.t;
5222 bt1 = t1 & VT_BTYPE;
5223 t2 = type2.t;
5224 bt2 = t2 & VT_BTYPE;
5225 /* cast operands to correct type according to ISOC rules */
5226 if (is_float(bt1) || is_float(bt2)) {
5227 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5228 type.t = VT_LDOUBLE;
5230 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5231 type.t = VT_DOUBLE;
5232 } else {
5233 type.t = VT_FLOAT;
5235 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5236 /* cast to biggest op */
5237 type.t = VT_LLONG;
5238 /* convert to unsigned if it does not fit in a long long */
5239 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5240 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5241 type.t |= VT_UNSIGNED;
5242 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5243 /* If one is a null ptr constant the result type
5244 is the other. */
5245 if (is_null_pointer (vtop))
5246 type = type1;
5247 else if (is_null_pointer (&sv))
5248 type = type2;
5249 /* XXX: test pointer compatibility, C99 has more elaborate
5250 rules here. */
5251 else
5252 type = type1;
5253 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5254 /* XXX: test function pointer compatibility */
5255 type = bt1 == VT_FUNC ? type1 : type2;
5256 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5257 /* XXX: test structure compatibility */
5258 type = bt1 == VT_STRUCT ? type1 : type2;
5259 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5260 /* NOTE: as an extension, we accept void on only one side */
5261 type.t = VT_VOID;
5262 } else {
5263 /* integer operations */
5264 type.t = VT_INT;
5265 /* convert to unsigned if it does not fit in an integer */
5266 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5267 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5268 type.t |= VT_UNSIGNED;
5270 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5271 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5272 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5273 islv &= c < 0;
5275 /* now we convert second operand */
5276 if (c != 1) {
5277 gen_cast(&type);
5278 if (islv) {
5279 mk_pointer(&vtop->type);
5280 gaddrof();
5281 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5282 gaddrof();
5285 rc = RC_INT;
5286 if (is_float(type.t)) {
5287 rc = RC_FLOAT;
5288 #ifdef TCC_TARGET_X86_64
5289 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5290 rc = RC_ST0;
5292 #endif
5293 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5294 /* for long longs, we use fixed registers to avoid having
5295 to handle a complicated move */
5296 rc = RC_IRET;
5299 tt = r2 = 0;
5300 if (c < 0) {
5301 r2 = gv(rc);
5302 tt = gjmp(0);
5304 gsym(u);
5306 /* this is horrible, but we must also convert first
5307 operand */
5308 if (c != 0) {
5309 *vtop = sv;
5310 gen_cast(&type);
5311 if (islv) {
5312 mk_pointer(&vtop->type);
5313 gaddrof();
5314 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5315 gaddrof();
5318 if (c < 0) {
5319 r1 = gv(rc);
5320 move_reg(r2, r1, type.t);
5321 vtop->r = r2;
5322 gsym(tt);
5323 if (islv)
5324 indir();
5330 static void expr_eq(void)
5332 int t;
5334 expr_cond();
5335 if (tok == '=' ||
5336 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5337 tok == TOK_A_XOR || tok == TOK_A_OR ||
5338 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5339 test_lvalue();
5340 t = tok;
5341 next();
5342 if (t == '=') {
5343 expr_eq();
5344 } else {
5345 vdup();
5346 expr_eq();
5347 gen_op(t & 0x7f);
5349 vstore();
5353 ST_FUNC void gexpr(void)
5355 while (1) {
5356 expr_eq();
5357 if (tok != ',')
5358 break;
5359 vpop();
5360 next();
5364 /* parse an expression and return its type without any side effect. */
5365 static void expr_type(CType *type)
5368 nocode_wanted++;
5369 gexpr();
5370 *type = vtop->type;
5371 vpop();
5372 nocode_wanted--;
5375 /* parse a unary expression and return its type without any side
5376 effect. */
5377 static void unary_type(CType *type)
5379 nocode_wanted++;
5380 unary();
5381 *type = vtop->type;
5382 vpop();
5383 nocode_wanted--;
5386 /* parse a constant expression and return value in vtop. */
5387 static void expr_const1(void)
5389 const_wanted++;
5390 expr_cond();
5391 const_wanted--;
5394 /* parse an integer constant and return its value. */
5395 static inline int64_t expr_const64(void)
5397 int64_t c;
5398 expr_const1();
5399 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5400 expect("constant expression");
5401 c = vtop->c.i;
5402 vpop();
5403 return c;
5406 /* parse an integer constant and return its value.
5407 Complain if it doesn't fit 32bit (signed or unsigned). */
5408 ST_FUNC int expr_const(void)
5410 int c;
5411 int64_t wc = expr_const64();
5412 c = wc;
5413 if (c != wc && (unsigned)c != wc)
5414 tcc_error("constant exceeds 32 bit");
5415 return c;
5418 /* return the label token if current token is a label, otherwise
5419 return zero */
5420 static int is_label(void)
5422 int last_tok;
5424 /* fast test first */
5425 if (tok < TOK_UIDENT)
5426 return 0;
5427 /* no need to save tokc because tok is an identifier */
5428 last_tok = tok;
5429 next();
5430 if (tok == ':') {
5431 next();
5432 return last_tok;
5433 } else {
5434 unget_tok(last_tok);
5435 return 0;
5439 static void label_or_decl(int l)
5441 int last_tok;
5443 /* fast test first */
5444 if (tok >= TOK_UIDENT)
5446 /* no need to save tokc because tok is an identifier */
5447 last_tok = tok;
5448 next();
5449 if (tok == ':') {
5450 unget_tok(last_tok);
5451 return;
5453 unget_tok(last_tok);
5455 decl(l);
5458 #ifndef TCC_TARGET_ARM64
5459 static void gfunc_return(CType *func_type)
5461 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5462 CType type, ret_type;
5463 int ret_align, ret_nregs, regsize;
5464 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5465 &ret_align, &regsize);
5466 if (0 == ret_nregs) {
5467 /* if returning structure, must copy it to implicit
5468 first pointer arg location */
5469 type = *func_type;
5470 mk_pointer(&type);
5471 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5472 indir();
5473 vswap();
5474 /* copy structure value to pointer */
5475 vstore();
5476 } else {
5477 /* returning structure packed into registers */
5478 int r, size, addr, align;
5479 size = type_size(func_type,&align);
5480 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5481 (vtop->c.i & (ret_align-1)))
5482 && (align & (ret_align-1))) {
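/* the returned value may not sit in suitably aligned memory for
   whole-register loads, so copy it into a freshly allocated, properly
   aligned local slot before loading it into registers piecewise */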
5483 loc = (loc - size) & -ret_align;
5484 addr = loc;
5485 type = *func_type;
5486 vset(&type, VT_LOCAL | VT_LVAL, addr);
5487 vswap();
5488 vstore();
5489 vpop();
5490 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5492 vtop->type = ret_type;
5493 if (is_float(ret_type.t))
5494 r = rc_fret(ret_type.t);
5495 else
5496 r = RC_IRET;
5498 if (ret_nregs == 1)
5499 gv(r);
5500 else {
5501 for (;;) {
5502 vdup();
5503 gv(r);
5504 vpop();
5505 if (--ret_nregs == 0)
5506 break;
5507 /* We assume that when a structure is returned in multiple
5508 registers, their classes are consecutive values of the
5509 sequence s(n) = 2^n */
5510 r <<= 1;
5511 vtop->c.i += regsize;
5515 } else if (is_float(func_type->t)) {
5516 gv(rc_fret(func_type->t));
5517 } else {
5518 gv(RC_IRET);
5520 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5522 #endif
5524 static int case_cmp(const void *pa, const void *pb)
5526 int64_t a = (*(struct case_t**) pa)->v1;
5527 int64_t b = (*(struct case_t**) pb)->v1;
5528 return a < b ? -1 : a > b;
5531 static void gcase(struct case_t **base, int len, int *bsym)
5533 struct case_t *p;
5534 int e;
5535 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5536 gv(RC_INT);
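/* the cases were sorted by case_cmp(); ranges [v1,v2] are dispatched
   with a binary search while more than 4 remain, then a linear scan */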
5537 while (len > 4) {
5538 /* binary search */
5539 p = base[len/2];
5540 vdup();
5541 if (ll)
5542 vpushll(p->v2);
5543 else
5544 vpushi(p->v2);
5545 gen_op(TOK_LE);
5546 e = gtst(1, 0);
5547 vdup();
5548 if (ll)
5549 vpushll(p->v1);
5550 else
5551 vpushi(p->v1);
5552 gen_op(TOK_GE);
5553 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5554 /* x < v1 */
5555 gcase(base, len/2, bsym);
5556 if (cur_switch->def_sym)
5557 gjmp_addr(cur_switch->def_sym);
5558 else
5559 *bsym = gjmp(*bsym);
5560 /* x > v2 */
5561 gsym(e);
5562 e = len/2 + 1;
5563 base += e; len -= e;
5565 /* linear scan */
5566 while (len--) {
5567 p = *base++;
5568 vdup();
5569 if (ll)
5570 vpushll(p->v2);
5571 else
5572 vpushi(p->v2);
5573 if (p->v1 == p->v2) {
5574 gen_op(TOK_EQ);
5575 gtst_addr(0, p->sym);
5576 } else {
5577 gen_op(TOK_LE);
5578 e = gtst(1, 0);
5579 vdup();
5580 if (ll)
5581 vpushll(p->v1);
5582 else
5583 vpushi(p->v1);
5584 gen_op(TOK_GE);
5585 gtst_addr(0, p->sym);
5586 gsym(e);
5591 static void block(int *bsym, int *csym, int is_expr)
5593 int a, b, c, d, cond;
5594 Sym *s;
5596 /* generate line number info */
5597 if (tcc_state->do_debug)
5598 tcc_debug_line(tcc_state);
5600 if (is_expr) {
5601 /* default return value is (void) */
5602 vpushi(0);
5603 vtop->type.t = VT_VOID;
5606 if (tok == TOK_IF) {
5607 /* if test */
5608 int saved_nocode_wanted = nocode_wanted;
5609 next();
5610 skip('(');
5611 gexpr();
5612 skip(')');
5613 cond = condition_3way();
5614 if (cond == 1)
5615 a = 0, vpop();
5616 else
5617 a = gvtst(1, 0);
5618 if (cond == 0)
5619 nocode_wanted |= 0x20000000;
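/* the 0x20000000 bit of nocode_wanted marks statically unreachable
   code (after return, break, or an if branch whose condition is a
   compile-time constant); it is cleared again at loop heads,
   case/default labels and ordinary labels */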
5620 block(bsym, csym, 0);
5621 if (cond != 1)
5622 nocode_wanted = saved_nocode_wanted;
5623 c = tok;
5624 if (c == TOK_ELSE) {
5625 next();
5626 d = gjmp(0);
5627 gsym(a);
5628 if (cond == 1)
5629 nocode_wanted |= 0x20000000;
5630 block(bsym, csym, 0);
5631 gsym(d); /* patch else jmp */
5632 if (cond != 0)
5633 nocode_wanted = saved_nocode_wanted;
5634 } else
5635 gsym(a);
5636 } else if (tok == TOK_WHILE) {
5637 int saved_nocode_wanted;
5638 nocode_wanted &= ~0x20000000;
5639 next();
5640 d = ind;
5641 vla_sp_restore();
5642 skip('(');
5643 gexpr();
5644 skip(')');
5645 a = gvtst(1, 0);
5646 b = 0;
5647 ++local_scope;
5648 saved_nocode_wanted = nocode_wanted;
5649 block(&a, &b, 0);
5650 nocode_wanted = saved_nocode_wanted;
5651 --local_scope;
5652 gjmp_addr(d);
5653 gsym(a);
5654 gsym_addr(b, d);
5655 } else if (tok == '{') {
5656 Sym *llabel;
5657 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5659 next();
5660 /* record local declaration stack position */
5661 s = local_stack;
5662 llabel = local_label_stack;
5663 ++local_scope;
5665 /* handle local labels declarations */
5666 if (tok == TOK_LABEL) {
5667 next();
5668 for(;;) {
5669 if (tok < TOK_UIDENT)
5670 expect("label identifier");
5671 label_push(&local_label_stack, tok, LABEL_DECLARED);
5672 next();
5673 if (tok == ',') {
5674 next();
5675 } else {
5676 skip(';');
5677 break;
5681 while (tok != '}') {
5682 label_or_decl(VT_LOCAL);
5683 if (tok != '}') {
5684 if (is_expr)
5685 vpop();
5686 block(bsym, csym, is_expr);
5689 /* pop locally defined labels */
5690 label_pop(&local_label_stack, llabel);
5691 /* pop locally defined symbols */
5692 --local_scope;
5693 /* In the is_expr case (a statement expression is finished here),
5694 vtop might refer to symbols on the local_stack. Either via the
5695 type or via vtop->sym. We can't pop those nor any that in turn
5696 might be referred to. To make it easier we don't roll back
5697 any symbols in that case; some upper level call to block() will
5698 do that. We do have to remove such symbols from the lookup
5699 tables, though. sym_pop will do that. */
5700 sym_pop(&local_stack, s, is_expr);
5702 /* Pop VLA frames and restore stack pointer if required */
5703 if (vlas_in_scope > saved_vlas_in_scope) {
5704 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5705 vla_sp_restore();
5707 vlas_in_scope = saved_vlas_in_scope;
5709 next();
5710 } else if (tok == TOK_RETURN) {
5711 next();
5712 if (tok != ';') {
5713 gexpr();
5714 gen_assign_cast(&func_vt);
5715 gfunc_return(&func_vt);
5717 skip(';');
5718 /* jump unless last stmt in top-level block */
5719 if (tok != '}' || local_scope != 1)
5720 rsym = gjmp(rsym);
5721 nocode_wanted |= 0x20000000;
5722 } else if (tok == TOK_BREAK) {
5723 /* compute jump */
5724 if (!bsym)
5725 tcc_error("cannot break");
5726 *bsym = gjmp(*bsym);
5727 next();
5728 skip(';');
5729 nocode_wanted |= 0x20000000;
5730 } else if (tok == TOK_CONTINUE) {
5731 /* compute jump */
5732 if (!csym)
5733 tcc_error("cannot continue");
5734 vla_sp_restore_root();
5735 *csym = gjmp(*csym);
5736 next();
5737 skip(';');
5738 } else if (tok == TOK_FOR) {
5739 int e;
5740 int saved_nocode_wanted;
5741 nocode_wanted &= ~0x20000000;
5742 next();
5743 skip('(');
5744 s = local_stack;
5745 ++local_scope;
5746 if (tok != ';') {
5747 /* c99 for-loop init decl? */
5748 if (!decl0(VT_LOCAL, 1)) {
5749 /* no, regular for-loop init expr */
5750 gexpr();
5751 vpop();
5754 skip(';');
5755 d = ind;
5756 c = ind;
5757 vla_sp_restore();
5758 a = 0;
5759 b = 0;
5760 if (tok != ';') {
5761 gexpr();
5762 a = gvtst(1, 0);
5764 skip(';');
5765 if (tok != ')') {
5766 e = gjmp(0);
5767 c = ind;
5768 vla_sp_restore();
5769 gexpr();
5770 vpop();
5771 gjmp_addr(d);
5772 gsym(e);
5774 skip(')');
5775 saved_nocode_wanted = nocode_wanted;
5776 block(&a, &b, 0);
5777 nocode_wanted = saved_nocode_wanted;
5778 gjmp_addr(c);
5779 gsym(a);
5780 gsym_addr(b, c);
5781 --local_scope;
5782 sym_pop(&local_stack, s, 0);
5784 } else
5785 if (tok == TOK_DO) {
5786 int saved_nocode_wanted;
5787 nocode_wanted &= ~0x20000000;
5788 next();
5789 a = 0;
5790 b = 0;
5791 d = ind;
5792 vla_sp_restore();
5793 saved_nocode_wanted = nocode_wanted;
5794 block(&a, &b, 0);
5795 skip(TOK_WHILE);
5796 skip('(');
5797 gsym(b);
5798 gexpr();
5799 c = gvtst(0, 0);
5800 gsym_addr(c, d);
5801 nocode_wanted = saved_nocode_wanted;
5802 skip(')');
5803 gsym(a);
5804 skip(';');
5805 } else
5806 if (tok == TOK_SWITCH) {
5807 struct switch_t *saved, sw;
5808 int saved_nocode_wanted = nocode_wanted;
5809 SValue switchval;
5810 next();
5811 skip('(');
5812 gexpr();
5813 skip(')');
5814 switchval = *vtop--;
5815 a = 0;
5816 b = gjmp(0); /* jump to first case */
5817 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5818 saved = cur_switch;
5819 cur_switch = &sw;
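/* the switch body is compiled first while every case records its range
   into sw.p; afterwards the ranges are sorted and the dispatch code is
   emitted at the target of the initial jump 'b' */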
5820 block(&a, csym, 0);
5821 nocode_wanted = saved_nocode_wanted;
5822 a = gjmp(a); /* add implicit break */
5823 /* case lookup */
5824 gsym(b);
5825 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5826 for (b = 1; b < sw.n; b++)
5827 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5828 tcc_error("duplicate case value");
5829 /* Our switch table sorting is signed, so the compared
5830 value needs to be as well when it's 64bit. */
5831 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5832 switchval.type.t &= ~VT_UNSIGNED;
5833 vpushv(&switchval);
5834 gcase(sw.p, sw.n, &a);
5835 vpop();
5836 if (sw.def_sym)
5837 gjmp_addr(sw.def_sym);
5838 dynarray_reset(&sw.p, &sw.n);
5839 cur_switch = saved;
5840 /* break label */
5841 gsym(a);
5842 } else
5843 if (tok == TOK_CASE) {
5844 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5845 if (!cur_switch)
5846 expect("switch");
5847 nocode_wanted &= ~0x20000000;
5848 next();
5849 cr->v1 = cr->v2 = expr_const64();
5850 if (gnu_ext && tok == TOK_DOTS) {
5851 next();
5852 cr->v2 = expr_const64();
5853 if (cr->v2 < cr->v1)
5854 tcc_warning("empty case range");
5856 cr->sym = ind;
5857 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
5858 skip(':');
5859 is_expr = 0;
5860 goto block_after_label;
5861 } else
5862 if (tok == TOK_DEFAULT) {
5863 next();
5864 skip(':');
5865 if (!cur_switch)
5866 expect("switch");
5867 if (cur_switch->def_sym)
5868 tcc_error("too many 'default'");
5869 cur_switch->def_sym = ind;
5870 is_expr = 0;
5871 goto block_after_label;
5872 } else
5873 if (tok == TOK_GOTO) {
5874 next();
5875 if (tok == '*' && gnu_ext) {
5876 /* computed goto */
5877 next();
5878 gexpr();
5879 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5880 expect("pointer");
5881 ggoto();
5882 } else if (tok >= TOK_UIDENT) {
5883 s = label_find(tok);
5884 /* put forward definition if needed */
5885 if (!s) {
5886 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5887 } else {
5888 if (s->r == LABEL_DECLARED)
5889 s->r = LABEL_FORWARD;
5891 vla_sp_restore_root();
5892 if (s->r & LABEL_FORWARD)
5893 s->jnext = gjmp(s->jnext);
5894 else
5895 gjmp_addr(s->jnext);
5896 next();
5897 } else {
5898 expect("label identifier");
5900 skip(';');
5901 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5902 asm_instr();
5903 } else {
5904 b = is_label();
5905 if (b) {
5906 /* label case */
5907 s = label_find(b);
5908 if (s) {
5909 if (s->r == LABEL_DEFINED)
5910 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5911 gsym(s->jnext);
5912 s->r = LABEL_DEFINED;
5913 } else {
5914 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5916 s->jnext = ind;
5917 vla_sp_restore();
5918 /* we accept this, but it is a mistake */
5919 block_after_label:
5920 nocode_wanted &= ~0x20000000;
5921 if (tok == '}') {
5922 tcc_warning("deprecated use of label at end of compound statement");
5923 } else {
5924 if (is_expr)
5925 vpop();
5926 block(bsym, csym, is_expr);
5928 } else {
5929 /* expression case */
5930 if (tok != ';') {
5931 if (is_expr) {
5932 vpop();
5933 gexpr();
5934 } else {
5935 gexpr();
5936 vpop();
5939 skip(';');
5944 #define EXPR_CONST 1
5945 #define EXPR_ANY 2
5947 static void parse_init_elem(int expr_type)
5949 int saved_global_expr;
5950 switch(expr_type) {
5951 case EXPR_CONST:
5952 /* compound literals must be allocated globally in this case */
5953 saved_global_expr = global_expr;
5954 global_expr = 1;
5955 expr_const1();
5956 global_expr = saved_global_expr;
5957 /* NOTE: symbols are accepted */
5958 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5959 #ifdef TCC_TARGET_PE
5960 || (vtop->type.t & VT_IMPORT)
5961 #endif
5963 tcc_error("initializer element is not constant");
5964 break;
5965 case EXPR_ANY:
5966 expr_eq();
5967 break;
5971 /* t is the array or struct type. c is the array or struct
5972 address. cur_field is the pointer to the current
5973 value, for arrays the 'c' member contains the current start
5974 index and the 'r' contains the end index (in case of range init).
5975 'size_only' is true if only size info is needed (only used
5976 in arrays) */
5977 static void decl_designator(CType *type, Section *sec, unsigned long c,
5978 Sym **cur_field, int size_only)
5980 Sym *s, *f;
5981 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5982 CType type1;
5984 notfirst = 0;
5985 elem_size = 0;
5986 nb_elems = 1;
5987 if (gnu_ext && (l = is_label()) != 0)
5988 goto struct_field;
5989 while (tok == '[' || tok == '.') {
5990 if (tok == '[') {
5991 if (!(type->t & VT_ARRAY))
5992 expect("array type");
5993 s = type->ref;
5994 next();
5995 index = expr_const();
5996 if (index < 0 || (s->c >= 0 && index >= s->c))
5997 tcc_error("invalid index");
5998 if (tok == TOK_DOTS && gnu_ext) {
5999 next();
6000 index_last = expr_const();
6001 if (index_last < 0 ||
6002 (s->c >= 0 && index_last >= s->c) ||
6003 index_last < index)
6004 tcc_error("invalid index");
6005 } else {
6006 index_last = index;
6008 skip(']');
6009 if (!notfirst) {
6010 (*cur_field)->c = index;
6011 (*cur_field)->r = index_last;
6013 type = pointed_type(type);
6014 elem_size = type_size(type, &align);
6015 c += index * elem_size;
6016 /* NOTE: we only support ranges for last designator */
6017 nb_elems = index_last - index + 1;
6018 if (nb_elems != 1) {
6019 notfirst = 1;
6020 break;
6022 } else {
6023 next();
6024 l = tok;
6025 next();
6026 struct_field:
6027 if ((type->t & VT_BTYPE) != VT_STRUCT)
6028 expect("struct/union type");
6029 f = find_field(type, l);
6030 if (!f)
6031 expect("field");
6032 if (!notfirst)
6033 *cur_field = f;
6034 /* XXX: fix this mess by using explicit storage field */
6035 type1 = f->type;
6036 type1.t |= (type->t & ~VT_TYPE);
6037 type = &type1;
6038 c += f->c;
6040 notfirst = 1;
6042 if (notfirst) {
6043 if (tok == '=') {
6044 next();
6045 } else {
6046 if (!gnu_ext)
6047 expect("=");
6049 } else {
6050 if (type->t & VT_ARRAY) {
6051 index = (*cur_field)->c;
6052 if (type->ref->c >= 0 && index >= type->ref->c)
6053 tcc_error("index too large");
6054 type = pointed_type(type);
6055 c += index * type_size(type, &align);
6056 } else {
6057 f = *cur_field;
6058 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6059 *cur_field = f = f->next;
6060 if (!f)
6061 tcc_error("too many field init");
6062 /* XXX: fix this mess by using explicit storage field */
6063 type1 = f->type;
6064 type1.t |= (type->t & ~VT_TYPE);
6065 type = &type1;
6066 c += f->c;
6069 decl_initializer(type, sec, c, 0, size_only);
6071 /* XXX: make it more general */
6072 if (!size_only && nb_elems > 1) {
6073 unsigned long c_end;
6074 uint8_t *src, *dst;
6075 int i;
6077 if (!sec) {
6078 vset(type, VT_LOCAL|VT_LVAL, c);
6079 for (i = 1; i < nb_elems; i++) {
6080 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6081 vswap();
6082 vstore();
6084 vpop();
6085 } else {
6086 c_end = c + nb_elems * elem_size;
6087 if (c_end > sec->data_allocated)
6088 section_realloc(sec, c_end);
6089 src = sec->data + c;
6090 dst = src;
6091 for(i = 1; i < nb_elems; i++) {
6092 dst += elem_size;
6093 memcpy(dst, src, elem_size);
6099 /* store a value or an expression directly in global data or in local array */
6100 static void init_putv(CType *type, Section *sec, unsigned long c)
6102 int bt, bit_pos, bit_size;
6103 void *ptr;
6104 unsigned long long bit_mask;
6105 CType dtype;
6107 dtype = *type;
6108 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6110 if (sec) {
6111 int size, align;
6112 /* XXX: not portable */
6113 /* XXX: generate error if incorrect relocation */
6114 gen_assign_cast(&dtype);
6115 bt = type->t & VT_BTYPE;
6116 size = type_size(type, &align);
6117 if (c + size > sec->data_allocated) {
6118 section_realloc(sec, c + size);
6120 ptr = sec->data + c;
6121 /* XXX: make code faster ? */
6122 if (!(type->t & VT_BITFIELD)) {
6123 bit_pos = 0;
6124 bit_size = PTR_SIZE * 8;
6125 bit_mask = -1LL;
6126 } else {
6127 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6128 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6129 bit_mask = (1LL << bit_size) - 1;
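/* the integer stores below OR the masked, shifted value into place so
   that successive bitfield initializers landing in the same bytes
   accumulate instead of overwriting each other */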
6131 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6132 vtop->sym->v >= SYM_FIRST_ANOM &&
6133 /* XXX This rejects compound literals like
6134 '(void *){ptr}'. The problem is that '&sym' is
6135 represented the same way, which would be ruled out
6136 by the SYM_FIRST_ANOM check above, but also '"string"'
6137 in 'char *p = "string"' is represented the same
6138 with the type being VT_PTR and the symbol being an
6139 anonymous one. That is, there's no difference in vtop
6140 between '(void *){x}' and '&(void *){x}'. Ignore
6141 pointer typed entities here. Hopefully no real code
6142 will ever use compound literals with scalar type. */
6143 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6144 /* These come from compound literals, memcpy stuff over. */
6145 Section *ssec;
6146 ElfW(Sym) *esym;
6147 ElfW_Rel *rel;
6148 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6149 ssec = tcc_state->sections[esym->st_shndx];
6150 memmove (ptr, ssec->data + esym->st_value, size);
6151 if (ssec->reloc) {
6152 /* We need to copy over all memory contents, and that
6153 includes relocations. Use the fact that relocs are
6154 created in order, so look from the end of relocs
6155 until we hit one before the copied region. */
6156 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6157 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6158 while (num_relocs--) {
6159 rel--;
6160 if (rel->r_offset >= esym->st_value + size)
6161 continue;
6162 if (rel->r_offset < esym->st_value)
6163 break;
6164 /* Note: if the same fields are initialized multiple
6165 times (possible with designators) then we possibly
6166 add multiple relocations for the same offset here.
6167 That would lead to wrong code; the last reloc needs
6168 to win. We clean this up later after the whole
6169 initializer is parsed. */
6170 put_elf_reloca(symtab_section, sec,
6171 c + rel->r_offset - esym->st_value,
6172 ELFW(R_TYPE)(rel->r_info),
6173 ELFW(R_SYM)(rel->r_info),
6174 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6175 rel->r_addend
6176 #else
6178 #endif
6182 } else {
6183 if ((vtop->r & VT_SYM) &&
6184 (bt == VT_BYTE ||
6185 bt == VT_SHORT ||
6186 bt == VT_DOUBLE ||
6187 bt == VT_LDOUBLE ||
6188 #if PTR_SIZE == 8
6189 (bt == VT_LLONG && bit_size != 64) ||
6190 bt == VT_INT
6191 #else
6192 bt == VT_LLONG ||
6193 (bt == VT_INT && bit_size != 32)
6194 #endif
6196 tcc_error("initializer element is not computable at load time");
6197 switch(bt) {
6198 /* XXX: when cross-compiling we assume that each type has the
6199 same representation on host and target, which is likely to
6200 be wrong in the case of long double */
6201 case VT_BOOL:
6202 vtop->c.i = (vtop->c.i != 0);
6203 case VT_BYTE:
6204 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6205 break;
6206 case VT_SHORT:
6207 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6208 break;
6209 case VT_DOUBLE:
6210 *(double *)ptr = vtop->c.d;
6211 break;
6212 case VT_LDOUBLE:
6213 if (sizeof(long double) == LDOUBLE_SIZE)
6214 *(long double *)ptr = vtop->c.ld;
6215 else if (sizeof(double) == LDOUBLE_SIZE)
6216 *(double *)ptr = vtop->c.ld;
6217 else
6218 tcc_error("can't cross compile long double constants");
6219 break;
6220 #if PTR_SIZE != 8
6221 case VT_LLONG:
6222 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6223 break;
6224 #else
6225 case VT_LLONG:
6226 #endif
6227 case VT_PTR:
6229 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6230 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6231 if (vtop->r & VT_SYM)
6232 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6233 else
6234 *(addr_t *)ptr |= val;
6235 #else
6236 if (vtop->r & VT_SYM)
6237 greloc(sec, vtop->sym, c, R_DATA_PTR);
6238 *(addr_t *)ptr |= val;
6239 #endif
6240 break;
6242 default:
6244 int val = (vtop->c.i & bit_mask) << bit_pos;
6245 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6246 if (vtop->r & VT_SYM)
6247 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6248 else
6249 *(int *)ptr |= val;
6250 #else
6251 if (vtop->r & VT_SYM)
6252 greloc(sec, vtop->sym, c, R_DATA_PTR);
6253 *(int *)ptr |= val;
6254 #endif
6255 break;
6259 vtop--;
6260 } else {
6261 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6262 vswap();
6263 vstore();
6264 vpop();
6268 /* put zeros for variable based init */
6269 static void init_putz(Section *sec, unsigned long c, int size)
6271 if (sec) {
6272 /* nothing to do because globals are already set to zero */
6273 } else {
6274 vpush_global_sym(&func_old_type, TOK_memset);
6275 vseti(VT_LOCAL, c);
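/* emit a call to memset(&local_object, 0, size); the push order of the
   last two arguments depends on the target calling convention */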
6276 #ifdef TCC_TARGET_ARM
6277 vpushs(size);
6278 vpushi(0);
6279 #else
6280 vpushi(0);
6281 vpushs(size);
6282 #endif
6283 gfunc_call(3);
6287 /* 't' contains the type and storage info. 'c' is the offset of the
6288 object in section 'sec'. If 'sec' is NULL, it means stack based
6289 allocation. 'first' is true if array '{' must be read (multi
6290 dimension implicit array init handling). 'size_only' is true if
6291 size only evaluation is wanted (only for arrays). */
6292 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6293 int first, int size_only)
6295 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6296 int size1, align1;
6297 int have_elem;
6298 Sym *s, *f;
6299 Sym indexsym;
6300 CType *t1;
6302 /* If we currently are at an '}' or ',' we have read an initializer
6303 element in one of our callers, and not yet consumed it. */
6304 have_elem = tok == '}' || tok == ',';
6305 if (!have_elem && tok != '{' &&
6306 /* In case of strings we have special handling for arrays, so
6307 don't consume them as initializer value (which would commit them
6308 to some anonymous symbol). */
6309 tok != TOK_LSTR && tok != TOK_STR &&
6310 !size_only) {
6311 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6312 have_elem = 1;
6315 if (have_elem &&
6316 !(type->t & VT_ARRAY) &&
6317 /* Use i_c_parameter_t (is_compatible_parameter_types) to strip toplevel qualifiers.
6318 The source type might have VT_CONSTANT set, which is
6319 of course assignable to non-const elements. */
6320 is_compatible_parameter_types(type, &vtop->type)) {
6321 init_putv(type, sec, c);
6322 } else if (type->t & VT_ARRAY) {
6323 s = type->ref;
6324 n = s->c;
6325 array_length = 0;
6326 t1 = pointed_type(type);
6327 size1 = type_size(t1, &align1);
6329 no_oblock = 1;
6330 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6331 tok == '{') {
6332 if (tok != '{')
6333 tcc_error("character array initializer must be a literal,"
6334 " optionally enclosed in braces");
6335 skip('{');
6336 no_oblock = 0;
6339 /* only parse strings here if the type is correct (otherwise handle
6340 them as ((w)char *) expressions) */
6341 if ((tok == TOK_LSTR &&
6342 #ifdef TCC_TARGET_PE
6343 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6344 #else
6345 (t1->t & VT_BTYPE) == VT_INT
6346 #endif
6347 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6348 while (tok == TOK_STR || tok == TOK_LSTR) {
6349 int cstr_len, ch;
6351 /* compute maximum number of chars wanted */
6352 if (tok == TOK_STR)
6353 cstr_len = tokc.str.size;
6354 else
6355 cstr_len = tokc.str.size / sizeof(nwchar_t);
6356 cstr_len--;
6357 nb = cstr_len;
6358 if (n >= 0 && nb > (n - array_length))
6359 nb = n - array_length;
6360 if (!size_only) {
6361 if (cstr_len > nb)
6362 tcc_warning("initializer-string for array is too long");
6363 /* in order to go faster for the common case (char
6364 string in a global variable), we handle it
6365 specifically */
6366 if (sec && tok == TOK_STR && size1 == 1) {
6367 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6368 } else {
6369 for(i=0;i<nb;i++) {
6370 if (tok == TOK_STR)
6371 ch = ((unsigned char *)tokc.str.data)[i];
6372 else
6373 ch = ((nwchar_t *)tokc.str.data)[i];
6374 vpushi(ch);
6375 init_putv(t1, sec, c + (array_length + i) * size1);
6379 array_length += nb;
6380 next();
6382 /* only add trailing zero if enough storage (no
6383 warning in this case since it is standard) */
6384 if (n < 0 || array_length < n) {
6385 if (!size_only) {
6386 vpushi(0);
6387 init_putv(t1, sec, c + (array_length * size1));
6389 array_length++;
6391 } else {
6392 indexsym.c = 0;
6393 indexsym.r = 0;
6394 f = &indexsym;
6396 do_init_list:
6397 while (tok != '}' || have_elem) {
6398 decl_designator(type, sec, c, &f, size_only);
6399 have_elem = 0;
6400 index = f->c;
6401 /* must put zero in holes (note that doing it that way
6402 ensures that it even works with designators) */
6403 if (!size_only && array_length < index) {
6404 init_putz(sec, c + array_length * size1,
6405 (index - array_length) * size1);
6407 if (type->t & VT_ARRAY) {
6408 index = indexsym.c = ++indexsym.r;
6409 } else {
6410 index = index + type_size(&f->type, &align1);
6411 if (s->type.t == TOK_UNION)
6412 f = NULL;
6413 else
6414 f = f->next;
6416 if (index > array_length)
6417 array_length = index;
6419 if (type->t & VT_ARRAY) {
6420 /* special test for multi dimensional arrays (may not
6421 be strictly correct if designators are used at the
6422 same time) */
6423 if (no_oblock && index >= n)
6424 break;
6425 } else {
6426 if (no_oblock && f == NULL)
6427 break;
6429 if (tok == '}')
6430 break;
6431 skip(',');
6434 /* put zeros at the end */
6435 if (!size_only && array_length < n) {
6436 init_putz(sec, c + array_length * size1,
6437 (n - array_length) * size1);
6439 if (!no_oblock)
6440 skip('}');
6441 /* patch type size if needed, which happens only for array types */
6442 if (n < 0)
6443 s->c = array_length;
6444 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6445 size1 = 1;
6446 no_oblock = 1;
6447 if (first || tok == '{') {
6448 skip('{');
6449 no_oblock = 0;
6451 s = type->ref;
6452 f = s->next;
6453 array_length = 0;
6454 n = s->c;
6455 goto do_init_list;
6456 } else if (tok == '{') {
6457 next();
6458 decl_initializer(type, sec, c, first, size_only);
6459 skip('}');
6460 } else if (size_only) {
6461 /* If we supported only ISO C we wouldn't have to accept calling
6462 this on anything other than an array with size_only==1 (and even then
6463 only on the outermost level, so no recursion would be needed),
6464 because initializing a flex array member isn't supported.
6465 But GNU C supports it, so we need to recurse even into
6466 subfields of structs and arrays when size_only is set. */
6467 /* just skip expression */
6468 parlevel = parlevel1 = 0;
6469 while ((parlevel > 0 || parlevel1 > 0 ||
6470 (tok != '}' && tok != ',')) && tok != -1) {
6471 if (tok == '(')
6472 parlevel++;
6473 else if (tok == ')') {
6474 if (parlevel == 0 && parlevel1 == 0)
6475 break;
6476 parlevel--;
6478 else if (tok == '{')
6479 parlevel1++;
6480 else if (tok == '}') {
6481 if (parlevel == 0 && parlevel1 == 0)
6482 break;
6483 parlevel1--;
6485 next();
6487 } else {
6488 if (!have_elem) {
6489 /* This should happen only when we haven't parsed
6490 the init element above for fear of committing a
6491 string constant to memory too early. */
6492 if (tok != TOK_STR && tok != TOK_LSTR)
6493 expect("string constant");
6494 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6496 init_putv(type, sec, c);
6500 /* parse an initializer for type 't' if 'has_init' is non zero, and
6501 allocate space in local or global data space ('r' is either
6502 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6503 variable 'v' of scope 'scope' is declared before initializers
6504 are parsed. If 'v' is zero, then a reference to the new object
6505 is put in the value stack. If 'has_init' is 2, a special parsing
6506 is done to handle string constants. */
6507 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6508 int has_init, int v, int scope)
6510 int size, align, addr, data_offset;
6511 int level;
6512 ParseState saved_parse_state = {0};
6513 TokenString *init_str = NULL;
6514 Section *sec;
6515 Sym *flexible_array;
6517 flexible_array = NULL;
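/* look for a trailing flexible array member (an array field whose size
   is still unknown, type.ref->c < 0); its real size is derived from the
   initializer further below */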
6518 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6519 Sym *field = type->ref->next;
6520 if (field) {
6521 while (field->next)
6522 field = field->next;
6523 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6524 flexible_array = field;
6528 size = type_size(type, &align);
6529 /* If unknown size, we must evaluate it before
6530 evaluating initializers because
6531 initializers can generate global data too
6532 (e.g. string pointers or ISOC99 compound
6533 literals). It also simplifies local
6534 initializers handling */
6535 if (size < 0 || (flexible_array && has_init)) {
6536 if (!has_init)
6537 tcc_error("unknown type size");
6538 /* get all init string */
6539 init_str = tok_str_alloc();
6540 if (has_init == 2) {
6541 /* only get strings */
6542 while (tok == TOK_STR || tok == TOK_LSTR) {
6543 tok_str_add_tok(init_str);
6544 next();
6546 } else {
6547 level = 0;
6548 while (level > 0 || (tok != ',' && tok != ';')) {
6549 if (tok < 0)
6550 tcc_error("unexpected end of file in initializer");
6551 tok_str_add_tok(init_str);
6552 if (tok == '{')
6553 level++;
6554 else if (tok == '}') {
6555 level--;
6556 if (level <= 0) {
6557 next();
6558 break;
6561 next();
6564 tok_str_add(init_str, -1);
6565 tok_str_add(init_str, 0);
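/* the recorded initializer tokens are replayed twice: first with
   size_only=1 just below to compute the object size, then a second
   time further down for the actual initialization */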
6567 /* compute size */
6568 save_parse_state(&saved_parse_state);
6570 begin_macro(init_str, 1);
6571 next();
6572 decl_initializer(type, NULL, 0, 1, 1);
6573 /* prepare second initializer parsing */
6574 macro_ptr = init_str->str;
6575 next();
6577 /* if still unknown size, error */
6578 size = type_size(type, &align);
6579 if (size < 0)
6580 tcc_error("unknown type size");
6582 /* If there's a flex member and it was used in the initializer
6583 adjust size. */
6584 if (flexible_array &&
6585 flexible_array->type.ref->c > 0)
6586 size += flexible_array->type.ref->c
6587 * pointed_size(&flexible_array->type);
6588 /* take into account specified alignment if bigger */
6589 if (ad->a.aligned) {
6590 int speca = 1 << (ad->a.aligned - 1);
6591 if (speca > align)
6592 align = speca;
6593 } else if (ad->a.packed) {
6594 align = 1;
6596 if ((r & VT_VALMASK) == VT_LOCAL) {
6597 sec = NULL;
6598 #ifdef CONFIG_TCC_BCHECK
6599 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6600 loc--;
6602 #endif
6603 loc = (loc - size) & -align;
6604 addr = loc;
6605 #ifdef CONFIG_TCC_BCHECK
6606 /* handles bounds */
6607 /* XXX: currently, since we do only one pass, we cannot track
6608 '&' operators, so we add only arrays */
6609 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6610 addr_t *bounds_ptr;
6611 /* add padding between regions */
6612 loc--;
6613 /* then add local bound info */
6614 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6615 bounds_ptr[0] = addr;
6616 bounds_ptr[1] = size;
6618 #endif
6619 if (v) {
6620 /* local variable */
6621 #ifdef CONFIG_TCC_ASM
6622 if (ad->asm_label) {
6623 int reg = asm_parse_regvar(ad->asm_label);
6624 if (reg >= 0)
6625 r = (r & ~VT_VALMASK) | reg;
6627 #endif
6628 sym_push(v, type, r, addr);
6629 } else {
6630 /* push local reference */
6631 vset(type, r, addr);
6633 } else {
6634 Sym *sym = NULL;
6635 if (v && scope == VT_CONST) {
6636 /* see if the symbol was already defined */
6637 sym = sym_find(v);
6638 if (sym) {
6639 patch_storage(sym, type);
6640 if (sym->type.t & VT_EXTERN) {
6641 /* if the variable is extern, it was not allocated */
6642 sym->type.t &= ~VT_EXTERN;
6643 /* set array size if it was omitted in extern
6644 declaration */
6645 if ((sym->type.t & VT_ARRAY) &&
6646 sym->type.ref->c < 0 &&
6647 type->ref->c >= 0)
6648 sym->type.ref->c = type->ref->c;
6649 } else if (!has_init) {
6650 /* we accept several definitions of the same
6651 global variable. this is tricky, because we
6652 must play with the SHN_COMMON type of the symbol */
6653 /* no init data, we won't add more to the symbol */
6654 update_storage(sym);
6655 goto no_alloc;
6656 } else if (sym->c) {
6657 ElfW(Sym) *esym;
6658 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6659 if (esym->st_shndx == data_section->sh_num)
6660 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6665 /* allocate symbol in corresponding section */
6666 sec = ad->section;
6667 if (!sec) {
6668 if (has_init)
6669 sec = data_section;
6670 else if (tcc_state->nocommon)
6671 sec = bss_section;
6672 }
6674 if (sec) {
6675 data_offset = sec->data_offset;
6676 data_offset = (data_offset + align - 1) & -align;
6677 addr = data_offset;
6678 /* very important to increment global pointer at this time
6679 because initializers themselves can create new initializers */
6680 data_offset += size;
6681 #ifdef CONFIG_TCC_BCHECK
6682 /* add padding if bound check */
6683 if (tcc_state->do_bounds_check)
6684 data_offset++;
6685 #endif
6686 sec->data_offset = data_offset;
6687 /* allocate section space to put the data */
6688 if (sec->sh_type != SHT_NOBITS &&
6689 data_offset > sec->data_allocated)
6690 section_realloc(sec, data_offset);
6691 /* align section if needed */
6692 if (align > sec->sh_addralign)
6693 sec->sh_addralign = align;
6694 } else {
6695 addr = 0; /* avoid warning */
6696 }
6698 if (v) {
6699 if (scope != VT_CONST || !sym) {
6700 sym = sym_push(v, type, r | VT_SYM, 0);
6701 sym->asm_label = ad->asm_label;
6702 }
6703 /* update symbol definition */
6704 if (sec) {
6705 put_extern_sym(sym, sec, addr, size);
6706 } else {
6707 put_extern_sym(sym, SECTION_COMMON, align, size);
6708 }
6710 } else {
6711 /* push global reference */
6712 sym = get_sym_ref(type, sec, addr, size);
6713 vpushsym(type, sym);
6714 }
6716 #ifdef CONFIG_TCC_BCHECK
6717 /* handles bounds now because the symbol must be defined
6718 before the relocation can refer to it */
6719 if (tcc_state->do_bounds_check) {
6720 addr_t *bounds_ptr;
6722 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6723 /* then add global bound info */
6724 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6725 bounds_ptr[0] = 0; /* relocated */
6726 bounds_ptr[1] = size;
6727 }
6728 #endif
6729 }
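/* For illustration: in
      void f(int n) { int a[n]; ... }
   the first VLA pushed in a scope saves the incoming stack pointer once
   (vla_sp_root_loc); each VLA then allocates its runtime size and records
   the new stack pointer, so the stack can be restored when the VLAs go
   out of scope. */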
6731 if (type->t & VT_VLA) {
6732 int a;
6734 /* save current stack pointer */
6735 if (vlas_in_scope == 0) {
6736 if (vla_sp_root_loc == -1)
6737 vla_sp_root_loc = (loc -= PTR_SIZE);
6738 gen_vla_sp_save(vla_sp_root_loc);
6739 }
6741 vla_runtime_type_size(type, &a);
6742 gen_vla_alloc(type, a);
6743 gen_vla_sp_save(addr);
6744 vla_sp_loc = addr;
6745 vlas_in_scope++;
6747 } else if (has_init) {
6748 size_t oldreloc_offset = 0;
6749 if (sec && sec->reloc)
6750 oldreloc_offset = sec->reloc->data_offset;
6751 decl_initializer(type, sec, addr, 1, 0);
6752 if (sec && sec->reloc)
6753 squeeze_multi_relocs(sec, oldreloc_offset);
6754 /* patch flexible array member size back to -1, */
6755 /* for possible subsequent similar declarations */
6756 if (flexible_array)
6757 flexible_array->type.ref->c = -1;
6758 }
6760 no_alloc:
6761 /* restore parse state if needed */
6762 if (init_str) {
6763 end_macro();
6764 restore_parse_state(&saved_parse_state);
6765 }
6766 }
6768 /* parse an old style function declaration list */
6769 /* XXX: check multiple parameters */
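/* For illustration, an old-style (K&R) definition handled here looks like:
      int f(a, s)
      int a;
      char *s;
      { return a + *s; }
   each declaration in the list is matched against the parameter names
   already collected in the function's symbol. */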
6770 static void func_decl_list(Sym *func_sym)
6771 {
6772 AttributeDef ad;
6773 int v;
6774 Sym *s;
6775 CType btype, type;
6777 /* parse each declaration */
6778 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6779 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6780 if (!parse_btype(&btype, &ad))
6781 expect("declaration list");
6782 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6783 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6784 tok == ';') {
6785 /* we accept no variable after */
6786 } else {
6787 for(;;) {
6788 type = btype;
6789 type_decl(&type, &ad, &v, TYPE_DIRECT);
6790 /* find parameter in function parameter list */
6791 s = func_sym->next;
6792 while (s != NULL) {
6793 if ((s->v & ~SYM_FIELD) == v)
6794 goto found;
6795 s = s->next;
6796 }
6797 tcc_error("declaration for parameter '%s' but no such parameter",
6798 get_tok_str(v, NULL));
6799 found:
6800 /* check that no storage specifier except 'register' was given */
6801 if (type.t & VT_STORAGE)
6802 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6803 convert_parameter_type(&type);
6804 /* we can add the type (NOTE: it could be local to the function) */
6805 s->type = type;
6806 /* accept other parameters */
6807 if (tok == ',')
6808 next();
6809 else
6810 break;
6811 }
6812 }
6813 skip(';');
6814 }
6815 }
6817 /* parse a function defined by symbol 'sym' and generate its code in
6818 'cur_text_section' */
6819 static void gen_function(Sym *sym)
6820 {
6821 nocode_wanted = 0;
6822 ind = cur_text_section->data_offset;
6823 /* NOTE: we patch the symbol size later */
6824 put_extern_sym(sym, cur_text_section, ind, 0);
6825 funcname = get_tok_str(sym->v, NULL);
6826 func_ind = ind;
6827 /* Initialize VLA state */
6828 vla_sp_loc = -1;
6829 vla_sp_root_loc = -1;
6830 /* put debug symbol */
6831 tcc_debug_funcstart(tcc_state, sym);
6832 /* push a dummy symbol to enable local sym storage */
6833 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6834 local_scope = 1; /* for function parameters */
6835 gfunc_prolog(&sym->type);
6836 local_scope = 0;
6837 rsym = 0;
6838 block(NULL, NULL, 0);
6839 nocode_wanted = 0;
6840 gsym(rsym);
6841 gfunc_epilog();
6842 cur_text_section->data_offset = ind;
6843 label_pop(&global_label_stack, NULL);
6844 /* reset local stack */
6845 local_scope = 0;
6846 sym_pop(&local_stack, NULL, 0);
6847 /* end of function */
6848 /* patch symbol size */
6849 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6850 ind - func_ind;
6851 tcc_debug_funcend(tcc_state, ind - func_ind);
6852 /* It's better to crash than to generate wrong code */
6853 cur_text_section = NULL;
6854 funcname = ""; /* for safety */
6855 func_vt.t = VT_VOID; /* for safety */
6856 func_var = 0; /* for safety */
6857 ind = 0; /* for safety */
6858 nocode_wanted = 1;
6859 check_vstack();
6860 }
6862 static void gen_inline_functions(TCCState *s)
6863 {
6864 Sym *sym;
6865 int inline_generated, i, ln;
6866 struct InlineFunc *fn;
6868 ln = file->line_num;
6869 /* iterate while inline functions are referenced */
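/* Example of why a fixed point is needed: with
      static inline int g(void) { return 1; }
      static inline int f(void) { return g(); }
   and only f referenced elsewhere, the first pass generates f, which in
   turn references g (setting its sym->c), so another pass is required to
   generate g.  The loop stops once a pass generates nothing new. */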
6870 for(;;) {
6871 inline_generated = 0;
6872 for (i = 0; i < s->nb_inline_fns; ++i) {
6873 fn = s->inline_fns[i];
6874 sym = fn->sym;
6875 if (sym && sym->c) {
6876 /* the function was used: generate its code and
6877 convert it to a normal function */
6878 fn->sym = NULL;
6879 if (file)
6880 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6881 sym->type.t &= ~VT_INLINE;
6883 begin_macro(fn->func_str, 1);
6884 next();
6885 cur_text_section = text_section;
6886 gen_function(sym);
6887 end_macro();
6889 inline_generated = 1;
6890 }
6891 }
6892 if (!inline_generated)
6893 break;
6894 }
6895 file->line_num = ln;
6896 }
6898 ST_FUNC void free_inline_functions(TCCState *s)
6899 {
6900 int i;
6901 /* free tokens of unused inline functions */
6902 for (i = 0; i < s->nb_inline_fns; ++i) {
6903 struct InlineFunc *fn = s->inline_fns[i];
6904 if (fn->sym)
6905 tok_str_free(fn->func_str);
6906 }
6907 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6908 }
6910 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6911 static int decl0(int l, int is_for_loop_init)
6912 {
6913 int v, has_init, r;
6914 CType type, btype;
6915 Sym *sym;
6916 AttributeDef ad;
6918 while (1) {
6919 if (!parse_btype(&btype, &ad)) {
6920 if (is_for_loop_init)
6921 return 0;
6922 /* skip redundant ';' */
6923 /* XXX: find more elegant solution */
6924 if (tok == ';') {
6925 next();
6926 continue;
6927 }
6928 if (l == VT_CONST &&
6929 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6930 /* global asm block */
6931 asm_global_instr();
6932 continue;
6933 }
6934 /* special test for old K&R protos without explicit int
6935 type. Only accepted when defining global data */
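/* For illustration: pre-ANSI code at file scope such as
      count;
      main() { return 0; }
   is still accepted; the omitted type defaults to int here. */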
6936 if (l == VT_LOCAL || tok < TOK_UIDENT)
6937 break;
6938 btype.t = VT_INT;
6939 }
6940 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6941 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6942 tok == ';') {
6943 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6944 int v = btype.ref->v;
6945 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6946 tcc_warning("unnamed struct/union that defines no instances");
6947 }
6948 next();
6949 continue;
6950 }
6951 while (1) { /* iterate thru each declaration */
6952 type = btype;
6953 /* If the base type itself was an array type of unspecified
6954 size (like in 'typedef int arr[]; arr x = {1};') then
6955 we will overwrite the unknown size by the real one for
6956 this decl. We need to unshare the ref symbol holding
6957 that size. */
6958 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6959 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6960 }
6961 type_decl(&type, &ad, &v, TYPE_DIRECT);
6962 #if 0
6963 {
6964 char buf[500];
6965 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
6966 printf("type = '%s'\n", buf);
6967 }
6968 #endif
6969 if ((type.t & VT_BTYPE) == VT_FUNC) {
6970 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6971 tcc_error("function without file scope cannot be static");
6972 }
6973 /* if old style function prototype, we accept a
6974 declaration list */
6975 sym = type.ref;
6976 if (sym->c == FUNC_OLD)
6977 func_decl_list(sym);
6978 }
6980 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6981 ad.asm_label = asm_label_instr();
6982 /* parse one last attribute list, after asm label */
6983 parse_attribute(&ad);
6984 if (tok == '{')
6985 expect(";");
6986 }
6988 if (ad.a.weak)
6989 type.t |= VT_WEAK;
6990 #ifdef TCC_TARGET_PE
6991 if (ad.a.func_import || ad.a.func_export) {
6992 if (type.t & (VT_STATIC|VT_TYPEDEF))
6993 tcc_error("cannot have dll linkage with static or typedef");
6994 if (ad.a.func_export)
6995 type.t |= VT_EXPORT;
6996 else if ((type.t & VT_BTYPE) != VT_FUNC)
6997 type.t |= VT_IMPORT|VT_EXTERN;
6998 }
6999 #endif
7000 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7002 if (tok == '{') {
7003 if (l == VT_LOCAL)
7004 tcc_error("cannot use local functions");
7005 if ((type.t & VT_BTYPE) != VT_FUNC)
7006 expect("function definition");
7008 /* reject abstract declarators in function definition */
7009 sym = type.ref;
7010 while ((sym = sym->next) != NULL)
7011 if (!(sym->v & ~SYM_FIELD))
7012 expect("identifier");
7014 /* XXX: cannot do better now: convert extern inline to static inline */
7015 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7016 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7018 sym = sym_find(v);
7019 if (sym) {
7020 Sym *ref;
7021 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7022 goto func_error1;
7024 ref = sym->type.ref;
7026 /* use func_call from prototype if not defined */
7027 if (ref->a.func_call != FUNC_CDECL
7028 && type.ref->a.func_call == FUNC_CDECL)
7029 type.ref->a.func_call = ref->a.func_call;
7031 /* use static from prototype */
7032 if (sym->type.t & VT_STATIC)
7033 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7035 /* If the definition has no visibility use the
7036 one from prototype. */
7037 if (! (type.t & VT_VIS_MASK))
7038 type.t |= sym->type.t & VT_VIS_MASK;
7040 /* apply other storage attributes from prototype */
7041 type.t |= sym->type.t & (VT_EXPORT|VT_WEAK);
7043 if (!is_compatible_types(&sym->type, &type)) {
7044 func_error1:
7045 tcc_error("incompatible types for redefinition of '%s'",
7046 get_tok_str(v, NULL));
7047 }
7048 if (ref->a.func_body)
7049 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7050 /* if symbol is already defined, then put complete type */
7051 sym->type = type;
7053 } else {
7054 /* put function symbol */
7055 sym = global_identifier_push(v, type.t, 0);
7056 sym->type.ref = type.ref;
7057 }
7059 sym->type.ref->a.func_body = 1;
7060 sym->r = VT_SYM | VT_CONST;
7062 /* static inline functions are just recorded as a kind
7063 of macro. Their code will be emitted at the end of
7064 the compilation unit only if they are used */
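/* For illustration:
      static inline int sq(int x) { return x * x; }
   only has its tokens recorded here; gen_inline_functions() emits the
   code later, and only if sq is actually referenced. */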
7065 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7066 (VT_INLINE | VT_STATIC)) {
7067 int block_level;
7068 struct InlineFunc *fn;
7069 const char *filename;
7071 filename = file ? file->filename : "";
7072 fn = tcc_malloc(sizeof *fn + strlen(filename));
7073 strcpy(fn->filename, filename);
7074 fn->sym = sym;
7075 fn->func_str = tok_str_alloc();
7077 block_level = 0;
7078 for(;;) {
7079 int t;
7080 if (tok == TOK_EOF)
7081 tcc_error("unexpected end of file");
7082 tok_str_add_tok(fn->func_str);
7083 t = tok;
7084 next();
7085 if (t == '{') {
7086 block_level++;
7087 } else if (t == '}') {
7088 block_level--;
7089 if (block_level == 0)
7090 break;
7091 }
7092 }
7093 tok_str_add(fn->func_str, -1);
7094 tok_str_add(fn->func_str, 0);
7095 dynarray_add(&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7097 } else {
7098 /* compute text section */
7099 cur_text_section = ad.section;
7100 if (!cur_text_section)
7101 cur_text_section = text_section;
7102 gen_function(sym);
7103 }
7104 break;
7105 } else {
7106 if (type.t & VT_TYPEDEF) {
7107 /* save typedefed type */
7108 /* XXX: test storage specifiers ? */
7109 sym = sym_find(v);
7110 if (sym && sym->scope == local_scope) {
7111 if (!is_compatible_types(&sym->type, &type)
7112 || !(sym->type.t & VT_TYPEDEF))
7113 tcc_error("incompatible redefinition of '%s'",
7114 get_tok_str(v, NULL));
7115 sym->type = type;
7116 } else {
7117 sym = sym_push(v, &type, 0, 0);
7118 }
7119 sym->a = ad.a;
7120 } else {
7121 r = 0;
7122 if ((type.t & VT_BTYPE) == VT_FUNC) {
7123 /* external function definition */
7124 /* specific case for func_call attribute */
7125 type.ref->a = ad.a;
7126 } else if (!(type.t & VT_ARRAY)) {
7127 /* not lvalue if array */
7128 r |= lvalue_type(type.t);
7129 }
7130 has_init = (tok == '=');
7131 if (has_init && (type.t & VT_VLA))
7132 tcc_error("variable length array cannot be initialized");
7133 if ((type.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7134 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7135 !has_init && l == VT_CONST && type.ref->c < 0)) {
7136 /* external variable or function */
7137 /* NOTE: as in GCC, uninitialized global static
7138 arrays of unknown size are considered
7139 extern */
7140 sym = external_sym(v, &type, r);
7141 sym->asm_label = ad.asm_label;
7142 if (ad.alias_target) {
7143 Section tsec;
7144 ElfW(Sym) *esym;
7145 Sym *alias_target;
7147 alias_target = sym_find(ad.alias_target);
7148 if (!alias_target || !alias_target->c)
7149 tcc_error("unsupported forward __alias__ attribute");
7150 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7151 tsec.sh_num = esym->st_shndx;
7152 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7153 }
7154 } else {
7155 if (type.t & VT_STATIC)
7156 r |= VT_CONST;
7157 else
7158 r |= l;
7159 if (has_init)
7160 next();
7161 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7162 }
7163 }
7164 if (tok != ',') {
7165 if (is_for_loop_init)
7166 return 1;
7167 skip(';');
7168 break;
7169 }
7170 next();
7171 }
7172 ad.a.aligned = 0;
7173 }
7174 }
7175 return 0;
7176 }
7178 ST_FUNC void decl(int l)
7179 {
7180 decl0(l, 0);
7181 }
7183 /* ------------------------------------------------------------------------- */