fix-mixed-struct (patch by Pip Cet)
[tinycc.git] / tccgen.c
blob5ed7b3434263bd70ed5eedb5a55c7d763c506127
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
21 #include "tcc.h"
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Section *text_section, *data_section, *bss_section; /* predefined sections */
ST_DATA Section *cur_text_section; /* current section where function code is generated */
#ifdef CONFIG_TCC_ASM
ST_DATA Section *last_text_section; /* to handle .previous asm directive */
#endif
#ifdef CONFIG_TCC_BCHECK
/* bound check related sections */
ST_DATA Section *bounds_section; /* contains global data bound description */
ST_DATA Section *lbounds_section; /* contains local data bound description */
#endif
/* symbol sections */
ST_DATA Section *symtab_section, *strtab_section;
/* debug sections */
ST_DATA Section *stab_section, *stabstr_section;
/* free list of Sym structures and the pools they were carved from
   (see __sym_malloc/sym_malloc/sym_free below) */
ST_DATA Sym *sym_free_first;
ST_DATA void **sym_pools;
ST_DATA int nb_sym_pools;

/* symbol and label stacks used during parsing */
ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *scope_stack_bottom;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */

/* value stack: vtop is the current top; pvtop is the level recorded at
   function entry, compared against vtop in check_vstack() */
ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int func_vc; /* NOTE(review): appears to carry aggregate-return location info alongside func_vt — confirm against return handling */
ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
ST_DATA const char *funcname;

/* commonly used predefined types */
ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
/* ------------------------------------------------------------------------- */
/* forward declarations of parser / code-generator entry points defined
   later in this file */
static void gen_cast(CType *type);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad);
static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
static void block(int *bsym, int *csym, int *case_sym, int *def_sym, int case_reg, int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, char *asm_label, int scope);
static int decl0(int l, int is_for_loop_init);
static void expr_eq(void);
static void unary_type(CType *type);
static void vla_runtime_type_size(CType *type, int *a);
static void vla_sp_restore(void);
static void vla_sp_restore_root(void);
static int is_compatible_parameter_types(CType *type1, CType *type2);
static void expr_type(CType *type);
ST_FUNC void vpush64(int ty, unsigned long long v);
ST_FUNC void vpush(CType *type);
ST_FUNC int gvtst(int inv, int t);
ST_FUNC int is_btype_size(int bt);
98 ST_INLN int is_float(int t)
99 {
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    /* Copy the double's bit pattern and inspect the high word: the
       value is finite iff the 11 exponent bits are not all ones. */
    unsigned int w[4];
    memcpy(w, &d, sizeof(double));
    return (unsigned)((w[1] | 0x800fffff) + 1) >> 31;
}
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
/* ------------------------------------------------------------------------- */
/* symbol allocator */
/* Allocate a fresh pool of SYM_POOL_NB Sym structures, thread all of them
   onto the free list (sym_free_first), and return the head. The pool
   itself is remembered in sym_pools so it can be released at exit. */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    /* chain every new Sym in front of the existing free list */
    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}
148 static inline Sym *sym_malloc(void)
150 Sym *sym;
151 sym = sym_free_first;
152 if (!sym)
153 sym = __sym_malloc();
154 sym_free_first = sym->next;
155 return sym;
158 ST_INLN void sym_free(Sym *sym)
160 sym->next = sym_free_first;
161 tcc_free(sym->asm_label);
162 sym_free_first = sym;
/* push, without hashing */
/* Allocate a Sym with token 'v', type bits 't' and constant 'c', and push
   it on stack '*ps'. When pushing onto the local stack, first reject a
   duplicate non-field, non-anonymous symbol within the current scope
   (entries above scope_stack_bottom). */
ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
{
    Sym *s;
    if (ps == &local_stack) {
        for (s = *ps; s && s != scope_stack_bottom; s = s->prev)
            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM && s->v == v)
                tcc_error("incompatible types for redefinition of '%s'",
                          get_tok_str(v, NULL));
    }
    s = sym_malloc();
    s->asm_label = NULL;
    s->v = v;
    s->type.t = t;
    s->type.ref = NULL;
#ifdef _WIN64
    s->d = NULL;
#endif
    s->c = c;
    s->next = NULL;
    /* add in stack */
    s->prev = *ps;
    *ps = s;
    return s;
}
191 /* find a symbol and return its associated structure. 's' is the top
192 of the symbol stack */
193 ST_FUNC Sym *sym_find2(Sym *s, int v)
195 while (s) {
196 if (s->v == v)
197 return s;
198 else if (s->v == -1)
199 return NULL;
200 s = s->prev;
202 return NULL;
205 /* structure lookup */
206 ST_INLN Sym *struct_find(int v)
208 v -= TOK_IDENT;
209 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
210 return NULL;
211 return table_ident[v]->sym_struct;
214 /* find an identifier */
215 ST_INLN Sym *sym_find(int v)
217 v -= TOK_IDENT;
218 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
219 return NULL;
220 return table_ident[v]->sym_identifier;
/* push a given symbol on the symbol stack */
/* Pushes onto the local stack when inside a function, else onto the
   global stack, and (for named, non-field symbols) links the new Sym
   into the identifier table so sym_find/struct_find can see it. */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
/* push a global identifier */
/* Unlike sym_push, always targets the global stack, and appends the new
   symbol at the BOTTOM of the identifier chain so that any shadowing
   local identifiers keep precedence until they are popped. */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that
           sym_identifier will point to 's' when popped */
        while (*ps != NULL)
            ps = &(*ps)->prev_tok;
        s->prev_tok = NULL;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b' */
/* For each popped symbol, also unlink it from the identifier table
   (restoring the previously shadowed entry) before freeing it. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while(s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;
        }
        sym_free(s);
        s = ss;
    }
    *ptop = b;
}
/* Mark 'sym' as weak, and if it already has an ELF symtab entry
   (sym->c > 0 is its symtab index), rewrite that entry's binding to
   STB_WEAK while preserving its type. */
static void weaken_symbol(Sym *sym)
{
    sym->type.t |= VT_WEAK;
    if (sym->c > 0) {
        int esym_type;
        ElfW(Sym) *esym;

        esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
        esym_type = ELFW(ST_TYPE)(esym->st_info);
        esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
    }
}
/* Merge the ELF visibility of 'type' into 'sym': STV_DEFAULT on either
   side yields the other side's visibility, otherwise the numerically
   smaller (more visible) value wins. If the symbol already has an ELF
   symtab entry, patch st_other accordingly. */
static void apply_visibility(Sym *sym, CType *type)
{
    int vis = sym->type.t & VT_VIS_MASK;
    int vis2 = type->t & VT_VIS_MASK;
    if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
        vis = vis2;
    else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
        ; /* keep the symbol's existing visibility */
    else
        vis = (vis < vis2) ? vis : vis2;
    sym->type.t &= ~VT_VIS_MASK;
    sym->type.t |= vis;

    if (sym->c > 0) {
        ElfW(Sym) *esym;

        esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
        vis >>= VT_VIS_SHIFT;
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
    }
}
331 /* ------------------------------------------------------------------------- */
/* Exchange the two ints pointed to by 'p' and 'q'. */
ST_FUNC void swap(int *p, int *q)
{
    int tmp = *p;
    *p = *q;
    *q = tmp;
}
/* Push a new entry (type, storage 'r', constant '*vc') on the value
   stack, after flushing any CPU-flag/jump value currently on top. */
static void vsetc(CType *type, int r, CValue *vc)
{
    int v;

    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator. */
    if (vtop >= vstack) {
        v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    vtop++;
    vtop->type = *type;
    vtop->r = r;
    vtop->r2 = VT_CONST;
    vtop->c = *vc;
}
/* push constant of type "type" with useless value */
/* NOTE: 'cval' is deliberately left uninitialized — only the type and
   storage class of the pushed entry matter to the caller. */
ST_FUNC void vpush(CType *type)
{
    CValue cval;
    vsetc(type, VT_CONST, &cval);
}
369 /* push integer constant */
370 ST_FUNC void vpushi(int v)
372 CValue cval;
373 cval.i = v;
374 vsetc(&int_type, VT_CONST, &cval);
377 /* push a pointer sized constant */
378 static void vpushs(addr_t v)
380 CValue cval;
381 cval.ptr_offset = v;
382 vsetc(&size_type, VT_CONST, &cval);
385 /* push arbitrary 64bit constant */
386 ST_FUNC void vpush64(int ty, unsigned long long v)
388 CValue cval;
389 CType ctype;
390 ctype.t = ty;
391 ctype.ref = NULL;
392 cval.ull = v;
393 vsetc(&ctype, VT_CONST, &cval);
396 /* push long long constant */
397 static inline void vpushll(long long v)
399 vpush64(VT_LLONG, v);
402 /* push a symbol value of TYPE */
403 static inline void vpushsym(CType *type, Sym *sym)
405 CValue cval;
406 cval.ptr_offset = 0;
407 vsetc(type, VT_CONST | VT_SYM, &cval);
408 vtop->sym = sym;
/* Return a static symbol pointing to a section */
/* Creates a fresh anonymous global symbol of 'type' bound to
   (sec, offset, size) via put_extern_sym. */
ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    int v;
    Sym *sym;

    v = anon_sym++;
    sym = global_identifier_push(v, type->t | VT_STATIC, 0);
    sym->type.ref = type->ref;
    sym->r = VT_CONST | VT_SYM;
    put_extern_sym(sym, sec, offset, size);
    return sym;
}
425 /* push a reference to a section offset by adding a dummy symbol */
426 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
428 vpushsym(type, get_sym_ref(type, sec, offset, size));
/* define a new external reference to a symbol 'v' of type 'u' */
/* If 'v' is already known, the existing symbol is returned unchanged;
   otherwise a forward (VT_EXTERN) reference is pushed globally. */
ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
        s->r = r | VT_CONST | VT_SYM;
    }
    return s;
}
/* define a new external reference to a symbol 'v' with alternate asm
   name 'asm_label' of type 'u'. 'asm_label' is equal to NULL if there
   is no alternate name (most cases) */
/* An existing declaration with the old-style function type is upgraded
   to the new type; an incompatible redeclaration is an error. Weakness
   and visibility attributes from 'type' are merged into the symbol. */
static Sym *external_sym(int v, CType *type, int r, char *asm_label)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
        s->asm_label = asm_label;
        s->type.t |= VT_EXTERN;
    } else if (s->type.ref == func_old_type.ref) {
        s->type.ref = type->ref;
        s->r = r | VT_CONST | VT_SYM;
        s->type.t |= VT_EXTERN;
    } else if (!is_compatible_types(&s->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(v, NULL));
    }
    /* Merge some storage attributes. */
    if (type->t & VT_WEAK)
        weaken_symbol(s);

    if (type->t & VT_VIS_MASK)
        apply_visibility(s, type);

    return s;
}
477 /* push a reference to global symbol v */
478 ST_FUNC void vpush_global_sym(CType *type, int v)
480 vpushsym(type, external_global_sym(v, type, 0));
483 ST_FUNC void vset(CType *type, int r, int v)
485 CValue cval;
487 cval.i = v;
488 vsetc(type, r, &cval);
491 static void vseti(int r, int v)
493 CType type;
494 type.t = VT_INT;
495 type.ref = 0;
496 vset(&type, r, v);
/* Exchange the two top entries of the value stack, first materializing
   a CPU-flag/jump value into a register so it cannot end up below the
   top. */
ST_FUNC void vswap(void)
{
    SValue tmp;
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator. */
    if (vtop >= vstack) {
        int v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;

    /* XXX: +2% overall speed possible with optimized memswap
     *
     *  memswap(&vtop[0], &vtop[1], sizeof *vtop);
     */
}
520 ST_FUNC void vpushv(SValue *v)
522 if (vtop >= vstack + (VSTACK_SIZE - 1))
523 tcc_error("memory full (vstack)");
524 vtop++;
525 *vtop = *v;
528 ST_FUNC void vdup(void)
530 vpushv(vtop);
/* save r to the memory stack, and mark it as being free */
/* Scans the whole value stack: the first entry using 'r' (directly or
   as the second word of a long long) is stored to a freshly reserved
   local-stack slot; every entry using 'r' is then retargeted to that
   slot (VT_LLOCAL for saved lvalues, VT_LOCAL|lvalue otherwise). */
ST_FUNC void save_reg(int r)
{
    int l, saved, size, align;
    SValue *p, sv;
    CType *type;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p=vstack;p<=vtop;p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                /* lvalues hold an address; non-float scalars fit in a
                   word — in both cases store a pointer/int-sized slot */
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                    type = &char_pointer_type;
#else
                    type = &int_type;
#endif
                size = type_size(type, &align);
                loc = (loc - size) & -align;
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.ul = loc;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
                /* special long long case */
                if ((type->t & VT_BTYPE) == VT_LLONG) {
                    sv.c.ul += 4;
                    store(p->r2, &sv);
                }
#endif
                l = loc;
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.ul */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.ul = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count value-stack entries referencing r (either word) */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    /* no lightly-used rc2 register: fall back to the generic allocator */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            /* no stack entry references r: it is free */
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
659 /* save registers up to (vtop - n) stack entry */
660 ST_FUNC void save_regs(int n)
662 int r;
663 SValue *p, *p1;
664 p1 = vtop - n;
665 for(p = vstack;p <= p1; p++) {
666 r = p->r & VT_VALMASK;
667 if (r < VT_CONST) {
668 save_reg(r);
673 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
674 if needed */
675 static void move_reg(int r, int s, int t)
677 SValue sv;
679 if (r != s) {
680 save_reg(r);
681 sv.type.t = t;
682 sv.type.ref = NULL;
683 sv.r = s;
684 sv.c.ul = 0;
685 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    /* a VT_REF value must be materialized first (unless no code is wanted) */
    if (vtop->r & VT_REF && !nocode_wanted)
        gv(RC_INT);
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
/* Ensures the pointer on top of the stack is a bounds-checked one
   (inserting a gen_bounded_ptr_add with offset 0 when needed), then
   emits the dereference check. */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
/* Handles, in order: bit-field extraction (shift left/right after
   casting to int/llong), float constants (spilled to the data section
   and reloaded as lvalues), bound checking, and finally the reload into
   one register (or a register pair for llong/qlong/qfloat). Returns the
   main register holding the value. */
ST_FUNC int gv(int rc)
{
    int r, bit_pos, bit_size, size, align, i;
    int rc2;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;
        int bits = 32;
        bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~(VT_BITFIELD | (-1 << VT_STRUCT_SHIFT));
        /* cast to int to propagate signedness in following ops */
        if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
            type.t = VT_LLONG;
            bits = 64;
        } else
            type.t = VT_INT;
        if((vtop->type.t & VT_UNSIGNED) ||
           (vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;
        gen_cast(&type);
        /* generate shifts */
        vpushi(bits - (bit_pos + bit_size));
        gen_op(TOK_SHL);
        vpushi(bits - bit_size);
        /* NOTE: transformed to SHR if unsigned */
        gen_op(TOK_SAR);
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            Sym *sym;
            int *ptr;
            unsigned long offset;
#if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
            CValue check;
#endif

            /* XXX: unify with initializers handling ? */
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            offset = (data_section->data_offset + align - 1) & -align;
            data_section->data_offset = offset;
            /* XXX: not portable yet */
#if defined(__i386__) || defined(__x86_64__)
            /* Zero pad x87 tenbyte long doubles */
            if (size == LDOUBLE_SIZE) {
                vtop->c.tab[2] &= 0xffff;
#if LDOUBLE_SIZE == 16
                vtop->c.tab[3] = 0;
#endif
            }
#endif
            ptr = section_ptr_add(data_section, size);
            size = size >> 2;
#if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
            /* word-swap doubles on big-endian-FPU ARM */
            check.d = 1;
            if(check.tab[0])
                for(i=0;i<size;i++)
                    ptr[i] = vtop->c.tab[size-1-i];
            else
#endif
            for(i=0;i<size;i++)
                ptr[i] = vtop->c.tab[i];
            sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
            vtop->r |= VT_LVAL | VT_SYM;
            vtop->sym = sym;
            vtop->c.ptr_offset = 0;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        r = vtop->r & VT_VALMASK;
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
         || (vtop->r & VT_LVAL)
         || !(reg_classes[r] & rc)
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
         || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
         || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
         || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            )
        {
            r = get_reg(rc);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    ll = vtop->c.ull;
                    vtop->c.ui = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (r >= VT_CONST || /* XXX: test to VT_CONST incorrect ? */
                    (vtop->r & VT_LVAL)) {
                    /* We do not want to modifier the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
                    save_regs(1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_REF)
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                    t = VT_PTR;
#else
                    t = VT_INT;
#endif
                else if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
/* Generating the second value can spill the first; the trailing check
   reloads it in that case. */
ST_FUNC void gv2(int rc1, int rc2)
{
    int v;

    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    v = vtop[0].r & VT_VALMASK;
    if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
963 #ifndef TCC_TARGET_ARM64
964 /* wrapper around RC_FRET to return a register by type */
965 static int rc_fret(int t)
967 #ifdef TCC_TARGET_X86_64
968 if (t == VT_LDOUBLE) {
969 return RC_ST0;
971 #endif
972 return RC_FRET;
974 #endif
976 /* wrapper around REG_FRET to return a register by type */
977 static int reg_fret(int t)
979 #ifdef TCC_TARGET_X86_64
980 if (t == VT_LDOUBLE) {
981 return TREG_ST0;
983 #endif
984 return REG_FRET;
/* expand long long on stack in two int registers */
/* After this, vtop[-1] is the low word and vtop[0] the high word, both
   typed VT_INT with the original signedness bits preserved. */
static void lexpand(void)
{
    int u;

    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    gv(RC_INT);
    vdup();
    vtop[0].r = vtop[-1].r2;
    vtop[0].r2 = VT_CONST;
    vtop[-1].r2 = VT_CONST;
    vtop[0].type.t = VT_INT | u;
    vtop[-1].type.t = VT_INT | u;
}
#ifdef TCC_TARGET_ARM
/* expand long long on stack */
/* Like lexpand but avoids forcing the value into registers when it is a
   constant or a simple memory lvalue. */
ST_FUNC void lexpand_nr(void)
{
    int u,v;

    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    vdup();
    vtop->r2 = VT_CONST;
    vtop->type.t = VT_INT | u;
    v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: split the 64-bit immediate in two */
        vtop[-1].c.ui = vtop->c.ull;
        vtop->c.ui = vtop->c.ull >> 32;
        vtop->r = VT_CONST;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: high word lives 4 bytes further */
        vtop->c.ui += 4;
        vtop->r = vtop[-1].r;
    } else if (v > VT_CONST) {
        /* anything else: fall back to the register-based expansion */
        vtop--;
        lexpand();
    } else
        vtop->r = vtop[-1].r2;
    vtop[-1].r2 = VT_CONST;
    vtop[-1].type.t = VT_INT | u;
}
#endif
/* build a long long from two ints */
/* vtop[-1] receives the low word's register, vtop[0]'s register becomes
   its second (high) word, and the pair is retyped to 't'. */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
1039 /* rotate n first stack elements to the bottom
1040 I1 ... In -> I2 ... In I1 [top is right]
1041 */
1042 ST_FUNC void vrotb(int n)
1044 int i;
1045 SValue tmp;
1047 tmp = vtop[-n + 1];
1048 for(i=-n+1;i!=0;i++)
1049 vtop[i] = vtop[i+1];
1050 vtop[0] = tmp;
1053 /* rotate the n elements before entry e towards the top
1054 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1055 */
1056 ST_FUNC void vrote(SValue *e, int n)
1058 int i;
1059 SValue tmp;
1061 tmp = *e;
1062 for(i = 0;i < n - 1; i++)
1063 e[-i] = e[-i - 1];
1064 e[-n + 1] = tmp;
1067 /* rotate n first stack elements to the top
1068 I1 ... In -> In I1 ... I(n-1) [top is right]
1069 */
1070 ST_FUNC void vrott(int n)
1072 vrote(vtop, n);
/* pop stack value */
/* Discarded values still need cleanup: an x87 ST0 value must be popped
   from the FP stack, and a pending jump value must be resolved. */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0 && !nocode_wanted) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_JMP || v == VT_JMPI) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->c.ul);
    }
    vtop--;
}
/* convert stack entry to register and duplicate its value in another
   register */
/* long long values are split into two words, each word duplicated, then
   both copies rebuilt; scalar values are loaded and copied into a second
   register of the same class. */
static void gv_dup(void)
{
    int rc, t, r, r1;
    SValue sv;

    t = vtop->type.t;
    if ((t & VT_BTYPE) == VT_LLONG) {
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
    } else {
        /* duplicate value */
        rc = RC_INT;
        sv.type.t = VT_INT;
        if (is_float(t)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((t & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            }
#endif
            sv.type.t = t;
        }
        r = gv(rc);
        r1 = get_reg(rc);
        sv.r = r;
        sv.c.ul = 0;
        load(r1, &sv); /* move r to r1 */
        vdup();
        /* duplicates value */
        if (r != r1)
            vtop->r = r1;
    }
}
/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
/* Returns the (possibly updated) jump chain 't'. A plain value is first
   compared against 0; a compile-time constant is turned directly into
   either a jump or a fall-through. */
ST_FUNC int gvtst(int inv, int t)
{
    int v = vtop->r & VT_VALMASK;
    if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
        vpushi(0);
        gen_op(TOK_NE);
    }
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
        /* constant jmp optimization */
        if ((vtop->c.i != 0) != inv)
            t = gjmp(t);
        vtop--;
        return t;
    }
    return gtst(inv, t);
}
1160 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1161 /* generate CPU independent (unsigned) long long operations */
1162 static void gen_opl(int op)
1164 int t, a, b, op1, c, i;
1165 int func;
1166 unsigned short reg_iret = REG_IRET;
1167 unsigned short reg_lret = REG_LRET;
1168 SValue tmp;
1170 switch(op) {
1171 case '/':
1172 case TOK_PDIV:
1173 func = TOK___divdi3;
1174 goto gen_func;
1175 case TOK_UDIV:
1176 func = TOK___udivdi3;
1177 goto gen_func;
1178 case '%':
1179 func = TOK___moddi3;
1180 goto gen_mod_func;
1181 case TOK_UMOD:
1182 func = TOK___umoddi3;
1183 gen_mod_func:
1184 #ifdef TCC_ARM_EABI
1185 reg_iret = TREG_R2;
1186 reg_lret = TREG_R3;
1187 #endif
1188 gen_func:
1189 /* call generic long long function */
1190 vpush_global_sym(&func_old_type, func);
1191 vrott(3);
1192 gfunc_call(2);
1193 vpushi(0);
1194 vtop->r = reg_iret;
1195 vtop->r2 = reg_lret;
1196 break;
1197 case '^':
1198 case '&':
1199 case '|':
1200 case '*':
1201 case '+':
1202 case '-':
1203 t = vtop->type.t;
1204 vswap();
1205 lexpand();
1206 vrotb(3);
1207 lexpand();
1208 /* stack: L1 H1 L2 H2 */
1209 tmp = vtop[0];
1210 vtop[0] = vtop[-3];
1211 vtop[-3] = tmp;
1212 tmp = vtop[-2];
1213 vtop[-2] = vtop[-3];
1214 vtop[-3] = tmp;
1215 vswap();
1216 /* stack: H1 H2 L1 L2 */
1217 if (op == '*') {
1218 vpushv(vtop - 1);
1219 vpushv(vtop - 1);
1220 gen_op(TOK_UMULL);
1221 lexpand();
1222 /* stack: H1 H2 L1 L2 ML MH */
1223 for(i=0;i<4;i++)
1224 vrotb(6);
1225 /* stack: ML MH H1 H2 L1 L2 */
1226 tmp = vtop[0];
1227 vtop[0] = vtop[-2];
1228 vtop[-2] = tmp;
1229 /* stack: ML MH H1 L2 H2 L1 */
1230 gen_op('*');
1231 vrotb(3);
1232 vrotb(3);
1233 gen_op('*');
1234 /* stack: ML MH M1 M2 */
1235 gen_op('+');
1236 gen_op('+');
1237 } else if (op == '+' || op == '-') {
1238 /* XXX: add non carry method too (for MIPS or alpha) */
1239 if (op == '+')
1240 op1 = TOK_ADDC1;
1241 else
1242 op1 = TOK_SUBC1;
1243 gen_op(op1);
1244 /* stack: H1 H2 (L1 op L2) */
1245 vrotb(3);
1246 vrotb(3);
1247 gen_op(op1 + 1); /* TOK_xxxC2 */
1248 } else {
1249 gen_op(op);
1250 /* stack: H1 H2 (L1 op L2) */
1251 vrotb(3);
1252 vrotb(3);
1253 /* stack: (L1 op L2) H1 H2 */
1254 gen_op(op);
1255 /* stack: (L1 op L2) (H1 op H2) */
1257 /* stack: L H */
1258 lbuild(t);
1259 break;
1260 case TOK_SAR:
1261 case TOK_SHR:
1262 case TOK_SHL:
1263 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1264 t = vtop[-1].type.t;
1265 vswap();
1266 lexpand();
1267 vrotb(3);
1268 /* stack: L H shift */
1269 c = (int)vtop->c.i;
1270 /* constant: simpler */
1271 /* NOTE: all comments are for SHL. the other cases are
1272 done by swaping words */
1273 vpop();
1274 if (op != TOK_SHL)
1275 vswap();
1276 if (c >= 32) {
1277 /* stack: L H */
1278 vpop();
1279 if (c > 32) {
1280 vpushi(c - 32);
1281 gen_op(op);
1283 if (op != TOK_SAR) {
1284 vpushi(0);
1285 } else {
1286 gv_dup();
1287 vpushi(31);
1288 gen_op(TOK_SAR);
1290 vswap();
1291 } else {
1292 vswap();
1293 gv_dup();
1294 /* stack: H L L */
1295 vpushi(c);
1296 gen_op(op);
1297 vswap();
1298 vpushi(32 - c);
1299 if (op == TOK_SHL)
1300 gen_op(TOK_SHR);
1301 else
1302 gen_op(TOK_SHL);
1303 vrotb(3);
1304 /* stack: L L H */
1305 vpushi(c);
1306 if (op == TOK_SHL)
1307 gen_op(TOK_SHL);
1308 else
1309 gen_op(TOK_SHR);
1310 gen_op('|');
1312 if (op != TOK_SHL)
1313 vswap();
1314 lbuild(t);
1315 } else {
1316 /* XXX: should provide a faster fallback on x86 ? */
1317 switch(op) {
1318 case TOK_SAR:
1319 func = TOK___ashrdi3;
1320 goto gen_func;
1321 case TOK_SHR:
1322 func = TOK___lshrdi3;
1323 goto gen_func;
1324 case TOK_SHL:
1325 func = TOK___ashldi3;
1326 goto gen_func;
1329 break;
1330 default:
1331 /* compare operations */
1332 t = vtop->type.t;
1333 vswap();
1334 lexpand();
1335 vrotb(3);
1336 lexpand();
1337 /* stack: L1 H1 L2 H2 */
1338 tmp = vtop[-1];
1339 vtop[-1] = vtop[-2];
1340 vtop[-2] = tmp;
1341 /* stack: L1 L2 H1 H2 */
1342 /* compare high */
1343 op1 = op;
1344 /* when values are equal, we need to compare low words. since
1345 the jump is inverted, we invert the test too. */
1346 if (op1 == TOK_LT)
1347 op1 = TOK_LE;
1348 else if (op1 == TOK_GT)
1349 op1 = TOK_GE;
1350 else if (op1 == TOK_ULT)
1351 op1 = TOK_ULE;
1352 else if (op1 == TOK_UGT)
1353 op1 = TOK_UGE;
1354 a = 0;
1355 b = 0;
1356 gen_op(op1);
1357 if (op1 != TOK_NE) {
1358 a = gvtst(1, 0);
1360 if (op != TOK_EQ) {
1361 /* generate non equal test */
1362 /* XXX: NOT PORTABLE yet */
1363 if (a == 0) {
1364 b = gvtst(0, 0);
1365 } else {
1366 #if defined(TCC_TARGET_I386)
1367 b = psym(0x850f, 0);
1368 #elif defined(TCC_TARGET_ARM)
1369 b = ind;
1370 o(0x1A000000 | encbranch(ind, 0, 1));
1371 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1372 tcc_error("not implemented");
1373 #else
1374 #error not supported
1375 #endif
1378 /* compare low. Always unsigned */
1379 op1 = op;
1380 if (op1 == TOK_LT)
1381 op1 = TOK_ULT;
1382 else if (op1 == TOK_LE)
1383 op1 = TOK_ULE;
1384 else if (op1 == TOK_GT)
1385 op1 = TOK_UGT;
1386 else if (op1 == TOK_GE)
1387 op1 = TOK_UGE;
1388 gen_op(op1);
1389 a = gvtst(1, a);
1390 gsym(b);
1391 vseti(VT_JMPI, a);
1392 break;
1395 #endif
1397 /* handle integer constant optimizations and various machine
1398 independent opt */
1399 static void gen_opic(int op)
1401 int c1, c2, t1, t2, n;
1402 SValue *v1, *v2;
1403 long long l1, l2;
1404 typedef unsigned long long U;
1406 v1 = vtop - 1;
1407 v2 = vtop;
1408 t1 = v1->type.t & VT_BTYPE;
1409 t2 = v2->type.t & VT_BTYPE;
1411 if (t1 == VT_LLONG)
1412 l1 = v1->c.ll;
1413 else if (v1->type.t & VT_UNSIGNED)
1414 l1 = v1->c.ui;
1415 else
1416 l1 = v1->c.i;
1418 if (t2 == VT_LLONG)
1419 l2 = v2->c.ll;
1420 else if (v2->type.t & VT_UNSIGNED)
1421 l2 = v2->c.ui;
1422 else
1423 l2 = v2->c.i;
1425 /* currently, we cannot do computations with forward symbols */
1426 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1427 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1428 if (c1 && c2) {
1429 switch(op) {
1430 case '+': l1 += l2; break;
1431 case '-': l1 -= l2; break;
1432 case '&': l1 &= l2; break;
1433 case '^': l1 ^= l2; break;
1434 case '|': l1 |= l2; break;
1435 case '*': l1 *= l2; break;
1437 case TOK_PDIV:
1438 case '/':
1439 case '%':
1440 case TOK_UDIV:
1441 case TOK_UMOD:
1442 /* if division by zero, generate explicit division */
1443 if (l2 == 0) {
1444 if (const_wanted)
1445 tcc_error("division by zero in constant");
1446 goto general_case;
1448 switch(op) {
1449 default: l1 /= l2; break;
1450 case '%': l1 %= l2; break;
1451 case TOK_UDIV: l1 = (U)l1 / l2; break;
1452 case TOK_UMOD: l1 = (U)l1 % l2; break;
1454 break;
1455 case TOK_SHL: l1 <<= l2; break;
1456 case TOK_SHR: l1 = (U)l1 >> l2; break;
1457 case TOK_SAR: l1 >>= l2; break;
1458 /* tests */
1459 case TOK_ULT: l1 = (U)l1 < (U)l2; break;
1460 case TOK_UGE: l1 = (U)l1 >= (U)l2; break;
1461 case TOK_EQ: l1 = l1 == l2; break;
1462 case TOK_NE: l1 = l1 != l2; break;
1463 case TOK_ULE: l1 = (U)l1 <= (U)l2; break;
1464 case TOK_UGT: l1 = (U)l1 > (U)l2; break;
1465 case TOK_LT: l1 = l1 < l2; break;
1466 case TOK_GE: l1 = l1 >= l2; break;
1467 case TOK_LE: l1 = l1 <= l2; break;
1468 case TOK_GT: l1 = l1 > l2; break;
1469 /* logical */
1470 case TOK_LAND: l1 = l1 && l2; break;
1471 case TOK_LOR: l1 = l1 || l2; break;
1472 default:
1473 goto general_case;
1475 v1->c.ll = l1;
1476 vtop--;
1477 } else {
1478 /* if commutative ops, put c2 as constant */
1479 if (c1 && (op == '+' || op == '&' || op == '^' ||
1480 op == '|' || op == '*')) {
1481 vswap();
1482 c2 = c1; //c = c1, c1 = c2, c2 = c;
1483 l2 = l1; //l = l1, l1 = l2, l2 = l;
1485 if (!const_wanted &&
1486 c1 && ((l1 == 0 &&
1487 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1488 (l1 == -1 && op == TOK_SAR))) {
1489 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1490 vtop--;
1491 } else if (!const_wanted &&
1492 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1493 (l2 == -1 && op == '|') ||
1494 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1495 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1496 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1497 if (l2 == 1)
1498 vtop->c.ll = 0;
1499 vswap();
1500 vtop--;
1501 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1502 op == TOK_PDIV) &&
1503 l2 == 1) ||
1504 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1505 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1506 l2 == 0) ||
1507 (op == '&' &&
1508 l2 == -1))) {
1509 /* filter out NOP operations like x*1, x-0, x&-1... */
1510 vtop--;
1511 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1512 /* try to use shifts instead of muls or divs */
1513 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1514 n = -1;
1515 while (l2) {
1516 l2 >>= 1;
1517 n++;
1519 vtop->c.ll = n;
1520 if (op == '*')
1521 op = TOK_SHL;
1522 else if (op == TOK_PDIV)
1523 op = TOK_SAR;
1524 else
1525 op = TOK_SHR;
1527 goto general_case;
1528 } else if (c2 && (op == '+' || op == '-') &&
1529 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1530 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1531 /* symbol + constant case */
1532 if (op == '-')
1533 l2 = -l2;
1534 vtop--;
1535 vtop->c.ll += l2;
1536 } else {
1537 general_case:
1538 if (!nocode_wanted) {
1539 /* call low level op generator */
1540 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1541 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1542 gen_opl(op);
1543 else
1544 gen_opi(op);
1545 } else {
1546 vtop--;
1552 /* generate a floating point operation with constant propagation */
1553 static void gen_opif(int op)
1555 int c1, c2;
1556 SValue *v1, *v2;
1557 long double f1, f2;
1559 v1 = vtop - 1;
1560 v2 = vtop;
1561 /* currently, we cannot do computations with forward symbols */
1562 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1563 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1564 if (c1 && c2) {
1565 if (v1->type.t == VT_FLOAT) {
1566 f1 = v1->c.f;
1567 f2 = v2->c.f;
1568 } else if (v1->type.t == VT_DOUBLE) {
1569 f1 = v1->c.d;
1570 f2 = v2->c.d;
1571 } else {
1572 f1 = v1->c.ld;
1573 f2 = v2->c.ld;
1576 /* NOTE: we only do constant propagation if finite number (not
1577 NaN or infinity) (ANSI spec) */
1578 if (!ieee_finite(f1) || !ieee_finite(f2))
1579 goto general_case;
1581 switch(op) {
1582 case '+': f1 += f2; break;
1583 case '-': f1 -= f2; break;
1584 case '*': f1 *= f2; break;
1585 case '/':
1586 if (f2 == 0.0) {
1587 if (const_wanted)
1588 tcc_error("division by zero in constant");
1589 goto general_case;
1591 f1 /= f2;
1592 break;
1593 /* XXX: also handles tests ? */
1594 default:
1595 goto general_case;
1597 /* XXX: overflow test ? */
1598 if (v1->type.t == VT_FLOAT) {
1599 v1->c.f = f1;
1600 } else if (v1->type.t == VT_DOUBLE) {
1601 v1->c.d = f1;
1602 } else {
1603 v1->c.ld = f1;
1605 vtop--;
1606 } else {
1607 general_case:
1608 if (!nocode_wanted) {
1609 gen_opf(op);
1610 } else {
1611 vtop--;
1616 static int pointed_size(CType *type)
1618 int align;
1619 return type_size(pointed_type(type), &align);
1622 static void vla_runtime_pointed_size(CType *type)
1624 int align;
1625 vla_runtime_type_size(pointed_type(type), &align);
1628 static inline int is_null_pointer(SValue *p)
1630 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1631 return 0;
1632 return ((p->type.t & VT_BTYPE) == VT_INT && p->c.i == 0) ||
1633 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.ll == 0) ||
1634 ((p->type.t & VT_BTYPE) == VT_PTR && p->c.ptr_offset == 0);
1637 static inline int is_integer_btype(int bt)
1639 return (bt == VT_BYTE || bt == VT_SHORT ||
1640 bt == VT_INT || bt == VT_LLONG);
1643 /* check types for comparison or subtraction of pointers */
1644 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1646 CType *type1, *type2, tmp_type1, tmp_type2;
1647 int bt1, bt2;
1649 /* null pointers are accepted for all comparisons as gcc */
1650 if (is_null_pointer(p1) || is_null_pointer(p2))
1651 return;
1652 type1 = &p1->type;
1653 type2 = &p2->type;
1654 bt1 = type1->t & VT_BTYPE;
1655 bt2 = type2->t & VT_BTYPE;
1656 /* accept comparison between pointer and integer with a warning */
1657 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1658 if (op != TOK_LOR && op != TOK_LAND )
1659 tcc_warning("comparison between pointer and integer");
1660 return;
1663 /* both must be pointers or implicit function pointers */
1664 if (bt1 == VT_PTR) {
1665 type1 = pointed_type(type1);
1666 } else if (bt1 != VT_FUNC)
1667 goto invalid_operands;
1669 if (bt2 == VT_PTR) {
1670 type2 = pointed_type(type2);
1671 } else if (bt2 != VT_FUNC) {
1672 invalid_operands:
1673 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1675 if ((type1->t & VT_BTYPE) == VT_VOID ||
1676 (type2->t & VT_BTYPE) == VT_VOID)
1677 return;
1678 tmp_type1 = *type1;
1679 tmp_type2 = *type2;
1680 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1681 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1682 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1683 /* gcc-like error if '-' is used */
1684 if (op == '-')
1685 goto invalid_operands;
1686 else
1687 tcc_warning("comparison of distinct pointer types lacks a cast");
1691 /* generic gen_op: handles types problems */
1692 ST_FUNC void gen_op(int op)
1694 int u, t1, t2, bt1, bt2, t;
1695 CType type1;
1697 t1 = vtop[-1].type.t;
1698 t2 = vtop[0].type.t;
1699 bt1 = t1 & VT_BTYPE;
1700 bt2 = t2 & VT_BTYPE;
1702 if (bt1 == VT_PTR || bt2 == VT_PTR) {
1703 /* at least one operand is a pointer */
1704 /* relationnal op: must be both pointers */
1705 if (op >= TOK_ULT && op <= TOK_LOR) {
1706 check_comparison_pointer_types(vtop - 1, vtop, op);
1707 /* pointers are handled are unsigned */
1708 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1709 t = VT_LLONG | VT_UNSIGNED;
1710 #else
1711 t = VT_INT | VT_UNSIGNED;
1712 #endif
1713 goto std_op;
1715 /* if both pointers, then it must be the '-' op */
1716 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1717 if (op != '-')
1718 tcc_error("cannot use pointers here");
1719 check_comparison_pointer_types(vtop - 1, vtop, op);
1720 /* XXX: check that types are compatible */
1721 if (vtop[-1].type.t & VT_VLA) {
1722 vla_runtime_pointed_size(&vtop[-1].type);
1723 } else {
1724 vpushi(pointed_size(&vtop[-1].type));
1726 vrott(3);
1727 gen_opic(op);
1728 /* set to integer type */
1729 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1730 vtop->type.t = VT_LLONG;
1731 #else
1732 vtop->type.t = VT_INT;
1733 #endif
1734 vswap();
1735 gen_op(TOK_PDIV);
1736 } else {
1737 /* exactly one pointer : must be '+' or '-'. */
1738 if (op != '-' && op != '+')
1739 tcc_error("cannot use pointers here");
1740 /* Put pointer as first operand */
1741 if (bt2 == VT_PTR) {
1742 vswap();
1743 swap(&t1, &t2);
1745 type1 = vtop[-1].type;
1746 type1.t &= ~VT_ARRAY;
1747 if (vtop[-1].type.t & VT_VLA)
1748 vla_runtime_pointed_size(&vtop[-1].type);
1749 else {
1750 u = pointed_size(&vtop[-1].type);
1751 if (u < 0)
1752 tcc_error("unknown array element size");
1753 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1754 vpushll(u);
1755 #else
1756 /* XXX: cast to int ? (long long case) */
1757 vpushi(u);
1758 #endif
1760 gen_op('*');
1761 #if 0
1762 /* #ifdef CONFIG_TCC_BCHECK
1763 The main reason to removing this code:
1764 #include <stdio.h>
1765 int main ()
1767 int v[10];
1768 int i = 10;
1769 int j = 9;
1770 fprintf(stderr, "v+i-j = %p\n", v+i-j);
1771 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
1773 When this code is on. then the output looks like
1774 v+i-j = 0xfffffffe
1775 v+(i-j) = 0xbff84000
1776 */
1777 /* if evaluating constant expression, no code should be
1778 generated, so no bound check */
1779 if (tcc_state->do_bounds_check && !const_wanted) {
1780 /* if bounded pointers, we generate a special code to
1781 test bounds */
1782 if (op == '-') {
1783 vpushi(0);
1784 vswap();
1785 gen_op('-');
1787 gen_bounded_ptr_add();
1788 } else
1789 #endif
1791 gen_opic(op);
1793 /* put again type if gen_opic() swaped operands */
1794 vtop->type = type1;
1796 } else if (is_float(bt1) || is_float(bt2)) {
1797 /* compute bigger type and do implicit casts */
1798 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
1799 t = VT_LDOUBLE;
1800 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
1801 t = VT_DOUBLE;
1802 } else {
1803 t = VT_FLOAT;
1805 /* floats can only be used for a few operations */
1806 if (op != '+' && op != '-' && op != '*' && op != '/' &&
1807 (op < TOK_ULT || op > TOK_GT))
1808 tcc_error("invalid operands for binary operation");
1809 goto std_op;
1810 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
1811 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
1812 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
1813 t |= VT_UNSIGNED;
1814 goto std_op;
1815 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
1816 /* cast to biggest op */
1817 t = VT_LLONG;
1818 /* convert to unsigned if it does not fit in a long long */
1819 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
1820 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
1821 t |= VT_UNSIGNED;
1822 goto std_op;
1823 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1824 tcc_error("comparison of struct");
1825 } else {
1826 /* integer operations */
1827 t = VT_INT;
1828 /* convert to unsigned if it does not fit in an integer */
1829 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
1830 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
1831 t |= VT_UNSIGNED;
1832 std_op:
1833 /* XXX: currently, some unsigned operations are explicit, so
1834 we modify them here */
1835 if (t & VT_UNSIGNED) {
1836 if (op == TOK_SAR)
1837 op = TOK_SHR;
1838 else if (op == '/')
1839 op = TOK_UDIV;
1840 else if (op == '%')
1841 op = TOK_UMOD;
1842 else if (op == TOK_LT)
1843 op = TOK_ULT;
1844 else if (op == TOK_GT)
1845 op = TOK_UGT;
1846 else if (op == TOK_LE)
1847 op = TOK_ULE;
1848 else if (op == TOK_GE)
1849 op = TOK_UGE;
1851 vswap();
1852 type1.t = t;
1853 gen_cast(&type1);
1854 vswap();
1855 /* special case for shifts and long long: we keep the shift as
1856 an integer */
1857 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
1858 type1.t = VT_INT;
1859 gen_cast(&type1);
1860 if (is_float(t))
1861 gen_opif(op);
1862 else
1863 gen_opic(op);
1864 if (op >= TOK_ULT && op <= TOK_GT) {
1865 /* relationnal op: the result is an int */
1866 vtop->type.t = VT_INT;
1867 } else {
1868 vtop->type.t = t;
1871 // Make sure that we have converted to an rvalue:
1872 if (vtop->r & VT_LVAL && !nocode_wanted)
1873 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
1876 #ifndef TCC_TARGET_ARM
1877 /* generic itof for unsigned long long case */
1878 static void gen_cvt_itof1(int t)
1880 #ifdef TCC_TARGET_ARM64
1881 gen_cvt_itof(t);
1882 #else
1883 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
1884 (VT_LLONG | VT_UNSIGNED)) {
1886 if (t == VT_FLOAT)
1887 vpush_global_sym(&func_old_type, TOK___floatundisf);
1888 #if LDOUBLE_SIZE != 8
1889 else if (t == VT_LDOUBLE)
1890 vpush_global_sym(&func_old_type, TOK___floatundixf);
1891 #endif
1892 else
1893 vpush_global_sym(&func_old_type, TOK___floatundidf);
1894 vrott(2);
1895 gfunc_call(1);
1896 vpushi(0);
1897 vtop->r = reg_fret(t);
1898 } else {
1899 gen_cvt_itof(t);
1901 #endif
1903 #endif
1905 /* generic ftoi for unsigned long long case */
1906 static void gen_cvt_ftoi1(int t)
1908 #ifdef TCC_TARGET_ARM64
1909 gen_cvt_ftoi(t);
1910 #else
1911 int st;
1913 if (t == (VT_LLONG | VT_UNSIGNED)) {
1914 /* not handled natively */
1915 st = vtop->type.t & VT_BTYPE;
1916 if (st == VT_FLOAT)
1917 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
1918 #if LDOUBLE_SIZE != 8
1919 else if (st == VT_LDOUBLE)
1920 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
1921 #endif
1922 else
1923 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
1924 vrott(2);
1925 gfunc_call(1);
1926 vpushi(0);
1927 vtop->r = REG_IRET;
1928 vtop->r2 = REG_LRET;
1929 } else {
1930 gen_cvt_ftoi(t);
1932 #endif
1935 /* force char or short cast */
1936 static void force_charshort_cast(int t)
1938 int bits, dbt;
1939 dbt = t & VT_BTYPE;
1940 /* XXX: add optimization if lvalue : just change type and offset */
1941 if (dbt == VT_BYTE)
1942 bits = 8;
1943 else
1944 bits = 16;
1945 if (t & VT_UNSIGNED) {
1946 vpushi((1 << bits) - 1);
1947 gen_op('&');
1948 } else {
1949 bits = 32 - bits;
1950 vpushi(bits);
1951 gen_op(TOK_SHL);
1952 /* result must be signed or the SAR is converted to an SHL
1953 This was not the case when "t" was a signed short
1954 and the last value on the stack was an unsigned int */
1955 vtop->type.t &= ~VT_UNSIGNED;
1956 vpushi(bits);
1957 gen_op(TOK_SAR);
1961 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
1962 static void gen_cast(CType *type)
1964 int sbt, dbt, sf, df, c, p;
1966 /* special delayed cast for char/short */
1967 /* XXX: in some cases (multiple cascaded casts), it may still
1968 be incorrect */
1969 if (vtop->r & VT_MUSTCAST) {
1970 vtop->r &= ~VT_MUSTCAST;
1971 force_charshort_cast(vtop->type.t);
1974 /* bitfields first get cast to ints */
1975 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
1976 gv(RC_INT);
1979 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
1980 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
1982 if (sbt != dbt) {
1983 sf = is_float(sbt);
1984 df = is_float(dbt);
1985 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1986 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
1987 if (c) {
1988 /* constant case: we can do it now */
1989 /* XXX: in ISOC, cannot do it if error in convert */
1990 if (sbt == VT_FLOAT)
1991 vtop->c.ld = vtop->c.f;
1992 else if (sbt == VT_DOUBLE)
1993 vtop->c.ld = vtop->c.d;
1995 if (df) {
1996 if ((sbt & VT_BTYPE) == VT_LLONG) {
1997 if (sbt & VT_UNSIGNED)
1998 vtop->c.ld = vtop->c.ull;
1999 else
2000 vtop->c.ld = vtop->c.ll;
2001 } else if(!sf) {
2002 if (sbt & VT_UNSIGNED)
2003 vtop->c.ld = vtop->c.ui;
2004 else
2005 vtop->c.ld = vtop->c.i;
2008 if (dbt == VT_FLOAT)
2009 vtop->c.f = (float)vtop->c.ld;
2010 else if (dbt == VT_DOUBLE)
2011 vtop->c.d = (double)vtop->c.ld;
2012 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2013 vtop->c.ull = (unsigned long long)vtop->c.ld;
2014 } else if (sf && dbt == VT_BOOL) {
2015 vtop->c.i = (vtop->c.ld != 0);
2016 } else {
2017 if(sf)
2018 vtop->c.ll = (long long)vtop->c.ld;
2019 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2020 vtop->c.ll = vtop->c.ull;
2021 else if (sbt & VT_UNSIGNED)
2022 vtop->c.ll = vtop->c.ui;
2023 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2024 else if (sbt == VT_PTR)
2026 #endif
2027 else if (sbt != VT_LLONG)
2028 vtop->c.ll = vtop->c.i;
2030 if (dbt == (VT_LLONG|VT_UNSIGNED))
2031 vtop->c.ull = vtop->c.ll;
2032 else if (dbt == VT_BOOL)
2033 vtop->c.i = (vtop->c.ll != 0);
2034 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2035 else if (dbt == VT_PTR)
2037 #endif
2038 else if (dbt != VT_LLONG) {
2039 int s = 0;
2040 if ((dbt & VT_BTYPE) == VT_BYTE)
2041 s = 24;
2042 else if ((dbt & VT_BTYPE) == VT_SHORT)
2043 s = 16;
2044 if(dbt & VT_UNSIGNED)
2045 vtop->c.ui = ((unsigned int)vtop->c.ll << s) >> s;
2046 else
2047 vtop->c.i = ((int)vtop->c.ll << s) >> s;
2050 } else if (p && dbt == VT_BOOL) {
2051 vtop->r = VT_CONST;
2052 vtop->c.i = 1;
2053 } else if (!nocode_wanted) {
2054 /* non constant case: generate code */
2055 if (sf && df) {
2056 /* convert from fp to fp */
2057 gen_cvt_ftof(dbt);
2058 } else if (df) {
2059 /* convert int to fp */
2060 gen_cvt_itof1(dbt);
2061 } else if (sf) {
2062 /* convert fp to int */
2063 if (dbt == VT_BOOL) {
2064 vpushi(0);
2065 gen_op(TOK_NE);
2066 } else {
2067 /* we handle char/short/etc... with generic code */
2068 if (dbt != (VT_INT | VT_UNSIGNED) &&
2069 dbt != (VT_LLONG | VT_UNSIGNED) &&
2070 dbt != VT_LLONG)
2071 dbt = VT_INT;
2072 gen_cvt_ftoi1(dbt);
2073 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2074 /* additional cast for char/short... */
2075 vtop->type.t = dbt;
2076 gen_cast(type);
2079 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2080 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2081 if ((sbt & VT_BTYPE) != VT_LLONG && !nocode_wanted) {
2082 /* scalar to long long */
2083 /* machine independent conversion */
2084 gv(RC_INT);
2085 /* generate high word */
2086 if (sbt == (VT_INT | VT_UNSIGNED)) {
2087 vpushi(0);
2088 gv(RC_INT);
2089 } else {
2090 if (sbt == VT_PTR) {
2091 /* cast from pointer to int before we apply
2092 shift operation, which pointers don't support*/
2093 gen_cast(&int_type);
2095 gv_dup();
2096 vpushi(31);
2097 gen_op(TOK_SAR);
2099 /* patch second register */
2100 vtop[-1].r2 = vtop->r;
2101 vpop();
2103 #else
2104 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2105 (dbt & VT_BTYPE) == VT_PTR ||
2106 (dbt & VT_BTYPE) == VT_FUNC) {
2107 if ((sbt & VT_BTYPE) != VT_LLONG &&
2108 (sbt & VT_BTYPE) != VT_PTR &&
2109 (sbt & VT_BTYPE) != VT_FUNC && !nocode_wanted) {
2110 /* need to convert from 32bit to 64bit */
2111 gv(RC_INT);
2112 if (sbt != (VT_INT | VT_UNSIGNED)) {
2113 #if defined(TCC_TARGET_ARM64)
2114 gen_cvt_sxtw();
2115 #elif defined(TCC_TARGET_X86_64)
2116 int r = gv(RC_INT);
2117 /* x86_64 specific: movslq */
2118 o(0x6348);
2119 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2120 #else
2121 #error
2122 #endif
2125 #endif
2126 } else if (dbt == VT_BOOL) {
2127 /* scalar to bool */
2128 vpushi(0);
2129 gen_op(TOK_NE);
2130 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2131 (dbt & VT_BTYPE) == VT_SHORT) {
2132 if (sbt == VT_PTR) {
2133 vtop->type.t = VT_INT;
2134 tcc_warning("nonportable conversion from pointer to char/short");
2136 force_charshort_cast(dbt);
2137 } else if ((dbt & VT_BTYPE) == VT_INT) {
2138 /* scalar to int */
2139 if (sbt == VT_LLONG && !nocode_wanted) {
2140 /* from long long: just take low order word */
2141 lexpand();
2142 vpop();
2143 }
2144 /* if lvalue and single word type, nothing to do because
2145 the lvalue already contains the real type size (see
2146 VT_LVAL_xxx constants) */
2149 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2150 /* if we are casting between pointer types,
2151 we must update the VT_LVAL_xxx size */
2152 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2153 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2155 vtop->type = *type;
2158 /* return type size as known at compile time. Put alignment at 'a' */
2159 ST_FUNC int type_size(CType *type, int *a)
2161 Sym *s;
2162 int bt;
2164 bt = type->t & VT_BTYPE;
2165 if (bt == VT_STRUCT) {
2166 /* struct/union */
2167 s = type->ref;
2168 *a = s->r;
2169 return s->c;
2170 } else if (bt == VT_PTR) {
2171 if (type->t & VT_ARRAY) {
2172 int ts;
2174 s = type->ref;
2175 ts = type_size(&s->type, a);
2177 if (ts < 0 && s->c < 0)
2178 ts = -ts;
2180 return ts * s->c;
2181 } else {
2182 *a = PTR_SIZE;
2183 return PTR_SIZE;
2185 } else if (bt == VT_LDOUBLE) {
2186 *a = LDOUBLE_ALIGN;
2187 return LDOUBLE_SIZE;
2188 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2189 #ifdef TCC_TARGET_I386
2190 #ifdef TCC_TARGET_PE
2191 *a = 8;
2192 #else
2193 *a = 4;
2194 #endif
2195 #elif defined(TCC_TARGET_ARM)
2196 #ifdef TCC_ARM_EABI
2197 *a = 8;
2198 #else
2199 *a = 4;
2200 #endif
2201 #else
2202 *a = 8;
2203 #endif
2204 return 8;
2205 } else if (bt == VT_INT || bt == VT_ENUM || bt == VT_FLOAT) {
2206 *a = 4;
2207 return 4;
2208 } else if (bt == VT_SHORT) {
2209 *a = 2;
2210 return 2;
2211 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2212 *a = 8;
2213 return 16;
2214 } else {
2215 /* char, void, function, _Bool */
2216 *a = 1;
2217 return 1;
2221 /* push type size as known at runtime time on top of value stack. Put
2222 alignment at 'a' */
2223 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2225 if (type->t & VT_VLA) {
2226 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2227 } else {
2228 vpushi(type_size(type, a));
2232 static void vla_sp_restore(void) {
2233 if (vlas_in_scope) {
2234 gen_vla_sp_restore(vla_sp_loc);
2238 static void vla_sp_restore_root(void) {
2239 if (vlas_in_scope) {
2240 gen_vla_sp_restore(vla_sp_root_loc);
2244 /* return the pointed type of t */
2245 static inline CType *pointed_type(CType *type)
2247 return &type->ref->type;
2250 /* modify type so that its it is a pointer to type. */
2251 ST_FUNC void mk_pointer(CType *type)
2253 Sym *s;
2254 s = sym_push(SYM_FIELD, type, 0, -1);
2255 type->t = VT_PTR | (type->t & ~VT_TYPE);
2256 type->ref = s;
2259 /* compare function types. OLD functions match any new functions */
2260 static int is_compatible_func(CType *type1, CType *type2)
2262 Sym *s1, *s2;
2264 s1 = type1->ref;
2265 s2 = type2->ref;
2266 if (!is_compatible_types(&s1->type, &s2->type))
2267 return 0;
2268 /* check func_call */
2269 if (s1->a.func_call != s2->a.func_call)
2270 return 0;
2271 /* XXX: not complete */
2272 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2273 return 1;
2274 if (s1->c != s2->c)
2275 return 0;
2276 while (s1 != NULL) {
2277 if (s2 == NULL)
2278 return 0;
2279 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2280 return 0;
2281 s1 = s1->next;
2282 s2 = s2->next;
2284 if (s2)
2285 return 0;
2286 return 1;
2289 /* return true if type1 and type2 are the same. If unqualified is
2290 true, qualifiers on the types are ignored.
2292 - enums are not checked as gcc __builtin_types_compatible_p ()
2293 */
2294 static int compare_types(CType *type1, CType *type2, int unqualified)
2296 int bt1, t1, t2;
2298 t1 = type1->t & VT_TYPE;
2299 t2 = type2->t & VT_TYPE;
2300 if (unqualified) {
2301 /* strip qualifiers before comparing */
2302 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2303 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2305 /* Default Vs explicit signedness only matters for char */
2306 if ((t1 & VT_BTYPE) != VT_BYTE) {
2307 t1 &= ~VT_DEFSIGN;
2308 t2 &= ~VT_DEFSIGN;
2310 /* XXX: bitfields ? */
2311 if (t1 != t2)
2312 return 0;
2313 /* test more complicated cases */
2314 bt1 = t1 & VT_BTYPE;
2315 if (bt1 == VT_PTR) {
2316 type1 = pointed_type(type1);
2317 type2 = pointed_type(type2);
2318 return is_compatible_types(type1, type2);
2319 } else if (bt1 == VT_STRUCT) {
2320 return (type1->ref == type2->ref);
2321 } else if (bt1 == VT_FUNC) {
2322 return is_compatible_func(type1, type2);
2323 } else {
2324 return 1;
2328 /* return true if type1 and type2 are exactly the same (including
2329 qualifiers).
2330 */
2331 static int is_compatible_types(CType *type1, CType *type2)
2333 return compare_types(type1,type2,0);
2336 /* return true if type1 and type2 are the same (ignoring qualifiers).
2337 */
2338 static int is_compatible_parameter_types(CType *type1, CType *type2)
2340 return compare_types(type1,type2,1);
2343 /* print a type. If 'varstr' is not NULL, then the variable is also
2344 printed in the type */
2345 /* XXX: union */
2346 /* XXX: add array and function pointers */
2347 static void type_to_str(char *buf, int buf_size,
2348 CType *type, const char *varstr)
2350 int bt, v, t;
2351 Sym *s, *sa;
2352 char buf1[256];
2353 const char *tstr;
2355 t = type->t & VT_TYPE;
2356 bt = t & VT_BTYPE;
2357 buf[0] = '\0';
2358 if (t & VT_CONSTANT)
2359 pstrcat(buf, buf_size, "const ");
2360 if (t & VT_VOLATILE)
2361 pstrcat(buf, buf_size, "volatile ");
2362 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2363 pstrcat(buf, buf_size, "unsigned ");
2364 else if (t & VT_DEFSIGN)
2365 pstrcat(buf, buf_size, "signed ");
2366 switch(bt) {
2367 case VT_VOID:
2368 tstr = "void";
2369 goto add_tstr;
2370 case VT_BOOL:
2371 tstr = "_Bool";
2372 goto add_tstr;
2373 case VT_BYTE:
2374 tstr = "char";
2375 goto add_tstr;
2376 case VT_SHORT:
2377 tstr = "short";
2378 goto add_tstr;
2379 case VT_INT:
2380 tstr = "int";
2381 goto add_tstr;
2382 case VT_LONG:
2383 tstr = "long";
2384 goto add_tstr;
2385 case VT_LLONG:
2386 tstr = "long long";
2387 goto add_tstr;
2388 case VT_FLOAT:
2389 tstr = "float";
2390 goto add_tstr;
2391 case VT_DOUBLE:
2392 tstr = "double";
2393 goto add_tstr;
2394 case VT_LDOUBLE:
2395 tstr = "long double";
2396 add_tstr:
2397 pstrcat(buf, buf_size, tstr);
2398 break;
2399 case VT_ENUM:
2400 case VT_STRUCT:
2401 if (bt == VT_STRUCT)
2402 tstr = "struct ";
2403 else
2404 tstr = "enum ";
2405 pstrcat(buf, buf_size, tstr);
2406 v = type->ref->v & ~SYM_STRUCT;
2407 if (v >= SYM_FIRST_ANOM)
2408 pstrcat(buf, buf_size, "<anonymous>");
2409 else
2410 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2411 break;
2412 case VT_FUNC:
2413 s = type->ref;
2414 type_to_str(buf, buf_size, &s->type, varstr);
2415 pstrcat(buf, buf_size, "(");
2416 sa = s->next;
2417 while (sa != NULL) {
2418 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2419 pstrcat(buf, buf_size, buf1);
2420 sa = sa->next;
2421 if (sa)
2422 pstrcat(buf, buf_size, ", ");
2424 pstrcat(buf, buf_size, ")");
2425 goto no_var;
2426 case VT_PTR:
2427 s = type->ref;
2428 pstrcpy(buf1, sizeof(buf1), "*");
2429 if (varstr)
2430 pstrcat(buf1, sizeof(buf1), varstr);
2431 type_to_str(buf, buf_size, &s->type, buf1);
2432 goto no_var;
2434 if (varstr) {
2435 pstrcat(buf, buf_size, " ");
2436 pstrcat(buf, buf_size, varstr);
2438 no_var: ;
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed.

   Emits warnings (not errors) for the classic C assignment laxities:
   integer<->pointer, incompatible pointer targets, discarded qualifiers.
   Hard errors only for void mismatches and incompatible struct/unknown
   cases.  Always finishes by generating the cast of vtop to 'dt'. */
static void gen_assign_cast(CType *dt)
{
    CType *st, *type1, *type2, tmp_type1, tmp_type2;
    char buf1[256], buf2[256];
    int dbt, sbt;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            ; /*
              It is Ok if both are void
              A test program:
                void func1() {}
                void func2() {
                  return func1();
                }
              gcc accepts this program
              */
        else
            tcc_error("cannot cast from/to void");
    }
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            goto type_ok;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            goto type_ok;
        }
        type1 = pointed_type(dt);
        /* a function is implicitely a function pointer */
        if (sbt == VT_FUNC) {
            /* only warn when target is not void* and the pointed types
               really differ */
            if ((type1->t & VT_BTYPE) != VT_VOID &&
                !is_compatible_types(pointed_type(dt), st))
                tcc_warning("assignment from incompatible pointer type");
            goto type_ok;
        }
        if (sbt != VT_PTR)
            goto error;
        type2 = pointed_type(st);
        if ((type1->t & VT_BTYPE) == VT_VOID ||
            (type2->t & VT_BTYPE) == VT_VOID) {
            /* void * can match anything */
        } else {
            /* exact type match, except for unsigned */
            tmp_type1 = *type1;
            tmp_type2 = *type2;
            /* ignore signedness and qualifiers when comparing pointed
               types; those only produce the qualifier warning below */
            tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT |
                             VT_VOLATILE);
            tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT |
                             VT_VOLATILE);
            if (!is_compatible_types(&tmp_type1, &tmp_type2))
                tcc_warning("assignment from incompatible pointer type");
        }
        /* check const and volatile */
        if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
            (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
            tcc_warning("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
        /* structs must be compatible modulo qualifiers */
        tmp_type1 = *dt;
        tmp_type2 = *st;
        tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
        tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
        if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        error:
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
        }
        break;
    }
 type_ok:
    gen_cast(dt);
}
/* store vtop in lvalue pushed on stack.

   Stack layout on entry: vtop[-1] is the destination lvalue, vtop is
   the value to store.  Handles four cases: plain scalar store,
   struct assignment (expanded into a memcpy call), bitfield store
   (mask/shift/or then recursive vstore of the containing word), and
   two-register wide types (long long / qlong / qfloat).  On return the
   stored value remains on the stack as the expression result. */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts: defer the narrowing cast until the
           value is actually reused (VT_MUSTCAST) */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & (VT_TYPE & ~(VT_BITFIELD | (-1 << VT_STRUCT_SHIFT)));
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    } else {
        delayed_cast = 0;
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);
    }

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        if (!nocode_wanted) {
            size = type_size(&vtop->type, &align);

            /* destination */
            vswap();
            vtop->type.t = VT_PTR;
            gaddrof();

            /* address of memcpy() */
#ifdef TCC_ARM_EABI
            /* pick an alignment-specialized memcpy when possible */
            if(!(align & 7))
                vpush_global_sym(&func_old_type, TOK_memcpy8);
            else if(!(align & 3))
                vpush_global_sym(&func_old_type, TOK_memcpy4);
            else
#endif
            vpush_global_sym(&func_old_type, TOK_memcpy);

            vswap();
            /* source */
            vpushv(vtop - 2);
            vtop->type.t = VT_PTR;
            gaddrof();
            /* type size */
            vpushi(size);
            gfunc_call(3);
        } else {
            vswap();
            vpop();
        }
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~(VT_BITFIELD | (-1 << VT_STRUCT_SHIFT));

        if((ft & VT_BTYPE) == VT_BOOL) {
            /* _Bool bitfields: normalize to 0/1 first, then store as
               unsigned byte */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }

        /* duplicate destination */
        vdup();
        vtop[-1] = vtop[-2];

        /* mask and shift source */
        if((ft & VT_BTYPE) != VT_BOOL) {
            if((ft & VT_BTYPE) == VT_LLONG) {
                vpushll((1ULL << bit_size) - 1ULL);
            } else {
                vpushi((1 << bit_size) - 1);
            }
            gen_op('&');
        }
        vpushi(bit_pos);
        gen_op(TOK_SHL);
        /* load destination, mask and or with source */
        vswap();
        if((ft & VT_BTYPE) == VT_LLONG) {
            vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
        } else {
            vpushi(~(((1 << bit_size) - 1) << bit_pos));
        }
        gen_op('&');
        gen_op('|');
        /* store result */
        vstore();
        /* ... and discard */
        vpop();

    } else {
        if (!nocode_wanted) {
#ifdef CONFIG_TCC_BCHECK
            /* bound check case */
            if (vtop[-1].r & VT_MUSTBOUND) {
                vswap();
                gbound();
                vswap();
            }
#endif
            rc = RC_INT;
            if (is_float(ft)) {
                rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
                if ((ft & VT_BTYPE) == VT_LDOUBLE) {
                    rc = RC_ST0;
                } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
                    rc = RC_FRET;
                }
#endif
            }
            r = gv(rc); /* generate value */
            /* if lvalue was saved on stack, must read it */
            if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
                SValue sv;
                t = get_reg(RC_INT);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                sv.type.t = VT_PTR;
#else
                sv.type.t = VT_INT;
#endif
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.ul = vtop[-1].c.ul;
                load(t, &sv);
                vtop[-1].r = t | VT_LVAL;
            }
            /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((ft & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
#endif
                vtop[-1].type.t = load_type;
                store(r, vtop - 1);
                vswap();
                /* convert to int to increment easily */
                vtop->type.t = addr_type;
                gaddrof();
                vpushi(load_size);
                gen_op('+');
                vtop->r |= VT_LVAL;
                vswap();
                vtop[-1].type.t = load_type;
                /* XXX: it works because r2 is spilled last ! */
                store(vtop->r2, vtop - 1);
            } else {
                store(r, vtop - 1);
            }
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
    }
}
/* post defines POST/PRE add. c is the token ++ or --.

   Expects an lvalue on the value stack.  For post-inc/dec the original
   value is duplicated first and rotated so it ends up as the expression
   result after the store; for pre forms the stored value is the result.
   'c - TOK_MID' yields +1 for TOK_INC and -1 for TOK_DEC. */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        if (!nocode_wanted)
            gv_dup(); /* duplicate value */
        else
            vdup(); /* duplicate value */
        /* two rotations: put the saved old value below lvalue+value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Parse GNUC __attribute__ extension. Currently, the following
   extensions are recognized:
   - aligned(n) : set data/function alignment.
   - packed : force data alignment to 1
   - section(x) : generate data/code in this section.
   - unused : currently ignored, but may be used someday.
   - regparm(n) : pass function parameters in registers (i386 only)

   Consumes one or more consecutive __attribute__((...)) groups from the
   token stream and records the results in 'ad'.  Unknown attributes are
   warned about (if enabled) and their parenthesized arguments skipped. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;

    while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
        next();
        skip('(');
        skip('(');
        while (tok != ')') {
            if (tok < TOK_IDENT)
                expect("attribute name");
            t = tok;
            next();
            switch(t) {
            case TOK_SECTION1:
            case TOK_SECTION2:
                skip('(');
                if (tok != TOK_STR)
                    expect("section name");
                ad->section = find_section(tcc_state, (char *)tokc.cstr->data);
                next();
                skip(')');
                break;
            case TOK_ALIAS1:
            case TOK_ALIAS2:
                skip('(');
                if (tok != TOK_STR)
                    expect("alias(\"target\")");
                ad->alias_target = /* save string as token, for later */
                    tok_alloc((char*)tokc.cstr->data, tokc.cstr->size-1)->tok;
                next();
                skip(')');
                break;
            case TOK_VISIBILITY1:
            case TOK_VISIBILITY2:
                skip('(');
                if (tok != TOK_STR)
                    expect("visibility(\"default|hidden|internal|protected\")");
                if (!strcmp (tokc.cstr->data, "default"))
                    ad->a.visibility = STV_DEFAULT;
                else if (!strcmp (tokc.cstr->data, "hidden"))
                    ad->a.visibility = STV_HIDDEN;
                else if (!strcmp (tokc.cstr->data, "internal"))
                    ad->a.visibility = STV_INTERNAL;
                else if (!strcmp (tokc.cstr->data, "protected"))
                    ad->a.visibility = STV_PROTECTED;
                else
                    expect("visibility(\"default|hidden|internal|protected\")");
                next();
                skip(')');
                break;
            case TOK_ALIGNED1:
            case TOK_ALIGNED2:
                if (tok == '(') {
                    next();
                    n = expr_const();
                    if (n <= 0 || (n & (n - 1)) != 0)
                        tcc_error("alignment must be a positive power of two");
                    skip(')');
                } else {
                    /* bare 'aligned' means maximum useful alignment */
                    n = MAX_ALIGN;
                }
                ad->a.aligned = n;
                break;
            case TOK_PACKED1:
            case TOK_PACKED2:
                ad->a.packed = 1;
                break;
            case TOK_WEAK1:
            case TOK_WEAK2:
                ad->a.weak = 1;
                break;
            case TOK_UNUSED1:
            case TOK_UNUSED2:
                /* currently, no need to handle it because tcc does not
                   track unused objects */
                break;
            case TOK_NORETURN1:
            case TOK_NORETURN2:
                /* currently, no need to handle it because tcc does not
                   track unused objects */
                break;
            case TOK_CDECL1:
            case TOK_CDECL2:
            case TOK_CDECL3:
                ad->a.func_call = FUNC_CDECL;
                break;
            case TOK_STDCALL1:
            case TOK_STDCALL2:
            case TOK_STDCALL3:
                ad->a.func_call = FUNC_STDCALL;
                break;
#ifdef TCC_TARGET_I386
            case TOK_REGPARM1:
            case TOK_REGPARM2:
                skip('(');
                n = expr_const();
                /* clamp register count to the 0..3 supported range */
                if (n > 3)
                    n = 3;
                else if (n < 0)
                    n = 0;
                if (n > 0)
                    ad->a.func_call = FUNC_FASTCALL1 + n - 1;
                skip(')');
                break;
            case TOK_FASTCALL1:
            case TOK_FASTCALL2:
            case TOK_FASTCALL3:
                ad->a.func_call = FUNC_FASTCALLW;
                break;
#endif
            case TOK_MODE:
                skip('(');
                switch(tok) {
                case TOK_MODE_DI:
                    ad->a.mode = VT_LLONG + 1;
                    break;
                case TOK_MODE_HI:
                    ad->a.mode = VT_SHORT + 1;
                    break;
                case TOK_MODE_SI:
                    ad->a.mode = VT_INT + 1;
                    break;
                default:
                    tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                    break;
                }
                next();
                skip(')');
                break;
            case TOK_DLLEXPORT:
                ad->a.func_export = 1;
                break;
            case TOK_DLLIMPORT:
                ad->a.func_import = 1;
                break;
            default:
                if (tcc_state->warn_unsupported)
                    tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
                /* skip parameters */
                if (tok == '(') {
                    int parenthesis = 0;
                    do {
                        if (tok == '(')
                            parenthesis++;
                        else if (tok == ')')
                            parenthesis--;
                        next();
                    } while (parenthesis && tok != -1);
                }
                break;
            }
            if (tok != ',')
                break;
            next();
        }
        skip(')');
        skip(')');
    }
}
/* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT.

   'tdef' is non-zero when the tag appears in a typedef/extern context,
   in which case a reference to a not-yet-defined tag is allowed.
   Fills 'type' with the tag symbol; when a '{...}' body follows, parses
   the members, computes field offsets/bitfield packing, and stores the
   total size in s->c and alignment in s->r.  Anonymous struct/union
   members (MS extension) are flattened into the parent's field list. */
static void struct_decl(CType *type, int u, int tdef)
{
    int a, v, size, align, maxalign, c, offset, flexible;
    int bit_size, bit_pos, bsize, bt, lbit_pos, prevbt;
    Sym *s, *ss, *ass, **ps;
    AttributeDef ad;
    CType type1, btype;

    a = tok; /* save decl type */
    next();
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s) {
            if (s->type.t != a)
                tcc_error("invalid type");
            goto do_decl;
        } else if (tok >= TOK_IDENT && !tdef)
            tcc_error("unknown struct/union/enum");
    } else {
        /* unnamed tag: allocate a fresh anonymous symbol */
        v = anon_sym++;
    }
    type1.t = a;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
    /* put struct/union/enum name in type */
 do_decl:
    type->t = u;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        c = 0;
        /* non empty enums are not allowed */
        if (a == TOK_ENUM) {
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    c = expr_const();
                }
                /* enum symbols have static storage */
                ss = sym_push(v, &int_type, VT_CONST, c);
                ss->type.t |= VT_STATIC;
                if (tok != ',')
                    break;
                next();
                c++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            s->c = type_size(&int_type, &align);
            skip('}');
        } else {
            maxalign = 1;
            ps = &s->next;
            prevbt = VT_INT;
            bit_pos = 0;
            offset = 0;
            flexible = 0;
            while (tok != '}') {
                parse_btype(&btype, &ad);
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        type_decl(&type1, &ad, &v, TYPE_DIRECT | TYPE_ABSTRACT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                /* unnamed struct/union member: only
                                   accepted for anonymous tags, or any
                                   tag with -fms-extensions */
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            /* incomplete member type: allowed only as a
                               trailing flexible array in a struct */
                            if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                    }
                    size = type_size(&type1, &align);
                    /* attribute/pragma pack overrides natural alignment */
                    if (ad.a.aligned) {
                        if (align < ad.a.aligned)
                            align = ad.a.aligned;
                    } else if (ad.a.packed) {
                        align = 1;
                    } else if (*tcc_state->pack_stack_ptr) {
                        if (align > *tcc_state->pack_stack_ptr)
                            align = *tcc_state->pack_stack_ptr;
                    }
                    lbit_pos = 0;
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_ENUM &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize) {
                            /* no need for bit fields */
                            bit_pos = 0;
                        } else if (bit_size == 0) {
                            /* XXX: what to do if only padding in a
                               structure ? */
                            /* zero size: means to pad */
                            bit_pos = 0;
                        } else {
                            /* we do not have enough room ?
                               did the type change?
                               is it a union? */
                            if ((bit_pos + bit_size) > bsize ||
                                bt != prevbt || a == TOK_UNION)
                                bit_pos = 0;
                            lbit_pos = bit_pos;
                            /* XXX: handle LSB first */
                            type1.t |= VT_BITFIELD |
                                (bit_pos << VT_STRUCT_SHIFT) |
                                (bit_size << (VT_STRUCT_SHIFT + 6));
                            bit_pos += bit_size;
                        }
                        prevbt = bt;
                    } else {
                        bit_pos = 0;
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* add new memory data only if starting
                           bit field */
                        if (lbit_pos == 0) {
                            if (a == TOK_STRUCT) {
                                c = (c + align - 1) & -align;
                                offset = c;
                                if (size > 0)
                                    c += size;
                            } else {
                                /* union: all members at offset 0 */
                                offset = 0;
                                if (size > c)
                                    c = size;
                            }
                            if (align > maxalign)
                                maxalign = align;
                        }
#if 0
                        printf("add field %s offset=%d",
                               get_tok_str(v, NULL), offset);
                        if (type1.t & VT_BITFIELD) {
                            printf(" pos=%d size=%d",
                                   (type1.t >> VT_STRUCT_SHIFT) & 0x3f,
                                   (type1.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
                        }
                        printf("\n");
#endif
                    }
                    if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* anonymous struct/union: splice its fields
                           into the parent, offset-adjusted */
                        ass = type1.ref;
                        while ((ass = ass->next) != NULL) {
                            ss = sym_push(ass->v, &ass->type, 0, offset + ass->c);
                            *ps = ss;
                            ps = &ss->next;
                        }
                    } else if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, offset);
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            /* store size and alignment */
            s->c = (c + maxalign - 1) & -maxalign;
            s->r = maxalign;
        }
    }
}
3122 /* return 1 if basic type is a type size (short, long, long long) */
3123 ST_FUNC int is_btype_size(int bt)
3125 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.

   Accumulates declaration specifiers (basic types, signedness,
   qualifiers, storage classes, attributes, typedef names) from the
   token stream into type->t / type->ref / 'ad'.  'complete' guards
   against "too many basic types"; 'typespec_found' stops typedef-name
   lookup once a type specifier has already been seen. */
static int parse_btype(CType *type, AttributeDef *ad)
{
    int t, u, bt_size, complete, type_found, typespec_found;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    complete = 0;
    type_found = 0;
    typespec_found = 0;
    t = 0;
    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

        /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (complete)
                tcc_error("too many basic types");
            t |= u;
            /* size modifiers (short/long/long long) may still be
               combined with a later specifier */
            bt_size = is_btype_size (u & VT_BTYPE);
            if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
                complete = 1;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_LONG:
            next();
            if ((t & VT_BTYPE) == VT_DOUBLE) {
#ifndef TCC_TARGET_PE
                /* 'long double' (on PE, long double == double) */
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
#endif
            } else if ((t & VT_BTYPE) == VT_LONG) {
                /* 'long long' */
                t = (t & ~VT_BTYPE) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type1;
            }
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            next();
            if ((t & VT_BTYPE) == VT_LONG) {
#ifdef TCC_TARGET_PE
                t = (t & ~VT_BTYPE) | VT_DOUBLE;
#else
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
#endif
            } else {
                u = VT_DOUBLE;
                goto basic_type1;
            }
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM, t & (VT_TYPEDEF | VT_EXTERN));
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
        case TOK_UNION:
            struct_decl(&type1, VT_STRUCT, t & (VT_TYPEDEF | VT_EXTERN));
            goto basic_type2;

        /* type modifiers */
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            t |= VT_CONSTANT;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            t |= VT_VOLATILE;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            typespec_found = 1;
            t |= VT_DEFSIGN;
            next();
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* accepted but ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

        /* storage */
        case TOK_EXTERN:
            t |= VT_EXTERN;
            next();
            break;
        case TOK_STATIC:
            t |= VT_STATIC;
            next();
            break;
        case TOK_TYPEDEF:
            t |= VT_TYPEDEF;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;

        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->a.mode) {
                /* __attribute__((mode(...))) overrides the basic type */
                u = ad->a.mode -1;
                t = (t & ~VT_BTYPE) | u;
            }
            break;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            goto basic_type2;
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;
            t |= (s->type.t & ~VT_TYPEDEF);
            type->ref = s->type.ref;
            if (s->r) {
                /* get attributes from typedef */
                if (0 == ad->a.aligned)
                    ad->a.aligned = s->a.aligned;
                if (0 == ad->a.func_call)
                    ad->a.func_call = s->a.func_call;
                ad->a.packed |= s->a.packed;
            }
            next();
            typespec_found = 1;
            break;
        }
        type_found = 1;
    }
 the_end:
    if (tcc_state->char_is_unsigned) {
        /* plain 'char' (no explicit signed/unsigned) becomes unsigned */
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }

    /* long is never used as type */
    if ((t & VT_BTYPE) == VT_LONG)
#if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
    defined TCC_TARGET_PE
        t = (t & ~VT_BTYPE) | VT_INT;
#else
        t = (t & ~VT_BTYPE) | VT_LLONG;
#endif
    type->t = t;
    return type_found;
}
3337 /* convert a function parameter type (array to pointer and function to
3338 function pointer) */
3339 static inline void convert_parameter_type(CType *pt)
3341 /* remove const and volatile qualifiers (XXX: const could be used
3342 to indicate a const function parameter */
3343 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3344 /* array must be transformed to pointer according to ANSI C */
3345 pt->t &= ~VT_ARRAY;
3346 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3347 mk_pointer(pt);
/* Parse a parenthesized, possibly concatenated, asm string literal
   into 'astr'.  Consumes the '(' and all adjacent string tokens; the
   caller is expected to consume the closing ')'.  'astr' is freshly
   initialized here and must be freed by the caller. */
ST_FUNC void parse_asm_str(CString *astr)
{
    skip('(');
    /* read the string */
    if (tok != TOK_STR)
        expect("string constant");
    cstr_new(astr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.cstr->data);
        next();
    }
    cstr_ccat(astr, '\0');
}
/* Parse an asm label and return the label
 * Don't forget to free the CString in the caller! */
static void asm_label_instr(CString *astr)
{
    /* skip the __asm__ keyword, then read the '("label")' part */
    next();
    parse_asm_str(astr);
    skip(')');
#ifdef ASM_DEBUG
    printf("asm_alias: \"%s\"\n", (char *)astr->data);
#endif
}
/* Parse the declarator suffixes: '(...)' function parameter lists and
   '[...]' array dimensions (including VLAs), recursively transforming
   'type' into the corresponding function or array type.  An anonymous
   symbol is pushed to carry the parameter list / element type. */
static void post_type(CType *type, AttributeDef *ad)
{
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;

    if (tok == '(') {
        /* function declaration */
        next();
        l = 0;
        first = NULL;
        plast = &first;
        arg_size = 0;
        if (tok != ')') {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if (!parse_btype(&pt, &ad1)) {
                        if (l) {
                            /* no type after a typed parameter: error */
                            tcc_error("invalid type");
                        } else {
                            /* first parameter has no type: this is an
                               old-style (K&R) parameter list */
                            l = FUNC_OLD;
                            goto old_proto;
                        }
                    }
                    l = FUNC_NEW;
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break;
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                } else {
                old_proto:
                    n = tok;
                    if (n < TOK_UIDENT)
                        expect("identifier");
                    pt.t = VT_INT;
                    next();
                }
                convert_parameter_type(&pt);
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
            }
        }
        /* if no parameters, then old type prototype */
        if (l == 0)
            l = FUNC_OLD;
        skip(')');
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            type->t |= VT_PTR;
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->a.func_args = arg_size;
        s = sym_push(SYM_FIELD, type, 0, l);
        s->a = ad->a;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        /* array definition */
        next();
        if (tok == TOK_RESTRICT1)
            next();
        n = -1;
        t1 = 0;
        if (tok != ']') {
            /* at file scope (or when no code is wanted) the size must
               be a constant expression; otherwise a VLA is allowed */
            if (!local_stack || nocode_wanted)
                vpushi(expr_const());
            else gexpr();
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            /* runtime size: reserve a local slot and generate code that
               computes element-size * n into it */
            loc -= type_size(&int_type, &align);
            loc &= -align;
            n = loc;

            vla_runtime_type_size(type, &align);
            gen_op('*');
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
            vswap();
            vstore();
        }
        if (n != -1)
            vpop();

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;
    }
}
/* Parse a type declaration (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl().
*/
static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    Sym *s;
    CType type1, *type2;
    int qualifiers, storage;

    /* pointer declarators, each with optional qualifiers */
    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* restrict is accepted but ignored */
            goto redo;
        }
        mk_pointer(type);
        type->t |= qualifiers;
    }

    /* XXX: clarify attribute handling */
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);

    /* recursive type */
    /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
    type1.t = 0; /* XXX: same as int */
    if (tok == '(') {
        next();
        /* XXX: this is not correct to modify 'ad' at this point, but
           the syntax is not clear */
        if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
            parse_attribute(ad);
        /* the inner declarator is parsed first but binds tighter; it
           is re-chained onto the outer type below */
        type_decl(&type1, ad, v, td);
        skip(')');
    } else {
        /* type identifier */
        if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
            *v = tok;
            next();
        } else {
            if (!(td & TYPE_ABSTRACT))
                expect("identifier");
            *v = 0;
        }
    }
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    if (storage & VT_STATIC) {
        /* static array dimensions must not emit code */
        int saved_nocode_wanted = nocode_wanted;
        nocode_wanted = 1;
        post_type(type, ad);
        nocode_wanted = saved_nocode_wanted;
    } else
        post_type(type, ad);
    type->t |= storage;
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);

    if (!type1.t)
        return;
    /* append type at the end of type1 */
    type2 = &type1;
    for(;;) {
        s = type2->ref;
        type2 = &s->type;
        if (!type2->t) {
            *type2 = *type;
            break;
        }
    }
    *type = type1;
}
3595 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
3596 ST_FUNC int lvalue_type(int t)
3598 int bt, r;
3599 r = VT_LVAL;
3600 bt = t & VT_BTYPE;
3601 if (bt == VT_BYTE || bt == VT_BOOL)
3602 r |= VT_LVAL_BYTE;
3603 else if (bt == VT_SHORT)
3604 r |= VT_LVAL_SHORT;
3605 else
3606 return r;
3607 if (t & VT_UNSIGNED)
3608 r |= VT_LVAL_UNSIGNED;
3609 return r;
/* indirection with full error checking and bound check.

   Dereferences the pointer on top of the value stack.  Function
   designators are left unchanged (dereferencing a function is a
   no-op); arrays, VLAs and functions never become lvalues. */
ST_FUNC void indir(void)
{
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            return;
        expect("pointer");
    }
    /* if the pointer itself lives in memory, load it first */
    if ((vtop->r & VT_LVAL) && !nocode_wanted)
        gv(RC_INT);
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= lvalue_type(vtop->type.t);
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
#endif
    }
}
/* pass a parameter to a function and do type checking and casting.

   'func' is the function symbol (func->c holds FUNC_OLD/FUNC_NEW/
   FUNC_ELLIPSIS); 'arg' is the declared parameter symbol, or NULL when
   past the declared parameters.  Unprototyped and variadic arguments
   receive the default argument promotions; prototyped ones are cast to
   the declared parameter type. */
static void gfunc_param_typed(Sym *func, Sym *arg)
{
    int func_type;
    CType type;

    func_type = func->c;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
            type.t = VT_DOUBLE;
            gen_cast(&type);
        } else if (vtop->type.t & VT_BITFIELD) {
            /* a bitfield value is passed as its underlying type */
            type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
            gen_cast(&type);
        }
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
    } else {
        type = arg->type;
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
    }
}
/* parse an expression of the form '(type)' or '(expr)' and return its
   type — used for GNU typeof.  The parentheses are consumed here. */
static void parse_expr_type(CType *type)
{
    int n;
    AttributeDef ad;

    skip('(');
    if (parse_btype(type, &ad)) {
        /* it was a type name: finish the (abstract) declarator */
        type_decl(type, &ad, &n, TYPE_ABSTRACT);
    } else {
        /* otherwise evaluate the expression's type only */
        expr_type(type);
    }
    skip(')');
}
3677 static void parse_type(CType *type)
3679 AttributeDef ad;
3680 int n;
3682 if (!parse_btype(type, &ad)) {
3683 expect("type");
3685 type_decl(type, &ad, &n, TYPE_ABSTRACT);
3688 static void vpush_tokc(int t)
3690 CType type;
3691 type.t = t;
3692 type.ref = 0;
3693 vsetc(&type, VT_CONST, &tokc);
3696 ST_FUNC void unary(void)
/* Parse one unary-expression and leave its value on the value stack
   (vtop), then consume any trailing postfix operators: ++/--, field
   access ('.'/'->'), indexing ('[') and function calls ('(').
   NOTE(review): the pasted text below keeps the original blob's line
   numbers as a prefix on each line; blank and brace-only lines were
   lost in extraction (visible as gaps in that numbering). */
3698 int n, t, align, size, r, sizeof_caller;
3699 CType type;
3700 Sym *s;
3701 AttributeDef ad;
3702 static int in_sizeof = 0;
3704 sizeof_caller = in_sizeof;
3705 in_sizeof = 0;
3706 /* XXX: GCC 2.95.3 does not generate a table although it should be
3707 better here */
3708 tok_next:
3709 switch(tok) {
3710 case TOK_EXTENSION:
3711 next();
3712 goto tok_next;
/* --- literal tokens: push as typed constants --- */
3713 case TOK_CINT:
3714 case TOK_CCHAR:
3715 case TOK_LCHAR:
3716 vpushi(tokc.i);
3717 next();
3718 break;
3719 case TOK_CUINT:
3720 vpush_tokc(VT_INT | VT_UNSIGNED);
3721 next();
3722 break;
3723 case TOK_CLLONG:
3724 vpush_tokc(VT_LLONG);
3725 next();
3726 break;
3727 case TOK_CULLONG:
3728 vpush_tokc(VT_LLONG | VT_UNSIGNED);
3729 next();
3730 break;
3731 case TOK_CFLOAT:
3732 vpush_tokc(VT_FLOAT);
3733 next();
3734 break;
3735 case TOK_CDOUBLE:
3736 vpush_tokc(VT_DOUBLE);
3737 next();
3738 break;
3739 case TOK_CLDOUBLE:
3740 vpush_tokc(VT_LDOUBLE);
3741 next();
3742 break;
3743 case TOK___FUNCTION__:
3744 if (!gnu_ext)
3745 goto tok_identifier;
3746 /* fall thru */
3747 case TOK___FUNC__:
3749 void *ptr;
3750 int len;
3751 /* special function name identifier */
3752 len = strlen(funcname) + 1;
3753 /* generate char[len] type */
3754 type.t = VT_BYTE;
3755 mk_pointer(&type);
3756 type.t |= VT_ARRAY;
3757 type.ref->c = len;
3758 vpush_ref(&type, data_section, data_section->data_offset, len);
3759 ptr = section_ptr_add(data_section, len);
3760 memcpy(ptr, funcname, len);
3761 next();
3763 break;
3764 case TOK_LSTR:
3765 #ifdef TCC_TARGET_PE
3766 t = VT_SHORT | VT_UNSIGNED;
3767 #else
3768 t = VT_INT;
3769 #endif
3770 goto str_init;
3771 case TOK_STR:
3772 /* string parsing */
3773 t = VT_BYTE;
3774 str_init:
3775 if (tcc_state->warn_write_strings)
3776 t |= VT_CONSTANT;
3777 type.t = t;
3778 mk_pointer(&type);
3779 type.t |= VT_ARRAY;
3780 memset(&ad, 0, sizeof(AttributeDef));
3781 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, NULL, 0);
3782 break;
/* --- '(': cast, ISO C99 compound literal, GNU statement expression,
       or plain parenthesized expression --- */
3783 case '(':
3784 next();
3785 /* cast ? */
3786 if (parse_btype(&type, &ad)) {
3787 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
3788 skip(')');
3789 /* check ISOC99 compound literal */
3790 if (tok == '{') {
3791 /* data is allocated locally by default */
3792 if (global_expr)
3793 r = VT_CONST;
3794 else
3795 r = VT_LOCAL;
3796 /* all except arrays are lvalues */
3797 if (!(type.t & VT_ARRAY))
3798 r |= lvalue_type(type.t);
3799 memset(&ad, 0, sizeof(AttributeDef));
3800 decl_initializer_alloc(&type, &ad, r, 1, 0, NULL, 0);
3801 } else {
3802 if (sizeof_caller) {
3803 vpush(&type);
3804 return;
3806 unary();
3807 gen_cast(&type);
3809 } else if (tok == '{') {
3810 /*
3811 if (nocode_wanted)
3812 tcc_error("statement expression in global scope"); */
3813 /* this check breaks compilation of the linux 2.4.26 with the message:
3814 linux/include/net/tcp.h:945: error: statement expression in global scope */
3816 /* save all registers */
3817 save_regs(0);
3818 /* statement expression : we do not accept break/continue
3819 inside as GCC does */
3820 block(NULL, NULL, NULL, NULL, 0, 1);
3821 skip(')');
3822 } else {
3823 gexpr();
3824 skip(')');
3826 break;
3827 case '*':
3828 next();
3829 unary();
3830 indir();
3831 break;
3832 case '&':
3833 next();
3834 unary();
3835 /* functions names must be treated as function pointers,
3836 except for unary '&' and sizeof. Since we consider that
3837 functions are not lvalues, we only have to handle it
3838 there and in function calls. */
3839 /* arrays can also be used although they are not lvalues */
3840 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
3841 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
3842 test_lvalue();
3843 mk_pointer(&vtop->type);
3844 gaddrof();
3845 break;
3846 case '!':
3847 next();
3848 unary();
3849 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
3850 CType boolean;
3851 boolean.t = VT_BOOL;
3852 gen_cast(&boolean);
3853 vtop->c.i = !vtop->c.i;
3854 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
3855 vtop->c.i = vtop->c.i ^ 1;
3856 else if (!nocode_wanted) {
3857 save_regs(1);
3858 vseti(VT_JMP, gvtst(1, 0));
3860 else
3861 vtop--;
3862 break;
3863 case '~':
3864 next();
3865 unary();
3866 vpushi(-1);
3867 gen_op('^');
3868 break;
3869 case '+':
3870 next();
3871 unary();
3872 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
3873 tcc_error("pointer not accepted for unary plus");
3874 /* In order to force cast, we add zero, except for floating point
3875 where we really need a no-op (otherwise -0.0 will be transformed
3876 into +0.0). */
3877 if (!is_float(vtop->type.t)) {
3878 vpushi(0);
3879 gen_op('+');
3881 break;
3882 case TOK_SIZEOF:
3883 case TOK_ALIGNOF1:
3884 case TOK_ALIGNOF2:
3885 t = tok;
3886 next();
3887 in_sizeof++;
3888 unary_type(&type); // Perform a in_sizeof = 0;
3889 size = type_size(&type, &align);
3890 if (t == TOK_SIZEOF) {
3891 if (!(type.t & VT_VLA)) {
3892 if (size < 0)
3893 tcc_error("sizeof applied to an incomplete type");
3894 vpushs(size);
3895 } else {
3896 vla_runtime_type_size(&type, &align);
3898 } else {
3899 vpushs(align);
3901 vtop->type.t |= VT_UNSIGNED;
3902 break;
3904 case TOK_builtin_types_compatible_p:
3906 CType type1, type2;
3907 next();
3908 skip('(');
3909 parse_type(&type1);
3910 skip(',');
3911 parse_type(&type2);
3912 skip(')');
3913 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
3914 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
3915 vpushi(is_compatible_types(&type1, &type2));
3917 break;
3918 case TOK_builtin_constant_p:
3920 int saved_nocode_wanted, res;
3921 next();
3922 skip('(');
3923 saved_nocode_wanted = nocode_wanted;
3924 nocode_wanted = 1;
3925 gexpr();
3926 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3927 vpop();
3928 nocode_wanted = saved_nocode_wanted;
3929 skip(')');
3930 vpushi(res);
3932 break;
3933 case TOK_builtin_frame_address:
3934 case TOK_builtin_return_address:
3936 int tok1 = tok;
3937 int level;
3938 CType type;
3939 next();
3940 skip('(');
3941 if (tok != TOK_CINT || tokc.i < 0) {
3942 tcc_error("%s only takes positive integers",
3943 tok1 == TOK_builtin_return_address ?
3944 "__builtin_return_address" :
3945 "__builtin_frame_address");
3947 level = tokc.i;
3948 next();
3949 skip(')');
3950 type.t = VT_VOID;
3951 mk_pointer(&type);
3952 vset(&type, VT_LOCAL, 0); /* local frame */
3953 while (level--) {
3954 mk_pointer(&vtop->type);
3955 indir(); /* -> parent frame */
3957 if (tok1 == TOK_builtin_return_address) {
3958 // assume return address is just above frame pointer on stack
3959 vpushi(PTR_SIZE);
3960 gen_op('+');
3961 mk_pointer(&vtop->type);
3962 indir();
3965 break;
3966 #ifdef TCC_TARGET_X86_64
3967 #ifdef TCC_TARGET_PE
3968 case TOK_builtin_va_start:
3970 next();
3971 skip('(');
3972 expr_eq();
3973 skip(',');
3974 expr_eq();
3975 skip(')');
3976 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
3977 tcc_error("__builtin_va_start expects a local variable");
3978 vtop->r &= ~(VT_LVAL | VT_REF);
3979 vtop->type = char_pointer_type;
3980 vstore();
3982 break;
3983 #else
3984 case TOK_builtin_va_arg_types:
3986 CType type;
3987 next();
3988 skip('(');
3989 parse_type(&type);
3990 skip(')');
3991 vpushi(classify_x86_64_va_arg(&type));
3993 break;
3994 #endif
3995 #endif
3997 #ifdef TCC_TARGET_ARM64
3998 case TOK___va_start: {
3999 if (nocode_wanted)
4000 tcc_error("statement in global scope");
4001 next();
4002 skip('(');
4003 expr_eq();
4004 skip(',');
4005 expr_eq();
4006 skip(')');
4007 //xx check types
4008 gen_va_start();
4009 vpushi(0);
4010 vtop->type.t = VT_VOID;
4011 break;
4013 case TOK___va_arg: {
4014 CType type;
4015 if (nocode_wanted)
4016 tcc_error("statement in global scope");
4017 next();
4018 skip('(');
4019 expr_eq();
4020 skip(',');
4021 parse_type(&type);
4022 skip(')');
4023 //xx check types
4024 gen_va_arg(&type);
4025 vtop->type = type;
4026 break;
4028 case TOK___arm64_clear_cache: {
4029 next();
4030 skip('(');
4031 expr_eq();
4032 skip(',');
4033 expr_eq();
4034 skip(')');
4035 gen_clear_cache();
4036 vpushi(0);
4037 vtop->type.t = VT_VOID;
4038 break;
4040 #endif
4041 /* pre operations */
4042 case TOK_INC:
4043 case TOK_DEC:
4044 t = tok;
4045 next();
4046 unary();
4047 inc(0, t);
4048 break;
4049 case '-':
4050 next();
4051 unary();
4052 t = vtop->type.t & VT_BTYPE;
4053 if (is_float(t)) {
4054 /* In IEEE negate(x) isn't subtract(0,x), but rather
4055 subtract(-0, x). */
4056 vpush(&vtop->type);
4057 if (t == VT_FLOAT)
4058 vtop->c.f = -0.0f;
4059 else if (t == VT_DOUBLE)
4060 vtop->c.d = -0.0;
4061 else
4062 vtop->c.ld = -0.0;
4063 } else
4064 vpushi(0);
4065 vswap();
4066 gen_op('-');
4067 break;
4068 case TOK_LAND:
4069 if (!gnu_ext)
4070 goto tok_identifier;
4071 next();
4072 /* allow to take the address of a label */
4073 if (tok < TOK_UIDENT)
4074 expect("label identifier");
4075 s = label_find(tok);
4076 if (!s) {
4077 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4078 } else {
4079 if (s->r == LABEL_DECLARED)
4080 s->r = LABEL_FORWARD;
4082 if (!s->type.t) {
4083 s->type.t = VT_VOID;
4084 mk_pointer(&s->type);
4085 s->type.t |= VT_STATIC;
4087 vpushsym(&s->type, s);
4088 next();
4089 break;
4091 // special qnan , snan and infinity values
4092 case TOK___NAN__:
4093 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4094 next();
4095 break;
4096 case TOK___SNAN__:
4097 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4098 next();
4099 break;
4100 case TOK___INF__:
4101 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4102 next();
4103 break;
/* --- identifier: ordinary symbol lookup, with implicit int()
       declaration for undeclared names used as function calls --- */
4105 default:
4106 tok_identifier:
4107 t = tok;
4108 next();
4109 if (t < TOK_UIDENT)
4110 expect("identifier");
4111 s = sym_find(t);
4112 if (!s) {
4113 const char *name = get_tok_str(t, NULL);
4114 if (tok != '(')
4115 tcc_error("'%s' undeclared", name);
4116 /* for simple function calls, we tolerate undeclared
4117 external reference to int() function */
4118 if (tcc_state->warn_implicit_function_declaration
4119 #ifdef TCC_TARGET_PE
4120 /* people must be warned about using undeclared WINAPI functions
4121 (which usually start with uppercase letter) */
4122 || (name[0] >= 'A' && name[0] <= 'Z')
4123 #endif
4125 tcc_warning("implicit declaration of function '%s'", name);
4126 s = external_global_sym(t, &func_old_type, 0);
4128 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4129 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4130 /* if referencing an inline function, then we generate a
4131 symbol to it if not already done. It will have the
4132 effect to generate code for it at the end of the
4133 compilation unit. Inline functions are always
4134 generated in the text section. */
4135 if (!s->c)
4136 put_extern_sym(s, text_section, 0, 0);
4137 r = VT_SYM | VT_CONST;
4138 } else {
4139 r = s->r;
4141 vset(&s->type, r, s->c);
4142 /* if forward reference, we must point to s */
4143 if (vtop->r & VT_SYM) {
4144 vtop->sym = s;
4145 vtop->c.ptr_offset = 0;
4147 break;
4150 /* post operations */
4151 while (1) {
4152 if (tok == TOK_INC || tok == TOK_DEC) {
4153 inc(1, tok);
4154 next();
4155 } else if (tok == '.' || tok == TOK_ARROW) {
4156 int qualifiers;
4157 /* field */
4158 if (tok == TOK_ARROW)
4159 indir();
4160 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4161 test_lvalue();
4162 gaddrof();
4163 next();
4164 /* expect pointer on structure */
4165 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4166 expect("struct or union");
4167 s = vtop->type.ref;
4168 /* find field */
4169 tok |= SYM_FIELD;
4170 while ((s = s->next) != NULL) {
4171 if (s->v == tok)
4172 break;
4174 if (!s)
4175 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, NULL));
4176 /* add field offset to pointer */
4177 vtop->type = char_pointer_type; /* change type to 'char *' */
4178 vpushi(s->c);
4179 gen_op('+');
4180 /* change type to field type, and set to lvalue */
4181 vtop->type = s->type;
4182 vtop->type.t |= qualifiers;
4183 /* an array is never an lvalue */
4184 if (!(vtop->type.t & VT_ARRAY)) {
4185 vtop->r |= lvalue_type(vtop->type.t);
4186 #ifdef CONFIG_TCC_BCHECK
4187 /* if bound checking, the referenced pointer must be checked */
4188 if (tcc_state->do_bounds_check)
4189 vtop->r |= VT_MUSTBOUND;
4190 #endif
4192 next();
4193 } else if (tok == '[') {
4194 next();
4195 gexpr();
4196 gen_op('+');
4197 indir();
4198 skip(']');
4199 } else if (tok == '(') {
4200 SValue ret;
4201 Sym *sa;
4202 int nb_args, ret_nregs, ret_align, regsize, variadic;
4203 RegArgs args;
4205 /* function call */
4206 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4207 /* pointer test (no array accepted) */
4208 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4209 vtop->type = *pointed_type(&vtop->type);
4210 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4211 goto error_func;
4212 } else {
4213 error_func:
4214 expect("function pointer");
4216 } else {
4217 vtop->r &= ~VT_LVAL; /* no lvalue */
4219 /* get return type */
4220 s = vtop->type.ref;
4221 next();
4222 sa = s->next; /* first parameter */
4223 nb_args = 0;
4224 ret.r2 = VT_CONST;
4225 /* compute first implicit argument if a structure is returned */
4226 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4227 variadic = (s->c == FUNC_ELLIPSIS);
4228 gfunc_sret(&s->type, variadic, &ret.type,
4229 &ret_align, &regsize, &args);
4230 ret_nregs = regargs_nregs(&args);
4232 if (!ret_nregs) {
4233 /* get some space for the returned structure */
4234 size = type_size(&s->type, &align);
4235 #ifdef TCC_TARGET_ARM64
4236 /* On arm64, a small struct is returned in registers.
4237 It is much easier to write it to memory if we know
4238 that we are allowed to write some extra bytes, so
4239 round the allocated space up to a power of 2: */
4240 if (size < 16)
4241 while (size & (size - 1))
4242 size = (size | (size - 1)) + 1;
4243 #endif
4244 loc = (loc - size) & -align;
4245 ret.type = s->type;
4246 ret.r = VT_LOCAL | VT_LVAL;
4247 /* pass it as 'int' to avoid structure arg passing
4248 problems */
4249 vseti(VT_LOCAL, loc);
4250 ret.c = vtop->c;
4251 nb_args++;
4253 } else {
4254 ret_nregs = 1;
4255 ret.type = s->type;
4258 if (ret_nregs) {
4259 /* return in register */
4260 if (is_float(ret.type.t)) {
4261 ret.r = reg_fret(ret.type.t);
4262 #ifdef TCC_TARGET_X86_64
4263 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4264 ret.r2 = REG_QRET;
4265 #endif
4266 } else {
4267 #ifndef TCC_TARGET_ARM64
4268 #ifdef TCC_TARGET_X86_64
4269 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4270 #else
4271 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4272 #endif
4273 ret.r2 = REG_LRET;
4274 #endif
4275 ret.r = REG_IRET;
4277 ret.c.i = 0;
4279 if (tok != ')') {
4280 for(;;) {
4281 expr_eq();
4282 gfunc_param_typed(s, sa);
4283 nb_args++;
4284 if (sa)
4285 sa = sa->next;
4286 if (tok == ')')
4287 break;
4288 skip(',');
4291 if (sa)
4292 tcc_error("too few arguments to function");
4293 skip(')');
4294 if (!nocode_wanted) {
4295 gfunc_call(nb_args);
4296 } else {
4297 vtop -= (nb_args + 1);
4300 /* return value */
4301 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4302 vsetc(&ret.type, r, &ret.c);
4303 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4306 /* handle packed struct return */
4307 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4308 int addr, offset;
4309 #if defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_PE)
4310 int i;
4311 #endif
4314 size = type_size(&s->type, &align);
4315 /* We're writing whole regs often, make sure there's enough
4316 space. Assume register size is power of 2. */
4317 if (regsize > align)
4318 align = regsize;
4319 loc = (loc - size) & -align;
4320 addr = loc;
/* spill the register-returned struct into the freshly allocated
   local slot; on SysV x86-64 the RegArgs offsets drive per-class
   (integer/SSE) stores, elsewhere regs are stored back-to-back */
4321 #if defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_PE)
4322 for (i=0; i<REG_ARGS_MAX; i++) {
4323 offset = args.ireg[i];
4325 if (offset == -1)
4326 break;
4328 ret.type.t = VT_LLONG;
4329 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4330 vsetc(&ret.type, i ? REG_LRET : REG_IRET, &ret.c);
4331 vstore();
4332 vtop--;
4333 vtop--;
4335 for (i=0; i<REG_ARGS_MAX; i++) {
4336 offset = args.freg[i];
4338 if (offset == -1)
4339 break;
4341 ret.type.t = VT_DOUBLE;
4342 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4343 vsetc(&ret.type, i ? REG_QRET : REG_FRET, &ret.c);
4344 vstore();
4345 vtop--;
4346 vtop--;
4348 #else
4349 offset = 0;
4350 for (;;) {
4351 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4352 vswap();
4353 vstore();
4354 vtop--;
4355 if (--ret_nregs == 0)
4356 break;
4357 offset += regsize;
4359 #endif
4360 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4362 } else {
4363 break;
4368 ST_FUNC void expr_prod(void)
4370 int t;
4372 unary();
4373 while (tok == '*' || tok == '/' || tok == '%') {
4374 t = tok;
4375 next();
4376 unary();
4377 gen_op(t);
4381 ST_FUNC void expr_sum(void)
4383 int t;
4385 expr_prod();
4386 while (tok == '+' || tok == '-') {
4387 t = tok;
4388 next();
4389 expr_prod();
4390 gen_op(t);
4394 static void expr_shift(void)
4396 int t;
4398 expr_sum();
4399 while (tok == TOK_SHL || tok == TOK_SAR) {
4400 t = tok;
4401 next();
4402 expr_sum();
4403 gen_op(t);
4407 static void expr_cmp(void)
4409 int t;
4411 expr_shift();
4412 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4413 tok == TOK_ULT || tok == TOK_UGE) {
4414 t = tok;
4415 next();
4416 expr_shift();
4417 gen_op(t);
4421 static void expr_cmpeq(void)
4423 int t;
4425 expr_cmp();
4426 while (tok == TOK_EQ || tok == TOK_NE) {
4427 t = tok;
4428 next();
4429 expr_cmp();
4430 gen_op(t);
4434 static void expr_and(void)
4436 expr_cmpeq();
4437 while (tok == '&') {
4438 next();
4439 expr_cmpeq();
4440 gen_op('&');
4444 static void expr_xor(void)
4446 expr_and();
4447 while (tok == '^') {
4448 next();
4449 expr_and();
4450 gen_op('^');
4454 static void expr_or(void)
4456 expr_xor();
4457 while (tok == '|') {
4458 next();
4459 expr_xor();
4460 gen_op('|');
4464 /* XXX: fix this mess */
4465 static void expr_land_const(void)
4467 expr_or();
4468 while (tok == TOK_LAND) {
4469 next();
4470 expr_or();
4471 gen_op(TOK_LAND);
4475 /* XXX: fix this mess */
4476 static void expr_lor_const(void)
4478 expr_land_const();
4479 while (tok == TOK_LOR) {
4480 next();
4481 expr_land_const();
4482 gen_op(TOK_LOR);
4486 /* only used if non constant */
4487 static void expr_land(void)
4489 int t;
4491 expr_or();
4492 if (tok == TOK_LAND) {
4493 t = 0;
4494 save_regs(1);
4495 for(;;) {
4496 t = gvtst(1, t);
4497 if (tok != TOK_LAND) {
4498 vseti(VT_JMPI, t);
4499 break;
4501 next();
4502 expr_or();
4507 static void expr_lor(void)
4509 int t;
4511 expr_land();
4512 if (tok == TOK_LOR) {
4513 t = 0;
4514 save_regs(1);
4515 for(;;) {
4516 t = gvtst(0, t);
4517 if (tok != TOK_LOR) {
4518 vseti(VT_JMP, t);
4519 break;
4521 next();
4522 expr_land();
4527 /* XXX: better constant handling */
4528 static void expr_cond(void)
/* Parse a conditional ('?:') expression.  In const_wanted mode the
   chosen branch value is selected at compile time; otherwise code is
   generated for both branches with jumps, and both operands are
   converted to a common type per the ISO C usual conversions.
   NOTE(review): the pasted text below keeps the original blob's line
   numbers as a prefix on each line; blank and brace-only lines were
   lost in extraction (visible as gaps in that numbering). */
4530 int tt, u, r1, r2, rc, t1, t2, bt1, bt2;
4531 SValue sv;
4532 CType type, type1, type2;
4534 if (const_wanted) {
4535 expr_lor_const();
4536 if (tok == '?') {
4537 CType boolean;
4538 int c;
4539 boolean.t = VT_BOOL;
4540 vdup();
4541 gen_cast(&boolean);
4542 c = vtop->c.i;
4543 vpop();
4544 next();
4545 if (tok != ':' || !gnu_ext) {
4546 vpop();
4547 gexpr();
4549 if (!c)
4550 vpop();
4551 skip(':');
4552 expr_cond();
4553 if (c)
4554 vpop();
4556 } else {
4557 expr_lor();
4558 if (tok == '?') {
4559 next();
4560 if (vtop != vstack) {
4561 /* needed to avoid having different registers saved in
4562 each branch */
4563 if (is_float(vtop->type.t)) {
4564 rc = RC_FLOAT;
4565 #ifdef TCC_TARGET_X86_64
4566 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
4567 rc = RC_ST0;
4569 #endif
4571 else
4572 rc = RC_INT;
4573 gv(rc);
4574 save_regs(1);
4576 if (tok == ':' && gnu_ext) {
4577 gv_dup();
4578 tt = gvtst(1, 0);
4579 } else {
4580 tt = gvtst(1, 0);
4581 gexpr();
4583 type1 = vtop->type;
4584 sv = *vtop; /* save value to handle it later */
4585 vtop--; /* no vpop so that FP stack is not flushed */
4586 skip(':');
4587 u = gjmp(0);
4588 gsym(tt);
4589 expr_cond();
4590 type2 = vtop->type;
4592 t1 = type1.t;
4593 bt1 = t1 & VT_BTYPE;
4594 t2 = type2.t;
4595 bt2 = t2 & VT_BTYPE;
4596 /* cast operands to correct type according to ISOC rules */
4597 if (is_float(bt1) || is_float(bt2)) {
4598 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
4599 type.t = VT_LDOUBLE;
4600 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
4601 type.t = VT_DOUBLE;
4602 } else {
4603 type.t = VT_FLOAT;
4605 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
4606 /* cast to biggest op */
4607 type.t = VT_LLONG;
4608 /* convert to unsigned if it does not fit in a long long */
4609 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
4610 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
4611 type.t |= VT_UNSIGNED;
4612 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
4613 /* If one is a null ptr constant the result type
4614 is the other. */
4615 if (is_null_pointer (vtop))
4616 type = type1;
4617 else if (is_null_pointer (&sv))
4618 type = type2;
4619 /* XXX: test pointer compatibility, C99 has more elaborate
4620 rules here. */
4621 else
4622 type = type1;
4623 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
4624 /* XXX: test function pointer compatibility */
4625 type = bt1 == VT_FUNC ? type1 : type2;
4626 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
4627 /* XXX: test structure compatibility */
4628 type = bt1 == VT_STRUCT ? type1 : type2;
4629 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
4630 /* NOTE: as an extension, we accept void on only one side */
4631 type.t = VT_VOID;
4632 } else {
4633 /* integer operations */
4634 type.t = VT_INT;
4635 /* convert to unsigned if it does not fit in an integer */
4636 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
4637 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
4638 type.t |= VT_UNSIGNED;
4641 /* now we convert second operand */
4642 gen_cast(&type);
4643 if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
4644 gaddrof();
4645 rc = RC_INT;
4646 if (is_float(type.t)) {
4647 rc = RC_FLOAT;
4648 #ifdef TCC_TARGET_X86_64
4649 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
4650 rc = RC_ST0;
4652 #endif
4653 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
4654 /* for long longs, we use fixed registers to avoid having
4655 to handle a complicated move */
4656 rc = RC_IRET;
4659 r2 = gv(rc);
4660 /* this is horrible, but we must also convert first
4661 operand */
4662 tt = gjmp(0);
4663 gsym(u);
4664 /* put again first value and cast it */
4665 *vtop = sv;
4666 gen_cast(&type);
4667 if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
4668 gaddrof();
4669 r1 = gv(rc);
4670 move_reg(r2, r1, type.t);
4671 vtop->r = r2;
4672 gsym(tt);
4677 static void expr_eq(void)
4679 int t;
4681 expr_cond();
4682 if (tok == '=' ||
4683 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
4684 tok == TOK_A_XOR || tok == TOK_A_OR ||
4685 tok == TOK_A_SHL || tok == TOK_A_SAR) {
4686 test_lvalue();
4687 t = tok;
4688 next();
4689 if (t == '=') {
4690 expr_eq();
4691 } else {
4692 vdup();
4693 expr_eq();
4694 gen_op(t & 0x7f);
4696 vstore();
4700 ST_FUNC void gexpr(void)
4702 while (1) {
4703 expr_eq();
4704 if (tok != ',')
4705 break;
4706 vpop();
4707 next();
4711 /* parse an expression and return its type without any side effect. */
4712 static void expr_type(CType *type)
4714 int saved_nocode_wanted;
4716 saved_nocode_wanted = nocode_wanted;
4717 nocode_wanted =