arm64: Fix regression introduced by 6245db9.
[tinycc.git] / tccgen.c
blob 3508e84bfed506472f664b865022e5047d9c64a4
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* location of the variable where the stack pointer is saved before the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int v1, v2, sym;
66 } **p; int n; /* list of case ranges */
67 int def_sym; /* default symbol */
68 } *cur_switch; /* current switch */
70 /* ------------------------------------------------------------------------- */
71 static void gen_cast(CType *type);
72 static inline CType *pointed_type(CType *type);
73 static int is_compatible_types(CType *type1, CType *type2);
74 static int parse_btype(CType *type, AttributeDef *ad);
75 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
76 static void parse_expr_type(CType *type);
77 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
78 static void block(int *bsym, int *csym, int is_expr);
79 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
80 static int decl0(int l, int is_for_loop_init);
81 static void expr_eq(void);
82 static void expr_lor_const(void);
83 static void unary_type(CType *type);
84 static void vla_runtime_type_size(CType *type, int *a);
85 static void vla_sp_restore(void);
86 static void vla_sp_restore_root(void);
87 static int is_compatible_parameter_types(CType *type1, CType *type2);
88 static void expr_type(CType *type);
89 ST_FUNC void vpush64(int ty, unsigned long long v);
90 ST_FUNC void vpush(CType *type);
91 ST_FUNC int gvtst(int inv, int t);
92 ST_FUNC int is_btype_size(int bt);
93 static void gen_inline_functions(TCCState *s);
95 ST_INLN int is_float(int t)
97 int bt;
98 bt = t & VT_BTYPE;
99 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
102 /* we use our own 'finite' function to avoid potential problems with
103 non-standard math libs */
104 /* XXX: endianness dependent */
105 ST_FUNC int ieee_finite(double d)
107 int p[4];
108 memcpy(p, &d, sizeof(double));
109 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
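/* How the bit trick above works (little-endian layout assumed, hence the
   XXX note): p[1] holds the sign, the 11 exponent bits and the top of the
   mantissa.  OR-ing with 0x800fffff forces every bit except the exponent
   to 1, so after adding 1 the result keeps bit 31 set only when the
   exponent is not all ones, i.e. the value is neither Inf nor NaN.
   A rough standalone sketch of the same test (illustration only, not
   used by the compiler): */
#if 0
static int finite_sketch(double d)
{
    unsigned int hi;
    memcpy(&hi, (const char *)&d + 4, 4); /* high word on little-endian */
    return (unsigned)((hi | 0x800fffffu) + 1) >> 31;
}
#endif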
112 ST_FUNC void test_lvalue(void)
114 if (!(vtop->r & VT_LVAL))
115 expect("lvalue");
118 ST_FUNC void check_vstack(void)
120 if (pvtop != vtop)
121 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
124 /* ------------------------------------------------------------------------- */
125 /* vstack debugging aid */
127 #if 0
128 void pv (const char *lbl, int a, int b)
130 int i;
131 for (i = a; i < a + b; ++i) {
132 SValue *p = &vtop[-i];
133 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
134 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
137 #endif
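/* Illustration of the value-stack discipline that check_vstack() verifies:
   every vpush*()/vset*() must eventually be consumed by an operation, a
   store or a vpop().  A hypothetical, purely illustrative sequence: */
#if 0
static void vstack_usage_sketch(void)
{
    SValue *saved = vtop;
    vpushi(6);
    vpushi(7);
    gen_op('*');   /* both operands constant: folded to 42 by gen_opic() */
    vpop();        /* discard the result */
    /* vtop == saved again, so check_vstack() would not report a leak */
}
#endif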
139 /* ------------------------------------------------------------------------- */
140 ST_FUNC void tccgen_start(TCCState *s1)
142 cur_text_section = NULL;
143 funcname = "";
144 anon_sym = SYM_FIRST_ANOM;
145 section_sym = 0;
146 nocode_wanted = 1;
148 /* define some often used types */
149 int_type.t = VT_INT;
150 char_pointer_type.t = VT_BYTE;
151 mk_pointer(&char_pointer_type);
152 #if PTR_SIZE == 4
153 size_type.t = VT_INT;
154 #else
155 size_type.t = VT_LLONG;
156 #endif
157 func_old_type.t = VT_FUNC;
158 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
160 if (s1->do_debug) {
161 char buf[512];
163 /* file info: full path + filename */
164 section_sym = put_elf_sym(symtab_section, 0, 0,
165 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
166 text_section->sh_num, NULL);
167 getcwd(buf, sizeof(buf));
168 #ifdef _WIN32
169 normalize_slashes(buf);
170 #endif
171 pstrcat(buf, sizeof(buf), "/");
172 put_stabs_r(buf, N_SO, 0, 0,
173 text_section->data_offset, text_section, section_sym);
174 put_stabs_r(file->filename, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
177 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
178 symbols can be safely used */
179 put_elf_sym(symtab_section, 0, 0,
180 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
181 SHN_ABS, file->filename);
183 #ifdef TCC_TARGET_ARM
184 arm_init(s1);
185 #endif
188 ST_FUNC void tccgen_end(TCCState *s1)
190 gen_inline_functions(s1);
191 check_vstack();
192 /* end of translation unit info */
193 if (s1->do_debug) {
194 put_stabs_r(NULL, N_SO, 0, 0,
195 text_section->data_offset, text_section, section_sym);
199 /* ------------------------------------------------------------------------- */
200 /* update sym->c so that it points to an external symbol in section
201 'section' with value 'value' */
203 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
204 addr_t value, unsigned long size,
205 int can_add_underscore)
207 int sym_type, sym_bind, sh_num, info, other;
208 ElfW(Sym) *esym;
209 const char *name;
210 char buf1[256];
212 #ifdef CONFIG_TCC_BCHECK
213 char buf[32];
214 #endif
216 if (section == NULL)
217 sh_num = SHN_UNDEF;
218 else if (section == SECTION_ABS)
219 sh_num = SHN_ABS;
220 else
221 sh_num = section->sh_num;
223 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
224 sym_type = STT_FUNC;
225 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
226 sym_type = STT_NOTYPE;
227 } else {
228 sym_type = STT_OBJECT;
231 if (sym->type.t & VT_STATIC)
232 sym_bind = STB_LOCAL;
233 else {
234 if (sym->type.t & VT_WEAK)
235 sym_bind = STB_WEAK;
236 else
237 sym_bind = STB_GLOBAL;
240 if (!sym->c) {
241 name = get_tok_str(sym->v, NULL);
242 #ifdef CONFIG_TCC_BCHECK
243 if (tcc_state->do_bounds_check) {
244 /* XXX: avoid doing that for statics ? */
245 /* if bound checking is activated, we change some function
246 names by adding the "__bound" prefix */
247 switch(sym->v) {
248 #ifdef TCC_TARGET_PE
249 /* XXX: we rely only on malloc hooks */
250 case TOK_malloc:
251 case TOK_free:
252 case TOK_realloc:
253 case TOK_memalign:
254 case TOK_calloc:
255 #endif
256 case TOK_memcpy:
257 case TOK_memmove:
258 case TOK_memset:
259 case TOK_strlen:
260 case TOK_strcpy:
261 case TOK_alloca:
262 strcpy(buf, "__bound_");
263 strcat(buf, name);
264 name = buf;
265 break;
268 #endif
269 other = 0;
271 #ifdef TCC_TARGET_PE
272 if (sym->type.t & VT_EXPORT)
273 other |= ST_PE_EXPORT;
274 if (sym_type == STT_FUNC && sym->type.ref) {
275 Sym *ref = sym->type.ref;
276 if (ref->a.func_export)
277 other |= ST_PE_EXPORT;
278 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
279 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
280 name = buf1;
281 other |= ST_PE_STDCALL;
282 can_add_underscore = 0;
284 } else {
285 if (find_elf_sym(tcc_state->dynsymtab_section, name))
286 other |= ST_PE_IMPORT;
287 if (sym->type.t & VT_IMPORT)
288 other |= ST_PE_IMPORT;
290 #else
291 if (! (sym->type.t & VT_STATIC))
292 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
293 #endif
294 if (tcc_state->leading_underscore && can_add_underscore) {
295 buf1[0] = '_';
296 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
297 name = buf1;
299 if (sym->asm_label) {
300 name = get_tok_str(sym->asm_label, NULL);
302 info = ELFW(ST_INFO)(sym_bind, sym_type);
303 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
304 } else {
305 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
306 esym->st_value = value;
307 esym->st_size = size;
308 esym->st_shndx = sh_num;
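/* Name decoration example for the PE path above: a stdcall function 'f'
   with ref->a.func_args == 2 and PTR_SIZE == 4 is emitted as "_f@8",
   while plain leading_underscore decoration turns 'main' into "_main". */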
312 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
313 addr_t value, unsigned long size)
315 put_extern_sym2(sym, section, value, size, 1);
318 /* add a new relocation entry to symbol 'sym' in section 's' */
319 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
320 addr_t addend)
322 int c = 0;
323 if (sym) {
324 if (0 == sym->c)
325 put_extern_sym(sym, NULL, 0, 0);
326 c = sym->c;
328 /* now we can add ELF relocation info */
329 put_elf_reloca(symtab_section, s, offset, type, c, addend);
332 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
334 greloca(s, sym, offset, type, 0);
337 /* ------------------------------------------------------------------------- */
338 /* symbol allocator */
339 static Sym *__sym_malloc(void)
341 Sym *sym_pool, *sym, *last_sym;
342 int i;
344 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
345 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
347 last_sym = sym_free_first;
348 sym = sym_pool;
349 for(i = 0; i < SYM_POOL_NB; i++) {
350 sym->next = last_sym;
351 last_sym = sym;
352 sym++;
354 sym_free_first = last_sym;
355 return last_sym;
358 static inline Sym *sym_malloc(void)
360 Sym *sym;
361 sym = sym_free_first;
362 if (!sym)
363 sym = __sym_malloc();
364 sym_free_first = sym->next;
365 return sym;
368 ST_INLN void sym_free(Sym *sym)
370 sym->next = sym_free_first;
371 sym_free_first = sym;
374 /* push, without hashing */
375 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
377 Sym *s;
379 s = sym_malloc();
380 s->asm_label = 0;
381 s->v = v;
382 s->type.t = t;
383 s->type.ref = NULL;
384 #ifdef _WIN64
385 s->d = NULL;
386 #endif
387 s->c = c;
388 s->next = NULL;
389 /* add in stack */
390 s->prev = *ps;
391 *ps = s;
392 return s;
395 /* find a symbol and return its associated structure. 's' is the top
396 of the symbol stack */
397 ST_FUNC Sym *sym_find2(Sym *s, int v)
399 while (s) {
400 if (s->v == v)
401 return s;
402 else if (s->v == -1)
403 return NULL;
404 s = s->prev;
406 return NULL;
409 /* structure lookup */
410 ST_INLN Sym *struct_find(int v)
412 v -= TOK_IDENT;
413 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
414 return NULL;
415 return table_ident[v]->sym_struct;
418 /* find an identifier */
419 ST_INLN Sym *sym_find(int v)
421 v -= TOK_IDENT;
422 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
423 return NULL;
424 return table_ident[v]->sym_identifier;
427 /* push a given symbol on the symbol stack */
428 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
430 Sym *s, **ps;
431 TokenSym *ts;
433 if (local_stack)
434 ps = &local_stack;
435 else
436 ps = &global_stack;
437 s = sym_push2(ps, v, type->t, c);
438 s->type.ref = type->ref;
439 s->r = r;
440 /* don't record fields or anonymous symbols */
441 /* XXX: simplify */
442 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
443 /* record symbol in token array */
444 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
445 if (v & SYM_STRUCT)
446 ps = &ts->sym_struct;
447 else
448 ps = &ts->sym_identifier;
449 s->prev_tok = *ps;
450 *ps = s;
451 s->scope = local_scope;
452 if (s->prev_tok && s->prev_tok->scope == s->scope)
453 tcc_error("redeclaration of '%s'",
454 get_tok_str(v & ~SYM_STRUCT, NULL));
456 return s;
459 /* push a global identifier */
460 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
462 Sym *s, **ps;
463 s = sym_push2(&global_stack, v, t, c);
464 /* don't record anonymous symbol */
465 if (v < SYM_FIRST_ANOM) {
466 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
467 /* modify the top most local identifier, so that
468 sym_identifier will point to 's' when popped */
469 while (*ps != NULL)
470 ps = &(*ps)->prev_tok;
471 s->prev_tok = NULL;
472 *ps = s;
474 return s;
477 /* pop symbols until top reaches 'b' */
478 ST_FUNC void sym_pop(Sym **ptop, Sym *b)
480 Sym *s, *ss, **ps;
481 TokenSym *ts;
482 int v;
484 s = *ptop;
485 while(s != b) {
486 ss = s->prev;
487 v = s->v;
488 /* remove symbol in token array */
489 /* XXX: simplify */
490 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
491 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
492 if (v & SYM_STRUCT)
493 ps = &ts->sym_struct;
494 else
495 ps = &ts->sym_identifier;
496 *ps = s->prev_tok;
498 sym_free(s);
499 s = ss;
501 *ptop = b;
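/* Typical life cycle of the symbol stacks managed above, as a sketch
   (the block-scope handling in the real parser is more involved): */
#if 0
static void scope_sketch(void)
{
    Sym *top = local_stack;       /* remember the current stack top */
    /* ... sym_push() the declarations of a block while parsing it ... */
    sym_pop(&local_stack, top);   /* leaving the block unwinds them */
}
#endif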
504 static void weaken_symbol(Sym *sym)
506 sym->type.t |= VT_WEAK;
507 if (sym->c > 0) {
508 int esym_type;
509 ElfW(Sym) *esym;
511 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
512 esym_type = ELFW(ST_TYPE)(esym->st_info);
513 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
517 static void apply_visibility(Sym *sym, CType *type)
519 int vis = sym->type.t & VT_VIS_MASK;
520 int vis2 = type->t & VT_VIS_MASK;
521 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
522 vis = vis2;
523 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
525 else
526 vis = (vis < vis2) ? vis : vis2;
527 sym->type.t &= ~VT_VIS_MASK;
528 sym->type.t |= vis;
530 if (sym->c > 0) {
531 ElfW(Sym) *esym;
533 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
534 vis >>= VT_VIS_SHIFT;
535 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
539 /* ------------------------------------------------------------------------- */
541 ST_FUNC void swap(int *p, int *q)
543 int t;
544 t = *p;
545 *p = *q;
546 *q = t;
549 static void vsetc(CType *type, int r, CValue *vc)
551 int v;
553 if (vtop >= vstack + (VSTACK_SIZE - 1))
554 tcc_error("memory full (vstack)");
555 /* we cannot leave the CPU flags set if other instructions are
556 generated. Also avoid leaving VT_JMP anywhere except at the top of
557 the stack because it would complicate the code generator. */
558 if (vtop >= vstack) {
559 v = vtop->r & VT_VALMASK;
560 if (v == VT_CMP || (v & ~1) == VT_JMP)
561 gv(RC_INT);
563 vtop++;
564 vtop->type = *type;
565 vtop->r = r;
566 vtop->r2 = VT_CONST;
567 vtop->c = *vc;
570 /* push a constant of type "type" with a dummy (unused) value */
571 ST_FUNC void vpush(CType *type)
573 CValue cval;
574 vsetc(type, VT_CONST, &cval);
577 /* push integer constant */
578 ST_FUNC void vpushi(int v)
580 CValue cval;
581 cval.i = v;
582 vsetc(&int_type, VT_CONST, &cval);
585 /* push a pointer sized constant */
586 static void vpushs(addr_t v)
588 CValue cval;
589 cval.i = v;
590 vsetc(&size_type, VT_CONST, &cval);
593 /* push arbitrary 64bit constant */
594 ST_FUNC void vpush64(int ty, unsigned long long v)
596 CValue cval;
597 CType ctype;
598 ctype.t = ty;
599 ctype.ref = NULL;
600 cval.i = v;
601 vsetc(&ctype, VT_CONST, &cval);
604 /* push long long constant */
605 static inline void vpushll(long long v)
607 vpush64(VT_LLONG, v);
610 /* push a symbol value of TYPE */
611 static inline void vpushsym(CType *type, Sym *sym)
613 CValue cval;
614 cval.i = 0;
615 vsetc(type, VT_CONST | VT_SYM, &cval);
616 vtop->sym = sym;
619 /* Return a static symbol pointing to a section */
620 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
622 int v;
623 Sym *sym;
625 v = anon_sym++;
626 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
627 sym->type.ref = type->ref;
628 sym->r = VT_CONST | VT_SYM;
629 put_extern_sym(sym, sec, offset, size);
630 return sym;
633 /* push a reference to a section offset by adding a dummy symbol */
634 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
636 vpushsym(type, get_sym_ref(type, sec, offset, size));
639 /* define a new external reference to a symbol 'v' of type 'u' */
640 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
642 Sym *s;
644 s = sym_find(v);
645 if (!s) {
646 /* push forward reference */
647 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
648 s->type.ref = type->ref;
649 s->r = r | VT_CONST | VT_SYM;
651 return s;
654 /* define a new external reference to a symbol 'v' */
655 static Sym *external_sym(int v, CType *type, int r)
657 Sym *s;
659 s = sym_find(v);
660 if (!s) {
661 /* push forward reference */
662 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
663 s->type.t |= VT_EXTERN;
664 } else if (s->type.ref == func_old_type.ref) {
665 s->type.ref = type->ref;
666 s->r = r | VT_CONST | VT_SYM;
667 s->type.t |= VT_EXTERN;
668 } else if (!is_compatible_types(&s->type, type)) {
669 tcc_error("incompatible types for redefinition of '%s'",
670 get_tok_str(v, NULL));
672 /* Merge some storage attributes. */
673 if (type->t & VT_WEAK)
674 weaken_symbol(s);
676 if (type->t & VT_VIS_MASK)
677 apply_visibility(s, type);
679 return s;
682 /* push a reference to global symbol v */
683 ST_FUNC void vpush_global_sym(CType *type, int v)
685 vpushsym(type, external_global_sym(v, type, 0));
688 ST_FUNC void vset(CType *type, int r, int v)
690 CValue cval;
692 cval.i = v;
693 vsetc(type, r, &cval);
696 static void vseti(int r, int v)
698 CType type;
699 type.t = VT_INT;
700 type.ref = 0;
701 vset(&type, r, v);
704 ST_FUNC void vswap(void)
706 SValue tmp;
707 /* we cannot leave the CPU flags set if other instructions are
708 generated. Also avoid leaving VT_JMP anywhere except at the top of
709 the stack because it would complicate the code generator. */
710 if (vtop >= vstack) {
711 int v = vtop->r & VT_VALMASK;
712 if (v == VT_CMP || (v & ~1) == VT_JMP)
713 gv(RC_INT);
715 tmp = vtop[0];
716 vtop[0] = vtop[-1];
717 vtop[-1] = tmp;
719 /* XXX: +2% overall speed possible with optimized memswap
721 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
725 ST_FUNC void vpushv(SValue *v)
727 if (vtop >= vstack + (VSTACK_SIZE - 1))
728 tcc_error("memory full (vstack)");
729 vtop++;
730 *vtop = *v;
733 static void vdup(void)
735 vpushv(vtop);
738 /* save registers up to (vtop - n) stack entry */
739 ST_FUNC void save_regs(int n)
741 SValue *p, *p1;
742 for(p = vstack, p1 = vtop - n; p <= p1; p++)
743 save_reg(p->r);
746 /* save r to the memory stack, and mark it as being free */
747 ST_FUNC void save_reg(int r)
749 save_reg_upstack(r, 0);
752 /* save r to the memory stack, and mark it as being free,
753 if seen up to (vtop - n) stack entry */
754 ST_FUNC void save_reg_upstack(int r, int n)
756 int l, saved, size, align;
757 SValue *p, *p1, sv;
758 CType *type;
760 if ((r &= VT_VALMASK) >= VT_CONST)
761 return;
763 /* modify all stack values */
764 saved = 0;
765 l = 0;
766 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
767 if ((p->r & VT_VALMASK) == r ||
768 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
769 /* must save value on stack if not already done */
770 if (!saved) {
771 /* NOTE: must reload 'r' because r might be equal to r2 */
772 r = p->r & VT_VALMASK;
773 /* store register in the stack */
774 type = &p->type;
775 if ((p->r & VT_LVAL) ||
776 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
777 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
778 type = &char_pointer_type;
779 #else
780 type = &int_type;
781 #endif
782 size = type_size(type, &align);
783 loc = (loc - size) & -align;
784 sv.type.t = type->t;
785 sv.r = VT_LOCAL | VT_LVAL;
786 sv.c.i = loc;
787 store(r, &sv);
788 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
789 /* x86 specific: need to pop fp register ST0 if saved */
790 if (r == TREG_ST0) {
791 o(0xd8dd); /* fstp %st(0) */
793 #endif
794 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
795 /* special long long case */
796 if ((type->t & VT_BTYPE) == VT_LLONG) {
797 sv.c.i += 4;
798 store(p->r2, &sv);
800 #endif
801 l = loc;
802 saved = 1;
804 /* mark that stack entry as being saved on the stack */
805 if (p->r & VT_LVAL) {
806 /* also clear the bounded flag because the
807 relocation address of the function was stored in
808 p->c.i */
809 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
810 } else {
811 p->r = lvalue_type(p->type.t) | VT_LOCAL;
813 p->r2 = VT_CONST;
814 p->c.i = l;
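/* Spill illustration: when a vstack entry living in register r must be
   saved, the code above reserves 'size' bytes at the new negative offset
   'loc', stores r there and rewrites the entry as VT_LOCAL | VT_LVAL with
   c.i = loc, so later uses reload it transparently from that stack slot. */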
819 #ifdef TCC_TARGET_ARM
820 /* find a register of class 'rc2' with at most one reference on stack.
821 * If none, call get_reg(rc) */
822 ST_FUNC int get_reg_ex(int rc, int rc2)
824 int r;
825 SValue *p;
827 for(r=0;r<NB_REGS;r++) {
828 if (reg_classes[r] & rc2) {
829 int n;
830 n=0;
831 for(p = vstack; p <= vtop; p++) {
832 if ((p->r & VT_VALMASK) == r ||
833 (p->r2 & VT_VALMASK) == r)
834 n++;
836 if (n <= 1)
837 return r;
840 return get_reg(rc);
842 #endif
844 /* find a free register of class 'rc'. If none, save one register */
845 ST_FUNC int get_reg(int rc)
847 int r;
848 SValue *p;
850 /* find a free register */
851 for(r=0;r<NB_REGS;r++) {
852 if (reg_classes[r] & rc) {
853 for(p=vstack;p<=vtop;p++) {
854 if ((p->r & VT_VALMASK) == r ||
855 (p->r2 & VT_VALMASK) == r)
856 goto notfound;
858 return r;
860 notfound: ;
863 /* no register left : free the first one on the stack (VERY
864 IMPORTANT to start from the bottom to ensure that we don't
865 spill registers used in gen_opi()) */
866 for(p=vstack;p<=vtop;p++) {
867 /* look at second register (if long long) */
868 r = p->r2 & VT_VALMASK;
869 if (r < VT_CONST && (reg_classes[r] & rc))
870 goto save_found;
871 r = p->r & VT_VALMASK;
872 if (r < VT_CONST && (reg_classes[r] & rc)) {
873 save_found:
874 save_reg(r);
875 return r;
878 /* Should never come here */
879 return -1;
882 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
883 if needed */
884 static void move_reg(int r, int s, int t)
886 SValue sv;
888 if (r != s) {
889 save_reg(r);
890 sv.type.t = t;
891 sv.type.ref = NULL;
892 sv.r = s;
893 sv.c.i = 0;
894 load(r, &sv);
898 /* get address of vtop (vtop MUST BE an lvalue) */
899 ST_FUNC void gaddrof(void)
901 if (vtop->r & VT_REF && !nocode_wanted)
902 gv(RC_INT);
903 vtop->r &= ~VT_LVAL;
904 /* tricky: if saved lvalue, then we can go back to lvalue */
905 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
906 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
911 #ifdef CONFIG_TCC_BCHECK
912 /* generate lvalue bound code */
913 static void gbound(void)
915 int lval_type;
916 CType type1;
918 vtop->r &= ~VT_MUSTBOUND;
919 /* if lvalue, then use checking code before dereferencing */
920 if (vtop->r & VT_LVAL) {
921 /* if not VT_BOUNDED value, then make one */
922 if (!(vtop->r & VT_BOUNDED)) {
923 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
924 /* must save type because we must set it to int to get pointer */
925 type1 = vtop->type;
926 vtop->type.t = VT_PTR;
927 gaddrof();
928 vpushi(0);
929 gen_bounded_ptr_add();
930 vtop->r |= lval_type;
931 vtop->type = type1;
933 /* then check for dereferencing */
934 gen_bounded_ptr_deref();
937 #endif
939 /* store vtop in a register belonging to class 'rc'. lvalues are
940 converted to values. Cannot be used if the value cannot be converted
941 to a register value (such as structures). */
942 ST_FUNC int gv(int rc)
944 int r, bit_pos, bit_size, size, align, i;
945 int rc2;
947 /* NOTE: get_reg can modify vstack[] */
948 if (vtop->type.t & VT_BITFIELD) {
949 CType type;
950 int bits = 32;
951 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
952 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
953 /* remove bit field info to avoid loops */
954 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
955 /* cast to int to propagate signedness in following ops */
956 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
957 type.t = VT_LLONG;
958 bits = 64;
959 } else
960 type.t = VT_INT;
961 if((vtop->type.t & VT_UNSIGNED) ||
962 (vtop->type.t & VT_BTYPE) == VT_BOOL)
963 type.t |= VT_UNSIGNED;
964 gen_cast(&type);
965 /* generate shifts */
966 vpushi(bits - (bit_pos + bit_size));
967 gen_op(TOK_SHL);
968 vpushi(bits - bit_size);
969 /* NOTE: transformed to SHR if unsigned */
970 gen_op(TOK_SAR);
971 r = gv(rc);
972 } else {
973 if (is_float(vtop->type.t) &&
974 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
975 Sym *sym;
976 int *ptr;
977 unsigned long offset;
978 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
979 CValue check;
980 #endif
982 /* XXX: unify with initializers handling ? */
983 /* CPUs usually cannot use float constants, so we store them
984 generically in the data segment */
985 size = type_size(&vtop->type, &align);
986 offset = (data_section->data_offset + align - 1) & -align;
987 data_section->data_offset = offset;
988 /* XXX: not portable yet */
989 #if defined(__i386__) || defined(__x86_64__)
990 /* Zero pad x87 tenbyte long doubles */
991 if (size == LDOUBLE_SIZE) {
992 vtop->c.tab[2] &= 0xffff;
993 #if LDOUBLE_SIZE == 16
994 vtop->c.tab[3] = 0;
995 #endif
997 #endif
998 ptr = section_ptr_add(data_section, size);
999 size = size >> 2;
1000 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1001 check.d = 1;
1002 if(check.tab[0])
1003 for(i=0;i<size;i++)
1004 ptr[i] = vtop->c.tab[size-1-i];
1005 else
1006 #endif
1007 for(i=0;i<size;i++)
1008 ptr[i] = vtop->c.tab[i];
1009 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1010 vtop->r |= VT_LVAL | VT_SYM;
1011 vtop->sym = sym;
1012 vtop->c.i = 0;
1014 #ifdef CONFIG_TCC_BCHECK
1015 if (vtop->r & VT_MUSTBOUND)
1016 gbound();
1017 #endif
1019 r = vtop->r & VT_VALMASK;
1020 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1021 #ifndef TCC_TARGET_ARM64
1022 if (rc == RC_IRET)
1023 rc2 = RC_LRET;
1024 #ifdef TCC_TARGET_X86_64
1025 else if (rc == RC_FRET)
1026 rc2 = RC_QRET;
1027 #endif
1028 #endif
1030 /* need to reload if:
1031 - constant
1032 - lvalue (need to dereference pointer)
1033 - already a register, but not in the right class */
1034 if (r >= VT_CONST
1035 || (vtop->r & VT_LVAL)
1036 || !(reg_classes[r] & rc)
1037 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1038 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1039 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1040 #else
1041 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1042 #endif
1045 r = get_reg(rc);
1046 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1047 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1048 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1049 #else
1050 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1051 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1052 unsigned long long ll;
1053 #endif
1054 int r2, original_type;
1055 original_type = vtop->type.t;
1056 /* two register type load : expand to two words
1057 temporarily */
1058 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1059 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1060 /* load constant */
1061 ll = vtop->c.i;
1062 vtop->c.i = ll; /* first word */
1063 load(r, vtop);
1064 vtop->r = r; /* save register value */
1065 vpushi(ll >> 32); /* second word */
1066 } else
1067 #endif
1068 if (vtop->r & VT_LVAL) {
1069 /* We do not want to modify the long long
1070 pointer here, so the safest (and least
1071 efficient) approach is to save all the other
1072 registers on the stack. XXX: totally inefficient. */
1073 #if 0
1074 save_regs(1);
1075 #else
1076 /* lvalue_save: save only if used further down the stack */
1077 save_reg_upstack(vtop->r, 1);
1078 #endif
1079 /* load from memory */
1080 vtop->type.t = load_type;
1081 load(r, vtop);
1082 vdup();
1083 vtop[-1].r = r; /* save register value */
1084 /* increment pointer to get second word */
1085 vtop->type.t = addr_type;
1086 gaddrof();
1087 vpushi(load_size);
1088 gen_op('+');
1089 vtop->r |= VT_LVAL;
1090 vtop->type.t = load_type;
1091 } else {
1092 /* move registers */
1093 load(r, vtop);
1094 vdup();
1095 vtop[-1].r = r; /* save register value */
1096 vtop->r = vtop[-1].r2;
1098 /* Allocate second register. Here we rely on the fact that
1099 get_reg() tries first to free r2 of an SValue. */
1100 r2 = get_reg(rc2);
1101 load(r2, vtop);
1102 vpop();
1103 /* write second register */
1104 vtop->r2 = r2;
1105 vtop->type.t = original_type;
1106 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1107 int t1, t;
1108 /* lvalue of scalar type : need to use lvalue type
1109 because of possible cast */
1110 t = vtop->type.t;
1111 t1 = t;
1112 /* compute memory access type */
1113 if (vtop->r & VT_REF)
1114 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1115 t = VT_PTR;
1116 #else
1117 t = VT_INT;
1118 #endif
1119 else if (vtop->r & VT_LVAL_BYTE)
1120 t = VT_BYTE;
1121 else if (vtop->r & VT_LVAL_SHORT)
1122 t = VT_SHORT;
1123 if (vtop->r & VT_LVAL_UNSIGNED)
1124 t |= VT_UNSIGNED;
1125 vtop->type.t = t;
1126 load(r, vtop);
1127 /* restore wanted type */
1128 vtop->type.t = t1;
1129 } else {
1130 /* one register type load */
1131 load(r, vtop);
1134 vtop->r = r;
1135 #ifdef TCC_TARGET_C67
1136 /* uses register pairs for doubles */
1137 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1138 vtop->r2 = r+1;
1139 #endif
1141 return r;
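/* Worked example for the bit-field path of gv(): a 5-bit field at
   bit_pos 3 in an int is loaded as (x << (32 - (3 + 5))) >> (32 - 5),
   i.e. x << 24 followed by >> 27; gen_op() turns the TOK_SAR into a
   TOK_SHR when the field is unsigned, so sign extension only happens
   for signed fields. */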
1144 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1145 ST_FUNC void gv2(int rc1, int rc2)
1147 int v;
1149 /* generate more generic register first. But VT_JMP or VT_CMP
1150 values must be generated first in all cases to avoid possible
1151 reload errors */
1152 v = vtop[0].r & VT_VALMASK;
1153 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1154 vswap();
1155 gv(rc1);
1156 vswap();
1157 gv(rc2);
1158 /* test if reload is needed for first register */
1159 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1160 vswap();
1161 gv(rc1);
1162 vswap();
1164 } else {
1165 gv(rc2);
1166 vswap();
1167 gv(rc1);
1168 vswap();
1169 /* test if reload is needed for first register */
1170 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1171 gv(rc2);
1176 #ifndef TCC_TARGET_ARM64
1177 /* wrapper around RC_FRET to return a register by type */
1178 static int rc_fret(int t)
1180 #ifdef TCC_TARGET_X86_64
1181 if (t == VT_LDOUBLE) {
1182 return RC_ST0;
1184 #endif
1185 return RC_FRET;
1187 #endif
1189 /* wrapper around REG_FRET to return a register by type */
1190 static int reg_fret(int t)
1192 #ifdef TCC_TARGET_X86_64
1193 if (t == VT_LDOUBLE) {
1194 return TREG_ST0;
1196 #endif
1197 return REG_FRET;
1200 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1201 /* expand 64bit on stack in two ints */
1202 static void lexpand(void)
1204 int u, v;
1205 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1206 v = vtop->r & (VT_VALMASK | VT_LVAL);
1207 if (v == VT_CONST) {
1208 vdup();
1209 vtop[0].c.i >>= 32;
1210 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1211 vdup();
1212 vtop[0].c.i += 4;
1213 } else {
1214 gv(RC_INT);
1215 vdup();
1216 vtop[0].r = vtop[-1].r2;
1217 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1219 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1221 #endif
1223 #ifdef TCC_TARGET_ARM
1224 /* expand long long on stack */
1225 ST_FUNC void lexpand_nr(void)
1227 int u,v;
1229 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1230 vdup();
1231 vtop->r2 = VT_CONST;
1232 vtop->type.t = VT_INT | u;
1233 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1234 if (v == VT_CONST) {
1235 vtop[-1].c.i = vtop->c.i;
1236 vtop->c.i = vtop->c.i >> 32;
1237 vtop->r = VT_CONST;
1238 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1239 vtop->c.i += 4;
1240 vtop->r = vtop[-1].r;
1241 } else if (v > VT_CONST) {
1242 vtop--;
1243 lexpand();
1244 } else
1245 vtop->r = vtop[-1].r2;
1246 vtop[-1].r2 = VT_CONST;
1247 vtop[-1].type.t = VT_INT | u;
1249 #endif
1251 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1252 /* build a long long from two ints */
1253 static void lbuild(int t)
1255 gv2(RC_INT, RC_INT);
1256 vtop[-1].r2 = vtop[0].r;
1257 vtop[-1].type.t = t;
1258 vpop();
1260 #endif
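/* On 32-bit targets a long long occupies a single vstack entry using two
   registers (r holds the low word, r2 the high word).  lexpand() splits it
   into two VT_INT entries (low below, high on top) and lbuild() puts them
   back together; e.g. the constant 0x100000002LL expands to 2 (low word)
   and 1 (high word). */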
1262 /* rotate n first stack elements to the bottom
1263 I1 ... In -> I2 ... In I1 [top is right]
1265 ST_FUNC void vrotb(int n)
1267 int i;
1268 SValue tmp;
1270 tmp = vtop[-n + 1];
1271 for(i=-n+1;i!=0;i++)
1272 vtop[i] = vtop[i+1];
1273 vtop[0] = tmp;
1276 /* rotate the n elements before entry e towards the top
1277 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1279 ST_FUNC void vrote(SValue *e, int n)
1281 int i;
1282 SValue tmp;
1284 tmp = *e;
1285 for(i = 0;i < n - 1; i++)
1286 e[-i] = e[-i - 1];
1287 e[-n + 1] = tmp;
1290 /* rotate n first stack elements to the top
1291 I1 ... In -> In I1 ... I(n-1) [top is right]
1293 ST_FUNC void vrott(int n)
1295 vrote(vtop, n);
1298 /* pop stack value */
1299 ST_FUNC void vpop(void)
1301 int v;
1302 v = vtop->r & VT_VALMASK;
1303 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1304 /* for x86, we need to pop the FP stack */
1305 if (v == TREG_ST0 && !nocode_wanted) {
1306 o(0xd8dd); /* fstp %st(0) */
1307 } else
1308 #endif
1309 if (v == VT_JMP || v == VT_JMPI) {
1310 /* need to put correct jump if && or || without test */
1311 gsym(vtop->c.i);
1313 vtop--;
1316 /* convert stack entry to register and duplicate its value in another
1317 register */
1318 static void gv_dup(void)
1320 int rc, t, r, r1;
1321 SValue sv;
1323 t = vtop->type.t;
1324 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1325 if ((t & VT_BTYPE) == VT_LLONG) {
1326 lexpand();
1327 gv_dup();
1328 vswap();
1329 vrotb(3);
1330 gv_dup();
1331 vrotb(4);
1332 /* stack: H L L1 H1 */
1333 lbuild(t);
1334 vrotb(3);
1335 vrotb(3);
1336 vswap();
1337 lbuild(t);
1338 vswap();
1339 } else
1340 #endif
1342 /* duplicate value */
1343 rc = RC_INT;
1344 sv.type.t = VT_INT;
1345 if (is_float(t)) {
1346 rc = RC_FLOAT;
1347 #ifdef TCC_TARGET_X86_64
1348 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1349 rc = RC_ST0;
1351 #endif
1352 sv.type.t = t;
1354 r = gv(rc);
1355 r1 = get_reg(rc);
1356 sv.r = r;
1357 sv.c.i = 0;
1358 load(r1, &sv); /* move r to r1 */
1359 vdup();
1360 /* duplicates value */
1361 if (r != r1)
1362 vtop->r = r1;
1366 /* Generate value test
1368 * Generate a test for any value (jump, comparison and integers) */
1369 ST_FUNC int gvtst(int inv, int t)
1371 int v = vtop->r & VT_VALMASK;
1372 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1373 vpushi(0);
1374 gen_op(TOK_NE);
1376 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1377 /* constant jmp optimization */
1378 if ((vtop->c.i != 0) != inv)
1379 t = gjmp(t);
1380 vtop--;
1381 return t;
1383 return gtst(inv, t);
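/* gvtst() also folds tests on constants: a condition that is a plain
   VT_CONST either emits one unconditional gjmp() or emits nothing at all,
   so no compare instruction is generated for e.g. 'if (1)'. */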
1386 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1387 /* generate CPU independent (unsigned) long long operations */
1388 static void gen_opl(int op)
1390 int t, a, b, op1, c, i;
1391 int func;
1392 unsigned short reg_iret = REG_IRET;
1393 unsigned short reg_lret = REG_LRET;
1394 SValue tmp;
1396 switch(op) {
1397 case '/':
1398 case TOK_PDIV:
1399 func = TOK___divdi3;
1400 goto gen_func;
1401 case TOK_UDIV:
1402 func = TOK___udivdi3;
1403 goto gen_func;
1404 case '%':
1405 func = TOK___moddi3;
1406 goto gen_mod_func;
1407 case TOK_UMOD:
1408 func = TOK___umoddi3;
1409 gen_mod_func:
1410 #ifdef TCC_ARM_EABI
1411 reg_iret = TREG_R2;
1412 reg_lret = TREG_R3;
1413 #endif
1414 gen_func:
1415 /* call generic long long function */
1416 vpush_global_sym(&func_old_type, func);
1417 vrott(3);
1418 gfunc_call(2);
1419 vpushi(0);
1420 vtop->r = reg_iret;
1421 vtop->r2 = reg_lret;
1422 break;
1423 case '^':
1424 case '&':
1425 case '|':
1426 case '*':
1427 case '+':
1428 case '-':
1429 //pv("gen_opl A",0,2);
1430 t = vtop->type.t;
1431 vswap();
1432 lexpand();
1433 vrotb(3);
1434 lexpand();
1435 /* stack: L1 H1 L2 H2 */
1436 tmp = vtop[0];
1437 vtop[0] = vtop[-3];
1438 vtop[-3] = tmp;
1439 tmp = vtop[-2];
1440 vtop[-2] = vtop[-3];
1441 vtop[-3] = tmp;
1442 vswap();
1443 /* stack: H1 H2 L1 L2 */
1444 //pv("gen_opl B",0,4);
1445 if (op == '*') {
1446 vpushv(vtop - 1);
1447 vpushv(vtop - 1);
1448 gen_op(TOK_UMULL);
1449 lexpand();
1450 /* stack: H1 H2 L1 L2 ML MH */
1451 for(i=0;i<4;i++)
1452 vrotb(6);
1453 /* stack: ML MH H1 H2 L1 L2 */
1454 tmp = vtop[0];
1455 vtop[0] = vtop[-2];
1456 vtop[-2] = tmp;
1457 /* stack: ML MH H1 L2 H2 L1 */
1458 gen_op('*');
1459 vrotb(3);
1460 vrotb(3);
1461 gen_op('*');
1462 /* stack: ML MH M1 M2 */
1463 gen_op('+');
1464 gen_op('+');
1465 } else if (op == '+' || op == '-') {
1466 /* XXX: add non carry method too (for MIPS or alpha) */
1467 if (op == '+')
1468 op1 = TOK_ADDC1;
1469 else
1470 op1 = TOK_SUBC1;
1471 gen_op(op1);
1472 /* stack: H1 H2 (L1 op L2) */
1473 vrotb(3);
1474 vrotb(3);
1475 gen_op(op1 + 1); /* TOK_xxxC2 */
1476 } else {
1477 gen_op(op);
1478 /* stack: H1 H2 (L1 op L2) */
1479 vrotb(3);
1480 vrotb(3);
1481 /* stack: (L1 op L2) H1 H2 */
1482 gen_op(op);
1483 /* stack: (L1 op L2) (H1 op H2) */
1485 /* stack: L H */
1486 lbuild(t);
1487 break;
1488 case TOK_SAR:
1489 case TOK_SHR:
1490 case TOK_SHL:
1491 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1492 t = vtop[-1].type.t;
1493 vswap();
1494 lexpand();
1495 vrotb(3);
1496 /* stack: L H shift */
1497 c = (int)vtop->c.i;
1498 /* constant: simpler */
1499 /* NOTE: all comments are for SHL; the other cases are
1500 done by swapping words */
1501 vpop();
1502 if (op != TOK_SHL)
1503 vswap();
1504 if (c >= 32) {
1505 /* stack: L H */
1506 vpop();
1507 if (c > 32) {
1508 vpushi(c - 32);
1509 gen_op(op);
1511 if (op != TOK_SAR) {
1512 vpushi(0);
1513 } else {
1514 gv_dup();
1515 vpushi(31);
1516 gen_op(TOK_SAR);
1518 vswap();
1519 } else {
1520 vswap();
1521 gv_dup();
1522 /* stack: H L L */
1523 vpushi(c);
1524 gen_op(op);
1525 vswap();
1526 vpushi(32 - c);
1527 if (op == TOK_SHL)
1528 gen_op(TOK_SHR);
1529 else
1530 gen_op(TOK_SHL);
1531 vrotb(3);
1532 /* stack: L L H */
1533 vpushi(c);
1534 if (op == TOK_SHL)
1535 gen_op(TOK_SHL);
1536 else
1537 gen_op(TOK_SHR);
1538 gen_op('|');
1540 if (op != TOK_SHL)
1541 vswap();
1542 lbuild(t);
1543 } else {
1544 /* XXX: should provide a faster fallback on x86 ? */
1545 switch(op) {
1546 case TOK_SAR:
1547 func = TOK___ashrdi3;
1548 goto gen_func;
1549 case TOK_SHR:
1550 func = TOK___lshrdi3;
1551 goto gen_func;
1552 case TOK_SHL:
1553 func = TOK___ashldi3;
1554 goto gen_func;
1557 break;
1558 default:
1559 /* compare operations */
1560 t = vtop->type.t;
1561 vswap();
1562 lexpand();
1563 vrotb(3);
1564 lexpand();
1565 /* stack: L1 H1 L2 H2 */
1566 tmp = vtop[-1];
1567 vtop[-1] = vtop[-2];
1568 vtop[-2] = tmp;
1569 /* stack: L1 L2 H1 H2 */
1570 /* compare high */
1571 op1 = op;
1572 /* when values are equal, we need to compare low words. since
1573 the jump is inverted, we invert the test too. */
1574 if (op1 == TOK_LT)
1575 op1 = TOK_LE;
1576 else if (op1 == TOK_GT)
1577 op1 = TOK_GE;
1578 else if (op1 == TOK_ULT)
1579 op1 = TOK_ULE;
1580 else if (op1 == TOK_UGT)
1581 op1 = TOK_UGE;
1582 a = 0;
1583 b = 0;
1584 gen_op(op1);
1585 if (op1 != TOK_NE) {
1586 a = gvtst(1, 0);
1588 if (op != TOK_EQ) {
1589 /* generate non equal test */
1590 /* XXX: NOT PORTABLE yet */
1591 if (a == 0) {
1592 b = gvtst(0, 0);
1593 } else {
1594 #if defined(TCC_TARGET_I386)
1595 b = psym(0x850f, 0);
1596 #elif defined(TCC_TARGET_ARM)
1597 b = ind;
1598 o(0x1A000000 | encbranch(ind, 0, 1));
1599 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1600 tcc_error("not implemented");
1601 #else
1602 #error not supported
1603 #endif
1606 /* compare low. Always unsigned */
1607 op1 = op;
1608 if (op1 == TOK_LT)
1609 op1 = TOK_ULT;
1610 else if (op1 == TOK_LE)
1611 op1 = TOK_ULE;
1612 else if (op1 == TOK_GT)
1613 op1 = TOK_UGT;
1614 else if (op1 == TOK_GE)
1615 op1 = TOK_UGE;
1616 gen_op(op1);
1617 a = gvtst(1, a);
1618 gsym(b);
1619 vseti(VT_JMPI, a);
1620 break;
1623 #endif
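/* Shift-by-constant composition used by gen_opl() above for c < 32
   (SHL case; the other shifts swap the words):
       H' = (H << c) | (L >> (32 - c)),  L' = L << c
   e.g. shifting the 64-bit value H=1, L=0x80000000 left by 1 gives
   H' = 3, L' = 0. */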
1625 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1627 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1628 return (a ^ b) >> 63 ? -x : x;
1631 static int gen_opic_lt(uint64_t a, uint64_t b)
1633 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
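/* Both helpers above emulate signed 64-bit arithmetic with unsigned
   values: gen_opic_sdiv() divides the magnitudes and restores the sign
   from a^b, and gen_opic_lt() flips the sign bits so that an unsigned
   compare orders the operands as if they were signed.  A quick check of
   the intent (hypothetical test, not part of the compiler): */
#if 0
static void opic_helpers_check(void)
{
    assert(gen_opic_sdiv((uint64_t)-7, 2) == (uint64_t)-3); /* -7 / 2 == -3 */
    assert(gen_opic_lt((uint64_t)-1, 0));                   /* -1 < 0 */
    assert(!gen_opic_lt(0, (uint64_t)-1));                  /* !(0 < -1) */
}
#endif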
1636 /* handle integer constant folding and various
1637 machine-independent optimizations */
1638 static void gen_opic(int op)
1640 SValue *v1 = vtop - 1;
1641 SValue *v2 = vtop;
1642 int t1 = v1->type.t & VT_BTYPE;
1643 int t2 = v2->type.t & VT_BTYPE;
1644 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1645 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1646 uint64_t l1 = c1 ? v1->c.i : 0;
1647 uint64_t l2 = c2 ? v2->c.i : 0;
1648 int shm = (t1 == VT_LLONG) ? 63 : 31;
1650 if (t1 != VT_LLONG)
1651 l1 = ((uint32_t)l1 |
1652 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1653 if (t2 != VT_LLONG)
1654 l2 = ((uint32_t)l2 |
1655 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1657 if (c1 && c2) {
1658 switch(op) {
1659 case '+': l1 += l2; break;
1660 case '-': l1 -= l2; break;
1661 case '&': l1 &= l2; break;
1662 case '^': l1 ^= l2; break;
1663 case '|': l1 |= l2; break;
1664 case '*': l1 *= l2; break;
1666 case TOK_PDIV:
1667 case '/':
1668 case '%':
1669 case TOK_UDIV:
1670 case TOK_UMOD:
1671 /* if division by zero, generate explicit division */
1672 if (l2 == 0) {
1673 if (const_wanted)
1674 tcc_error("division by zero in constant");
1675 goto general_case;
1677 switch(op) {
1678 default: l1 = gen_opic_sdiv(l1, l2); break;
1679 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1680 case TOK_UDIV: l1 = l1 / l2; break;
1681 case TOK_UMOD: l1 = l1 % l2; break;
1683 break;
1684 case TOK_SHL: l1 <<= (l2 & shm); break;
1685 case TOK_SHR: l1 >>= (l2 & shm); break;
1686 case TOK_SAR:
1687 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1688 break;
1689 /* tests */
1690 case TOK_ULT: l1 = l1 < l2; break;
1691 case TOK_UGE: l1 = l1 >= l2; break;
1692 case TOK_EQ: l1 = l1 == l2; break;
1693 case TOK_NE: l1 = l1 != l2; break;
1694 case TOK_ULE: l1 = l1 <= l2; break;
1695 case TOK_UGT: l1 = l1 > l2; break;
1696 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1697 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1698 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1699 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1700 /* logical */
1701 case TOK_LAND: l1 = l1 && l2; break;
1702 case TOK_LOR: l1 = l1 || l2; break;
1703 default:
1704 goto general_case;
1706 v1->c.i = l1;
1707 vtop--;
1708 } else {
1709 /* if commutative ops, put c2 as constant */
1710 if (c1 && (op == '+' || op == '&' || op == '^' ||
1711 op == '|' || op == '*')) {
1712 vswap();
1713 c2 = c1; //c = c1, c1 = c2, c2 = c;
1714 l2 = l1; //l = l1, l1 = l2, l2 = l;
1716 if (!const_wanted &&
1717 c1 && ((l1 == 0 &&
1718 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1719 (l1 == -1 && op == TOK_SAR))) {
1720 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1721 vtop--;
1722 } else if (!const_wanted &&
1723 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1724 (l2 == -1 && op == '|') ||
1725 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1726 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1727 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1728 if (l2 == 1)
1729 vtop->c.i = 0;
1730 vswap();
1731 vtop--;
1732 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1733 op == TOK_PDIV) &&
1734 l2 == 1) ||
1735 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1736 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1737 l2 == 0) ||
1738 (op == '&' &&
1739 l2 == -1))) {
1740 /* filter out NOP operations like x*1, x-0, x&-1... */
1741 vtop--;
1742 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1743 /* try to use shifts instead of muls or divs */
1744 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1745 int n = -1;
1746 while (l2) {
1747 l2 >>= 1;
1748 n++;
1750 vtop->c.i = n;
1751 if (op == '*')
1752 op = TOK_SHL;
1753 else if (op == TOK_PDIV)
1754 op = TOK_SAR;
1755 else
1756 op = TOK_SHR;
1758 goto general_case;
1759 } else if (c2 && (op == '+' || op == '-') &&
1760 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1761 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1762 /* symbol + constant case */
1763 if (op == '-')
1764 l2 = -l2;
1765 vtop--;
1766 vtop->c.i += l2;
1767 } else {
1768 general_case:
1769 if (!nocode_wanted) {
1770 /* call low level op generator */
1771 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1772 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1773 gen_opl(op);
1774 else
1775 gen_opi(op);
1776 } else {
1777 vtop--;
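/* Strength-reduction example from the c2-constant case above: for a
   power-of-two right operand such as 8 the loop computes n = 3
   (8 == 1 << 3), so x * 8 is rewritten as x << 3, a TOK_PDIV by 8 as an
   arithmetic shift right by 3, and a TOK_UDIV by 8 as a logical shift. */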
1783 /* generate a floating point operation with constant propagation */
1784 static void gen_opif(int op)
1786 int c1, c2;
1787 SValue *v1, *v2;
1788 long double f1, f2;
1790 v1 = vtop - 1;
1791 v2 = vtop;
1792 /* currently, we cannot do computations with forward symbols */
1793 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1794 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1795 if (c1 && c2) {
1796 if (v1->type.t == VT_FLOAT) {
1797 f1 = v1->c.f;
1798 f2 = v2->c.f;
1799 } else if (v1->type.t == VT_DOUBLE) {
1800 f1 = v1->c.d;
1801 f2 = v2->c.d;
1802 } else {
1803 f1 = v1->c.ld;
1804 f2 = v2->c.ld;
1807 /* NOTE: we only do constant propagation if finite number (not
1808 NaN or infinity) (ANSI spec) */
1809 if (!ieee_finite(f1) || !ieee_finite(f2))
1810 goto general_case;
1812 switch(op) {
1813 case '+': f1 += f2; break;
1814 case '-': f1 -= f2; break;
1815 case '*': f1 *= f2; break;
1816 case '/':
1817 if (f2 == 0.0) {
1818 if (const_wanted)
1819 tcc_error("division by zero in constant");
1820 goto general_case;
1822 f1 /= f2;
1823 break;
1824 /* XXX: also handles tests ? */
1825 default:
1826 goto general_case;
1828 /* XXX: overflow test ? */
1829 if (v1->type.t == VT_FLOAT) {
1830 v1->c.f = f1;
1831 } else if (v1->type.t == VT_DOUBLE) {
1832 v1->c.d = f1;
1833 } else {
1834 v1->c.ld = f1;
1836 vtop--;
1837 } else {
1838 general_case:
1839 if (!nocode_wanted) {
1840 gen_opf(op);
1841 } else {
1842 vtop--;
1847 static int pointed_size(CType *type)
1849 int align;
1850 return type_size(pointed_type(type), &align);
1853 static void vla_runtime_pointed_size(CType *type)
1855 int align;
1856 vla_runtime_type_size(pointed_type(type), &align);
1859 static inline int is_null_pointer(SValue *p)
1861 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1862 return 0;
1863 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1864 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1865 ((p->type.t & VT_BTYPE) == VT_PTR &&
1866 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1869 static inline int is_integer_btype(int bt)
1871 return (bt == VT_BYTE || bt == VT_SHORT ||
1872 bt == VT_INT || bt == VT_LLONG);
1875 /* check types for comparison or subtraction of pointers */
1876 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1878 CType *type1, *type2, tmp_type1, tmp_type2;
1879 int bt1, bt2;
1881 /* null pointers are accepted for all comparisons, as in gcc */
1882 if (is_null_pointer(p1) || is_null_pointer(p2))
1883 return;
1884 type1 = &p1->type;
1885 type2 = &p2->type;
1886 bt1 = type1->t & VT_BTYPE;
1887 bt2 = type2->t & VT_BTYPE;
1888 /* accept comparison between pointer and integer with a warning */
1889 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1890 if (op != TOK_LOR && op != TOK_LAND )
1891 tcc_warning("comparison between pointer and integer");
1892 return;
1895 /* both must be pointers or implicit function pointers */
1896 if (bt1 == VT_PTR) {
1897 type1 = pointed_type(type1);
1898 } else if (bt1 != VT_FUNC)
1899 goto invalid_operands;
1901 if (bt2 == VT_PTR) {
1902 type2 = pointed_type(type2);
1903 } else if (bt2 != VT_FUNC) {
1904 invalid_operands:
1905 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1907 if ((type1->t & VT_BTYPE) == VT_VOID ||
1908 (type2->t & VT_BTYPE) == VT_VOID)
1909 return;
1910 tmp_type1 = *type1;
1911 tmp_type2 = *type2;
1912 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1913 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1914 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1915 /* gcc-like error if '-' is used */
1916 if (op == '-')
1917 goto invalid_operands;
1918 else
1919 tcc_warning("comparison of distinct pointer types lacks a cast");
1923 /* generic gen_op: handles types problems */
1924 ST_FUNC void gen_op(int op)
1926 int u, t1, t2, bt1, bt2, t;
1927 CType type1;
1929 t1 = vtop[-1].type.t;
1930 t2 = vtop[0].type.t;
1931 bt1 = t1 & VT_BTYPE;
1932 bt2 = t2 & VT_BTYPE;
1934 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1935 tcc_error("operation on a struct");
1936 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1937 /* at least one operand is a pointer */
1938 /* relational op: both operands must be pointers */
1939 if (op >= TOK_ULT && op <= TOK_LOR) {
1940 check_comparison_pointer_types(vtop - 1, vtop, op);
1941 /* pointers are handled as unsigned */
1942 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1943 t = VT_LLONG | VT_UNSIGNED;
1944 #else
1945 t = VT_INT | VT_UNSIGNED;
1946 #endif
1947 goto std_op;
1949 /* if both pointers, then it must be the '-' op */
1950 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1951 if (op != '-')
1952 tcc_error("cannot use pointers here");
1953 check_comparison_pointer_types(vtop - 1, vtop, op);
1954 /* XXX: check that types are compatible */
1955 if (vtop[-1].type.t & VT_VLA) {
1956 vla_runtime_pointed_size(&vtop[-1].type);
1957 } else {
1958 vpushi(pointed_size(&vtop[-1].type));
1960 vrott(3);
1961 gen_opic(op);
1962 /* set to integer type */
1963 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1964 vtop->type.t = VT_LLONG;
1965 #else
1966 vtop->type.t = VT_INT;
1967 #endif
1968 vswap();
1969 gen_op(TOK_PDIV);
1970 } else {
1971 /* exactly one pointer : must be '+' or '-'. */
1972 if (op != '-' && op != '+')
1973 tcc_error("cannot use pointers here");
1974 /* Put pointer as first operand */
1975 if (bt2 == VT_PTR) {
1976 vswap();
1977 swap(&t1, &t2);
1979 #if PTR_SIZE == 4
1980 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
1981 /* XXX: truncate here because gen_opl can't handle ptr + long long */
1982 gen_cast(&int_type);
1983 #endif
1984 type1 = vtop[-1].type;
1985 type1.t &= ~VT_ARRAY;
1986 if (vtop[-1].type.t & VT_VLA)
1987 vla_runtime_pointed_size(&vtop[-1].type);
1988 else {
1989 u = pointed_size(&vtop[-1].type);
1990 if (u < 0)
1991 tcc_error("unknown array element size");
1992 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1993 vpushll(u);
1994 #else
1995 /* XXX: cast to int ? (long long case) */
1996 vpushi(u);
1997 #endif
1999 gen_op('*');
2000 #if 0
2001 /* #ifdef CONFIG_TCC_BCHECK
2002 The main reason for removing this code:
2003 #include <stdio.h>
2004 int main ()
2006 int v[10];
2007 int i = 10;
2008 int j = 9;
2009 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2010 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2012 When this code is enabled, the output looks like
2013 v+i-j = 0xfffffffe
2014 v+(i-j) = 0xbff84000
2016 /* if evaluating constant expression, no code should be
2017 generated, so no bound check */
2018 if (tcc_state->do_bounds_check && !const_wanted) {
2019 /* if bounded pointers, we generate a special code to
2020 test bounds */
2021 if (op == '-') {
2022 vpushi(0);
2023 vswap();
2024 gen_op('-');
2026 gen_bounded_ptr_add();
2027 } else
2028 #endif
2030 gen_opic(op);
2032 /* restore the type in case gen_opic() swapped the operands */
2033 vtop->type = type1;
2035 } else if (is_float(bt1) || is_float(bt2)) {
2036 /* compute bigger type and do implicit casts */
2037 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2038 t = VT_LDOUBLE;
2039 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2040 t = VT_DOUBLE;
2041 } else {
2042 t = VT_FLOAT;
2044 /* floats can only be used for a few operations */
2045 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2046 (op < TOK_ULT || op > TOK_GT))
2047 tcc_error("invalid operands for binary operation");
2048 goto std_op;
2049 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2050 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2051 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2052 t |= VT_UNSIGNED;
2053 goto std_op;
2054 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2055 /* cast to biggest op */
2056 t = VT_LLONG;
2057 /* convert to unsigned if it does not fit in a long long */
2058 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2059 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2060 t |= VT_UNSIGNED;
2061 goto std_op;
2062 } else {
2063 /* integer operations */
2064 t = VT_INT;
2065 /* convert to unsigned if it does not fit in an integer */
2066 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2067 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2068 t |= VT_UNSIGNED;
2069 std_op:
2070 /* XXX: currently, some unsigned operations are explicit, so
2071 we modify them here */
2072 if (t & VT_UNSIGNED) {
2073 if (op == TOK_SAR)
2074 op = TOK_SHR;
2075 else if (op == '/')
2076 op = TOK_UDIV;
2077 else if (op == '%')
2078 op = TOK_UMOD;
2079 else if (op == TOK_LT)
2080 op = TOK_ULT;
2081 else if (op == TOK_GT)
2082 op = TOK_UGT;
2083 else if (op == TOK_LE)
2084 op = TOK_ULE;
2085 else if (op == TOK_GE)
2086 op = TOK_UGE;
2088 vswap();
2089 type1.t = t;
2090 gen_cast(&type1);
2091 vswap();
2092 /* special case for shifts and long long: we keep the shift as
2093 an integer */
2094 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2095 type1.t = VT_INT;
2096 gen_cast(&type1);
2097 if (is_float(t))
2098 gen_opif(op);
2099 else
2100 gen_opic(op);
2101 if (op >= TOK_ULT && op <= TOK_GT) {
2102 /* relational op: the result is an int */
2103 vtop->type.t = VT_INT;
2104 } else {
2105 vtop->type.t = t;
2108 // Make sure that we have converted to an rvalue:
2109 if (vtop->r & VT_LVAL && !nocode_wanted)
2110 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
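/* Pointer arithmetic handled by gen_op(), described with hypothetical C
   input rather than real parser state: for 'int *p; p + 3' the integer
   operand is first multiplied by pointed_size() == 4, and for 'p - q' the
   byte difference is divided back by 4 with TOK_PDIV so the result is an
   element count of integer type. */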
2113 #ifndef TCC_TARGET_ARM
2114 /* generic itof for unsigned long long case */
2115 static void gen_cvt_itof1(int t)
2117 #ifdef TCC_TARGET_ARM64
2118 gen_cvt_itof(t);
2119 #else
2120 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2121 (VT_LLONG | VT_UNSIGNED)) {
2123 if (t == VT_FLOAT)
2124 vpush_global_sym(&func_old_type, TOK___floatundisf);
2125 #if LDOUBLE_SIZE != 8
2126 else if (t == VT_LDOUBLE)
2127 vpush_global_sym(&func_old_type, TOK___floatundixf);
2128 #endif
2129 else
2130 vpush_global_sym(&func_old_type, TOK___floatundidf);
2131 vrott(2);
2132 gfunc_call(1);
2133 vpushi(0);
2134 vtop->r = reg_fret(t);
2135 } else {
2136 gen_cvt_itof(t);
2138 #endif
2140 #endif
2142 /* generic ftoi for unsigned long long case */
2143 static void gen_cvt_ftoi1(int t)
2145 #ifdef TCC_TARGET_ARM64
2146 gen_cvt_ftoi(t);
2147 #else
2148 int st;
2150 if (t == (VT_LLONG | VT_UNSIGNED)) {
2151 /* not handled natively */
2152 st = vtop->type.t & VT_BTYPE;
2153 if (st == VT_FLOAT)
2154 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2155 #if LDOUBLE_SIZE != 8
2156 else if (st == VT_LDOUBLE)
2157 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2158 #endif
2159 else
2160 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2161 vrott(2);
2162 gfunc_call(1);
2163 vpushi(0);
2164 vtop->r = REG_IRET;
2165 vtop->r2 = REG_LRET;
2166 } else {
2167 gen_cvt_ftoi(t);
2169 #endif
2172 /* force char or short cast */
2173 static void force_charshort_cast(int t)
2175 int bits, dbt;
2176 dbt = t & VT_BTYPE;
2177 /* XXX: add optimization if lvalue : just change type and offset */
2178 if (dbt == VT_BYTE)
2179 bits = 8;
2180 else
2181 bits = 16;
2182 if (t & VT_UNSIGNED) {
2183 vpushi((1 << bits) - 1);
2184 gen_op('&');
2185 } else {
2186 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2187 bits = 64 - bits;
2188 else
2189 bits = 32 - bits;
2190 vpushi(bits);
2191 gen_op(TOK_SHL);
2192 /* result must be signed, otherwise gen_op() turns the SAR into a
2193 logical shift. This was not the case when "t" was a signed short
2194 and the last value on the stack was an unsigned int */
2195 vtop->type.t &= ~VT_UNSIGNED;
2196 vpushi(bits);
2197 gen_op(TOK_SAR);
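/* Illustrative sketch (hypothetical values): what force_charshort_cast()
   produces for the two branches above.

       int x = 300;
       unsigned char uc = (unsigned char)x;  // unsigned: x & 0xff  -> 44
       short s = (short)0x12345;             // signed: (v << 16) >> 16 sign-extends the low 16 bits
*/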
2201 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2202 static void gen_cast(CType *type)
2204 int sbt, dbt, sf, df, c, p;
2206 /* special delayed cast for char/short */
2207 /* XXX: in some cases (multiple cascaded casts), it may still
2208 be incorrect */
2209 if (vtop->r & VT_MUSTCAST) {
2210 vtop->r &= ~VT_MUSTCAST;
2211 force_charshort_cast(vtop->type.t);
2214 /* bitfields first get cast to ints */
2215 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2216 gv(RC_INT);
2219 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2220 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2222 if (sbt != dbt) {
2223 sf = is_float(sbt);
2224 df = is_float(dbt);
2225 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2226 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2227 if (c) {
2228 /* constant case: we can do it now */
2229 /* XXX: in ISOC, cannot do it if error in convert */
2230 if (sbt == VT_FLOAT)
2231 vtop->c.ld = vtop->c.f;
2232 else if (sbt == VT_DOUBLE)
2233 vtop->c.ld = vtop->c.d;
2235 if (df) {
2236 if ((sbt & VT_BTYPE) == VT_LLONG) {
2237 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2238 vtop->c.ld = vtop->c.i;
2239 else
2240 vtop->c.ld = -(long double)-vtop->c.i;
2241 } else if(!sf) {
2242 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2243 vtop->c.ld = (uint32_t)vtop->c.i;
2244 else
2245 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2248 if (dbt == VT_FLOAT)
2249 vtop->c.f = (float)vtop->c.ld;
2250 else if (dbt == VT_DOUBLE)
2251 vtop->c.d = (double)vtop->c.ld;
2252 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2253 vtop->c.i = vtop->c.ld;
2254 } else if (sf && dbt == VT_BOOL) {
2255 vtop->c.i = (vtop->c.ld != 0);
2256 } else {
2257 if(sf)
2258 vtop->c.i = vtop->c.ld;
2259 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2260 ;
2261 else if (sbt & VT_UNSIGNED)
2262 vtop->c.i = (uint32_t)vtop->c.i;
2263 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2264 else if (sbt == VT_PTR)
2265 ;
2266 #endif
2267 else if (sbt != VT_LLONG)
2268 vtop->c.i = ((uint32_t)vtop->c.i |
2269 -(vtop->c.i & 0x80000000));
2271 if (dbt == (VT_LLONG|VT_UNSIGNED))
2272 ;
2273 else if (dbt == VT_BOOL)
2274 vtop->c.i = (vtop->c.i != 0);
2275 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2276 else if (dbt == VT_PTR)
2277 ;
2278 #endif
2279 else if (dbt != VT_LLONG) {
2280 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2281 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2282 0xffffffff);
2283 vtop->c.i &= m;
2284 if (!(dbt & VT_UNSIGNED))
2285 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2288 } else if (p && dbt == VT_BOOL) {
2289 vtop->r = VT_CONST;
2290 vtop->c.i = 1;
2291 } else if (!nocode_wanted) {
2292 /* non constant case: generate code */
2293 if (sf && df) {
2294 /* convert from fp to fp */
2295 gen_cvt_ftof(dbt);
2296 } else if (df) {
2297 /* convert int to fp */
2298 gen_cvt_itof1(dbt);
2299 } else if (sf) {
2300 /* convert fp to int */
2301 if (dbt == VT_BOOL) {
2302 vpushi(0);
2303 gen_op(TOK_NE);
2304 } else {
2305 /* we handle char/short/etc... with generic code */
2306 if (dbt != (VT_INT | VT_UNSIGNED) &&
2307 dbt != (VT_LLONG | VT_UNSIGNED) &&
2308 dbt != VT_LLONG)
2309 dbt = VT_INT;
2310 gen_cvt_ftoi1(dbt);
2311 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2312 /* additional cast for char/short... */
2313 vtop->type.t = dbt;
2314 gen_cast(type);
2317 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2318 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2319 if ((sbt & VT_BTYPE) != VT_LLONG) {
2320 /* scalar to long long */
2321 /* machine independent conversion */
2322 gv(RC_INT);
2323 /* generate high word */
2324 if (sbt == (VT_INT | VT_UNSIGNED)) {
2325 vpushi(0);
2326 gv(RC_INT);
2327 } else {
2328 if (sbt == VT_PTR) {
2329 /* cast from pointer to int before we apply
2330 shift operation, which pointers don't support*/
2331 gen_cast(&int_type);
2333 gv_dup();
2334 vpushi(31);
2335 gen_op(TOK_SAR);
2337 /* patch second register */
2338 vtop[-1].r2 = vtop->r;
2339 vpop();
2341 #else
2342 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2343 (dbt & VT_BTYPE) == VT_PTR ||
2344 (dbt & VT_BTYPE) == VT_FUNC) {
2345 if ((sbt & VT_BTYPE) != VT_LLONG &&
2346 (sbt & VT_BTYPE) != VT_PTR &&
2347 (sbt & VT_BTYPE) != VT_FUNC) {
2348 /* need to convert from 32bit to 64bit */
2349 gv(RC_INT);
2350 if (sbt != (VT_INT | VT_UNSIGNED)) {
2351 #if defined(TCC_TARGET_ARM64)
2352 gen_cvt_sxtw();
2353 #elif defined(TCC_TARGET_X86_64)
2354 int r = gv(RC_INT);
2355 /* x86_64 specific: movslq */
2356 o(0x6348);
2357 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2358 #else
2359 #error
2360 #endif
2363 #endif
2364 } else if (dbt == VT_BOOL) {
2365 /* scalar to bool */
2366 vpushi(0);
2367 gen_op(TOK_NE);
2368 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2369 (dbt & VT_BTYPE) == VT_SHORT) {
2370 if (sbt == VT_PTR) {
2371 vtop->type.t = VT_INT;
2372 tcc_warning("nonportable conversion from pointer to char/short");
2374 force_charshort_cast(dbt);
2375 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2376 } else if ((dbt & VT_BTYPE) == VT_INT) {
2377 /* scalar to int */
2378 if ((sbt & VT_BTYPE) == VT_LLONG) {
2379 /* from long long: just take low order word */
2380 lexpand();
2381 vpop();
2383 /* if lvalue and single word type, nothing to do because
2384 the lvalue already contains the real type size (see
2385 VT_LVAL_xxx constants) */
2386 #endif
2389 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2390 /* if we are casting between pointer types,
2391 we must update the VT_LVAL_xxx size */
2392 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2393 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2395 vtop->type = *type;
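/* Illustrative sketch (hypothetical snippet): constant casts are folded at
   compile time by the 'c' branch of gen_cast() above, e.g.

       unsigned char c = (unsigned char)300;   // constant masked to 44
       long long ll = (long long)-1;           // 32-bit constant sign-extended to 64 bits
       _Bool b = (_Bool)2.5;                   // floating constant folds to 1 (ld != 0)
*/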
2398 /* return type size as known at compile time. Put alignment at 'a' */
2399 ST_FUNC int type_size(CType *type, int *a)
2401 Sym *s;
2402 int bt;
2404 bt = type->t & VT_BTYPE;
2405 if (bt == VT_STRUCT) {
2406 /* struct/union */
2407 s = type->ref;
2408 *a = s->r;
2409 return s->c;
2410 } else if (bt == VT_PTR) {
2411 if (type->t & VT_ARRAY) {
2412 int ts;
2414 s = type->ref;
2415 ts = type_size(&s->type, a);
2417 if (ts < 0 && s->c < 0)
2418 ts = -ts;
2420 return ts * s->c;
2421 } else {
2422 *a = PTR_SIZE;
2423 return PTR_SIZE;
2425 } else if (bt == VT_LDOUBLE) {
2426 *a = LDOUBLE_ALIGN;
2427 return LDOUBLE_SIZE;
2428 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2429 #ifdef TCC_TARGET_I386
2430 #ifdef TCC_TARGET_PE
2431 *a = 8;
2432 #else
2433 *a = 4;
2434 #endif
2435 #elif defined(TCC_TARGET_ARM)
2436 #ifdef TCC_ARM_EABI
2437 *a = 8;
2438 #else
2439 *a = 4;
2440 #endif
2441 #else
2442 *a = 8;
2443 #endif
2444 return 8;
2445 } else if (bt == VT_INT || bt == VT_FLOAT) {
2446 *a = 4;
2447 return 4;
2448 } else if (bt == VT_SHORT) {
2449 *a = 2;
2450 return 2;
2451 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2452 *a = 8;
2453 return 16;
2454 } else if (bt == VT_ENUM) {
2455 *a = 4;
2456 /* Enums might be incomplete, so don't just return '4' here. */
2457 return type->ref->c;
2458 } else {
2459 /* char, void, function, _Bool */
2460 *a = 1;
2461 return 1;
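/* Illustrative sketch (hypothetical types): what type_size() reports.

       sizeof(short)       -> 2, alignment 2
       sizeof(long long)   -> 8; alignment is 4 on i386 (non-PE) and ARM (non-EABI), 8 elsewhere
       sizeof(struct S)    -> taken from the struct symbol: size in s->c, alignment in s->r
*/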
2465 /* push type size as known at run time on top of value stack. Put
2466 alignment at 'a' */
2467 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2469 if (type->t & VT_VLA) {
2470 type_size(&type->ref->type, a);
2471 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2472 } else {
2473 vpushi(type_size(type, a));
2477 static void vla_sp_restore(void) {
2478 if (vlas_in_scope) {
2479 gen_vla_sp_restore(vla_sp_loc);
2483 static void vla_sp_restore_root(void) {
2484 if (vlas_in_scope) {
2485 gen_vla_sp_restore(vla_sp_root_loc);
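/* Illustrative sketch (hypothetical snippet): for a VLA the element count is
   only known at run time, so vla_runtime_type_size() pushes a load of the
   hidden local that records the array's byte size instead of a constant.

       void f(int n) {
           int a[n];                     // byte size stored in a hidden local at type->ref->c
           unsigned long s = sizeof a;   // reads that hidden local back
       }
*/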
2489 /* return the pointed type of t */
2490 static inline CType *pointed_type(CType *type)
2492 return &type->ref->type;
2495 /* modify type so that it is a pointer to the original type. */
2496 ST_FUNC void mk_pointer(CType *type)
2498 Sym *s;
2499 s = sym_push(SYM_FIELD, type, 0, -1);
2500 type->t = VT_PTR | (type->t & ~VT_TYPE);
2501 type->ref = s;
2504 /* compare function types. OLD functions match any new functions */
2505 static int is_compatible_func(CType *type1, CType *type2)
2507 Sym *s1, *s2;
2509 s1 = type1->ref;
2510 s2 = type2->ref;
2511 if (!is_compatible_types(&s1->type, &s2->type))
2512 return 0;
2513 /* check func_call */
2514 if (s1->a.func_call != s2->a.func_call)
2515 return 0;
2516 /* XXX: not complete */
2517 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2518 return 1;
2519 if (s1->c != s2->c)
2520 return 0;
2521 while (s1 != NULL) {
2522 if (s2 == NULL)
2523 return 0;
2524 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2525 return 0;
2526 s1 = s1->next;
2527 s2 = s2->next;
2529 if (s2)
2530 return 0;
2531 return 1;
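/* Illustrative sketch (hypothetical declarations): an old-style (K&R)
   declaration is treated as compatible with any new-style prototype by
   is_compatible_func() above, while two prototypes must match parameter by
   parameter.

       int f();                    // FUNC_OLD: matches the definition below
       int f(int x) { return x; }
       int g(int);                 // would NOT match "int g(double);"
*/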
2534 /* return true if type1 and type2 are the same. If unqualified is
2535 true, qualifiers on the types are ignored.
2537 - enums are not checked, as with gcc's __builtin_types_compatible_p ()
2538 */
2539 static int compare_types(CType *type1, CType *type2, int unqualified)
2541 int bt1, t1, t2;
2543 t1 = type1->t & VT_TYPE;
2544 t2 = type2->t & VT_TYPE;
2545 if (unqualified) {
2546 /* strip qualifiers before comparing */
2547 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2548 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2550 /* Default vs. explicit signedness only matters for char */
2551 if ((t1 & VT_BTYPE) != VT_BYTE) {
2552 t1 &= ~VT_DEFSIGN;
2553 t2 &= ~VT_DEFSIGN;
2555 /* XXX: bitfields ? */
2556 if (t1 != t2)
2557 return 0;
2558 /* test more complicated cases */
2559 bt1 = t1 & VT_BTYPE;
2560 if (bt1 == VT_PTR) {
2561 type1 = pointed_type(type1);
2562 type2 = pointed_type(type2);
2563 return is_compatible_types(type1, type2);
2564 } else if (bt1 == VT_STRUCT) {
2565 return (type1->ref == type2->ref);
2566 } else if (bt1 == VT_FUNC) {
2567 return is_compatible_func(type1, type2);
2568 } else {
2569 return 1;
2573 /* return true if type1 and type2 are exactly the same (including
2574 qualifiers).
2575 */
2576 static int is_compatible_types(CType *type1, CType *type2)
2578 return compare_types(type1,type2,0);
2581 /* return true if type1 and type2 are the same (ignoring qualifiers).
2582 */
2583 static int is_compatible_parameter_types(CType *type1, CType *type2)
2585 return compare_types(type1,type2,1);
2588 /* print a type. If 'varstr' is not NULL, then the variable is also
2589 printed in the type */
2590 /* XXX: union */
2591 /* XXX: add array and function pointers */
2592 static void type_to_str(char *buf, int buf_size,
2593 CType *type, const char *varstr)
2595 int bt, v, t;
2596 Sym *s, *sa;
2597 char buf1[256];
2598 const char *tstr;
2600 t = type->t & VT_TYPE;
2601 bt = t & VT_BTYPE;
2602 buf[0] = '\0';
2603 if (t & VT_CONSTANT)
2604 pstrcat(buf, buf_size, "const ");
2605 if (t & VT_VOLATILE)
2606 pstrcat(buf, buf_size, "volatile ");
2607 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2608 pstrcat(buf, buf_size, "unsigned ");
2609 else if (t & VT_DEFSIGN)
2610 pstrcat(buf, buf_size, "signed ");
2611 switch(bt) {
2612 case VT_VOID:
2613 tstr = "void";
2614 goto add_tstr;
2615 case VT_BOOL:
2616 tstr = "_Bool";
2617 goto add_tstr;
2618 case VT_BYTE:
2619 tstr = "char";
2620 goto add_tstr;
2621 case VT_SHORT:
2622 tstr = "short";
2623 goto add_tstr;
2624 case VT_INT:
2625 tstr = "int";
2626 goto add_tstr;
2627 case VT_LONG:
2628 tstr = "long";
2629 goto add_tstr;
2630 case VT_LLONG:
2631 tstr = "long long";
2632 goto add_tstr;
2633 case VT_FLOAT:
2634 tstr = "float";
2635 goto add_tstr;
2636 case VT_DOUBLE:
2637 tstr = "double";
2638 goto add_tstr;
2639 case VT_LDOUBLE:
2640 tstr = "long double";
2641 add_tstr:
2642 pstrcat(buf, buf_size, tstr);
2643 break;
2644 case VT_ENUM:
2645 case VT_STRUCT:
2646 if (bt == VT_STRUCT)
2647 tstr = "struct ";
2648 else
2649 tstr = "enum ";
2650 pstrcat(buf, buf_size, tstr);
2651 v = type->ref->v & ~SYM_STRUCT;
2652 if (v >= SYM_FIRST_ANOM)
2653 pstrcat(buf, buf_size, "<anonymous>");
2654 else
2655 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2656 break;
2657 case VT_FUNC:
2658 s = type->ref;
2659 type_to_str(buf, buf_size, &s->type, varstr);
2660 pstrcat(buf, buf_size, "(");
2661 sa = s->next;
2662 while (sa != NULL) {
2663 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2664 pstrcat(buf, buf_size, buf1);
2665 sa = sa->next;
2666 if (sa)
2667 pstrcat(buf, buf_size, ", ");
2669 pstrcat(buf, buf_size, ")");
2670 goto no_var;
2671 case VT_PTR:
2672 s = type->ref;
2673 if (t & VT_ARRAY) {
2674 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2675 type_to_str(buf, buf_size, &s->type, buf1);
2676 goto no_var;
2678 pstrcpy(buf1, sizeof(buf1), "*");
2679 if (t & VT_CONSTANT)
2680 pstrcat(buf1, buf_size, "const ");
2681 if (t & VT_VOLATILE)
2682 pstrcat(buf1, buf_size, "volatile ");
2683 if (varstr)
2684 pstrcat(buf1, sizeof(buf1), varstr);
2685 type_to_str(buf, buf_size, &s->type, buf1);
2686 goto no_var;
2688 if (varstr) {
2689 pstrcat(buf, buf_size, " ");
2690 pstrcat(buf, buf_size, varstr);
2692 no_var: ;
2695 /* verify type compatibility to store vtop in 'dt' type, and generate
2696 casts if needed. */
2697 static void gen_assign_cast(CType *dt)
2699 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2700 char buf1[256], buf2[256];
2701 int dbt, sbt;
2703 st = &vtop->type; /* source type */
2704 dbt = dt->t & VT_BTYPE;
2705 sbt = st->t & VT_BTYPE;
2706 if (sbt == VT_VOID || dbt == VT_VOID) {
2707 if (sbt == VT_VOID && dbt == VT_VOID)
2708 ; /*
2709 It is Ok if both are void
2710 A test program:
2711 void func1() {}
2712 void func2() {
2713 return func1();
2714 }
2715 gcc accepts this program
2716 */
2717 else
2718 tcc_error("cannot cast from/to void");
2720 if (dt->t & VT_CONSTANT)
2721 tcc_warning("assignment of read-only location");
2722 switch(dbt) {
2723 case VT_PTR:
2724 /* special cases for pointers */
2725 /* '0' can also be a pointer */
2726 if (is_null_pointer(vtop))
2727 goto type_ok;
2728 /* accept implicit pointer to integer cast with warning */
2729 if (is_integer_btype(sbt)) {
2730 tcc_warning("assignment makes pointer from integer without a cast");
2731 goto type_ok;
2733 type1 = pointed_type(dt);
2734 /* a function is implicitly a function pointer */
2735 if (sbt == VT_FUNC) {
2736 if ((type1->t & VT_BTYPE) != VT_VOID &&
2737 !is_compatible_types(pointed_type(dt), st))
2738 tcc_warning("assignment from incompatible pointer type");
2739 goto type_ok;
2741 if (sbt != VT_PTR)
2742 goto error;
2743 type2 = pointed_type(st);
2744 if ((type1->t & VT_BTYPE) == VT_VOID ||
2745 (type2->t & VT_BTYPE) == VT_VOID) {
2746 /* void * can match anything */
2747 } else {
2748 /* exact type match, except for unsigned */
2749 tmp_type1 = *type1;
2750 tmp_type2 = *type2;
2751 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT |
2752 VT_VOLATILE);
2753 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT |
2754 VT_VOLATILE);
2755 if (!is_compatible_types(&tmp_type1, &tmp_type2))
2756 tcc_warning("assignment from incompatible pointer type");
2758 /* check const and volatile */
2759 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2760 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2761 tcc_warning("assignment discards qualifiers from pointer target type");
2762 break;
2763 case VT_BYTE:
2764 case VT_SHORT:
2765 case VT_INT:
2766 case VT_LLONG:
2767 if (sbt == VT_PTR || sbt == VT_FUNC) {
2768 tcc_warning("assignment makes integer from pointer without a cast");
2769 } else if (sbt == VT_STRUCT) {
2770 goto case_VT_STRUCT;
2772 /* XXX: more tests */
2773 break;
2774 case VT_STRUCT:
2775 case_VT_STRUCT:
2776 tmp_type1 = *dt;
2777 tmp_type2 = *st;
2778 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2779 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2780 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2781 error:
2782 type_to_str(buf1, sizeof(buf1), st, NULL);
2783 type_to_str(buf2, sizeof(buf2), dt, NULL);
2784 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2786 break;
2788 type_ok:
2789 gen_cast(dt);
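/* Illustrative sketch (hypothetical snippet): assignments that trigger the
   diagnostics issued by gen_assign_cast() above.

       int *p = 3;       // "assignment makes pointer from integer without a cast"
       const int ci = 0;
       int *q = &ci;     // "assignment discards qualifiers from pointer target type"
       long n = p;       // "assignment makes integer from pointer without a cast"
*/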
2792 /* store vtop in lvalue pushed on stack */
2793 ST_FUNC void vstore(void)
2795 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2797 ft = vtop[-1].type.t;
2798 sbt = vtop->type.t & VT_BTYPE;
2799 dbt = ft & VT_BTYPE;
2800 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2801 (sbt == VT_INT && dbt == VT_SHORT))
2802 && !(vtop->type.t & VT_BITFIELD)) {
2803 /* optimize char/short casts */
2804 delayed_cast = VT_MUSTCAST;
2805 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2806 ((1 << VT_STRUCT_SHIFT) - 1));
2807 /* XXX: factorize */
2808 if (ft & VT_CONSTANT)
2809 tcc_warning("assignment of read-only location");
2810 } else {
2811 delayed_cast = 0;
2812 if (!(ft & VT_BITFIELD))
2813 gen_assign_cast(&vtop[-1].type);
2816 if (sbt == VT_STRUCT) {
2817 /* if structure, only generate pointer */
2818 /* structure assignment : generate memcpy */
2819 /* XXX: optimize if small size */
2820 if (!nocode_wanted) {
2821 size = type_size(&vtop->type, &align);
2823 /* destination */
2824 vswap();
2825 vtop->type.t = VT_PTR;
2826 gaddrof();
2828 /* address of memcpy() */
2829 #ifdef TCC_ARM_EABI
2830 if(!(align & 7))
2831 vpush_global_sym(&func_old_type, TOK_memcpy8);
2832 else if(!(align & 3))
2833 vpush_global_sym(&func_old_type, TOK_memcpy4);
2834 else
2835 #endif
2836 /* Use memmove, rather than memcpy, as dest and src may be same: */
2837 vpush_global_sym(&func_old_type, TOK_memmove);
2839 vswap();
2840 /* source */
2841 vpushv(vtop - 2);
2842 vtop->type.t = VT_PTR;
2843 gaddrof();
2844 /* type size */
2845 vpushi(size);
2846 gfunc_call(3);
2847 } else {
2848 vswap();
2849 vpop();
2851 /* leave source on stack */
2852 } else if (ft & VT_BITFIELD) {
2853 /* bitfield store handling */
2855 /* save lvalue as expression result (example: s.b = s.a = n;) */
2856 vdup(), vtop[-1] = vtop[-2];
2858 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2859 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2860 /* remove bit field info to avoid loops */
2861 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2863 if((ft & VT_BTYPE) == VT_BOOL) {
2864 gen_cast(&vtop[-1].type);
2865 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2868 /* duplicate destination */
2869 vdup();
2870 vtop[-1] = vtop[-2];
2872 /* mask and shift source */
2873 if((ft & VT_BTYPE) != VT_BOOL) {
2874 if((ft & VT_BTYPE) == VT_LLONG) {
2875 vpushll((1ULL << bit_size) - 1ULL);
2876 } else {
2877 vpushi((1 << bit_size) - 1);
2879 gen_op('&');
2881 vpushi(bit_pos);
2882 gen_op(TOK_SHL);
2883 /* load destination, mask and or with source */
2884 vswap();
2885 if((ft & VT_BTYPE) == VT_LLONG) {
2886 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2887 } else {
2888 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2890 gen_op('&');
2891 gen_op('|');
2892 /* store result */
2893 vstore();
2894 /* ... and discard */
2895 vpop();
2897 } else {
2898 if (!nocode_wanted) {
2899 #ifdef CONFIG_TCC_BCHECK
2900 /* bound check case */
2901 if (vtop[-1].r & VT_MUSTBOUND) {
2902 vswap();
2903 gbound();
2904 vswap();
2906 #endif
2907 rc = RC_INT;
2908 if (is_float(ft)) {
2909 rc = RC_FLOAT;
2910 #ifdef TCC_TARGET_X86_64
2911 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2912 rc = RC_ST0;
2913 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2914 rc = RC_FRET;
2916 #endif
2918 r = gv(rc); /* generate value */
2919 /* if lvalue was saved on stack, must read it */
2920 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2921 SValue sv;
2922 t = get_reg(RC_INT);
2923 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2924 sv.type.t = VT_PTR;
2925 #else
2926 sv.type.t = VT_INT;
2927 #endif
2928 sv.r = VT_LOCAL | VT_LVAL;
2929 sv.c.i = vtop[-1].c.i;
2930 load(t, &sv);
2931 vtop[-1].r = t | VT_LVAL;
2933 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2934 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2935 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
2936 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
2937 #else
2938 if ((ft & VT_BTYPE) == VT_LLONG) {
2939 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
2940 #endif
2941 vtop[-1].type.t = load_type;
2942 store(r, vtop - 1);
2943 vswap();
2944 /* convert to int to increment easily */
2945 vtop->type.t = addr_type;
2946 gaddrof();
2947 vpushi(load_size);
2948 gen_op('+');
2949 vtop->r |= VT_LVAL;
2950 vswap();
2951 vtop[-1].type.t = load_type;
2952 /* XXX: it works because r2 is spilled last ! */
2953 store(vtop->r2, vtop - 1);
2954 } else {
2955 store(r, vtop - 1);
2958 vswap();
2959 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
2960 vtop->r |= delayed_cast;
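/* Illustrative sketch (hypothetical snippet): the three main paths of
   vstore() above.

       struct S s1, s2;
       s1 = s2;                        // struct: lowered to memmove(&s1, &s2, sizeof(struct S))
       struct B { unsigned f:3; } b;
       b.f = 9;                        // bitfield: source masked with 0x7, shifted, merged into the word
       char c; int i = 1000;
       c = i;                          // char/short store: the narrowing cast is delayed via VT_MUSTCAST
*/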
2964 /* post defines POST/PRE add. c is the token ++ or -- */
2965 ST_FUNC void inc(int post, int c)
2967 test_lvalue();
2968 vdup(); /* save lvalue */
2969 if (post) {
2970 if (!nocode_wanted)
2971 gv_dup(); /* duplicate value */
2972 else
2973 vdup(); /* duplicate value */
2974 vrotb(3);
2975 vrotb(3);
2977 /* add constant */
2978 vpushi(c - TOK_MID);
2979 gen_op('+');
2980 vstore(); /* store value */
2981 if (post)
2982 vpop(); /* if post op, return saved value */
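/* Illustrative sketch: both forms go through inc(); the added constant is
   c - TOK_MID, i.e. +1 for TOK_INC and -1 for TOK_DEC.

       i++;     // post: the old value is duplicated first and returned after the store
       --i;     // pre: the stored value itself is the result
*/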
2985 /* Parse GNUC __attribute__ extension. Currently, the following
2986 extensions are recognized:
2987 - aligned(n) : set data/function alignment.
2988 - packed : force data alignment to 1
2989 - section(x) : generate data/code in this section.
2990 - unused : currently ignored, but may be used someday.
2991 - regparm(n) : pass function parameters in registers (i386 only)
2992 */
2993 static void parse_attribute(AttributeDef *ad)
2995 int t, n;
2997 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
2998 next();
2999 skip('(');
3000 skip('(');
3001 while (tok != ')') {
3002 if (tok < TOK_IDENT)
3003 expect("attribute name");
3004 t = tok;
3005 next();
3006 switch(t) {
3007 case TOK_SECTION1:
3008 case TOK_SECTION2:
3009 skip('(');
3010 if (tok != TOK_STR)
3011 expect("section name");
3012 ad->section = find_section(tcc_state, (char *)tokc.str.data);
3013 next();
3014 skip(')');
3015 break;
3016 case TOK_ALIAS1:
3017 case TOK_ALIAS2:
3018 skip('(');
3019 if (tok != TOK_STR)
3020 expect("alias(\"target\")");
3021 ad->alias_target = /* save string as token, for later */
3022 tok_alloc((char*)tokc.str.data, tokc.str.size-1)->tok;
3023 next();
3024 skip(')');
3025 break;
3026 case TOK_VISIBILITY1:
3027 case TOK_VISIBILITY2:
3028 skip('(');
3029 if (tok != TOK_STR)
3030 expect("visibility(\"default|hidden|internal|protected\")");
3031 if (!strcmp (tokc.str.data, "default"))
3032 ad->a.visibility = STV_DEFAULT;
3033 else if (!strcmp (tokc.str.data, "hidden"))
3034 ad->a.visibility = STV_HIDDEN;
3035 else if (!strcmp (tokc.str.data, "internal"))
3036 ad->a.visibility = STV_INTERNAL;
3037 else if (!strcmp (tokc.str.data, "protected"))
3038 ad->a.visibility = STV_PROTECTED;
3039 else
3040 expect("visibility(\"default|hidden|internal|protected\")");
3041 next();
3042 skip(')');
3043 break;
3044 case TOK_ALIGNED1:
3045 case TOK_ALIGNED2:
3046 if (tok == '(') {
3047 next();
3048 n = expr_const();
3049 if (n <= 0 || (n & (n - 1)) != 0)
3050 tcc_error("alignment must be a positive power of two");
3051 skip(')');
3052 } else {
3053 n = MAX_ALIGN;
3055 ad->a.aligned = n;
3056 break;
3057 case TOK_PACKED1:
3058 case TOK_PACKED2:
3059 ad->a.packed = 1;
3060 break;
3061 case TOK_WEAK1:
3062 case TOK_WEAK2:
3063 ad->a.weak = 1;
3064 break;
3065 case TOK_UNUSED1:
3066 case TOK_UNUSED2:
3067 /* currently, no need to handle it because tcc does not
3068 track unused objects */
3069 break;
3070 case TOK_NORETURN1:
3071 case TOK_NORETURN2:
3072 /* currently, no need to handle it because tcc does not
3073 make use of the noreturn information */
3074 break;
3075 case TOK_CDECL1:
3076 case TOK_CDECL2:
3077 case TOK_CDECL3:
3078 ad->a.func_call = FUNC_CDECL;
3079 break;
3080 case TOK_STDCALL1:
3081 case TOK_STDCALL2:
3082 case TOK_STDCALL3:
3083 ad->a.func_call = FUNC_STDCALL;
3084 break;
3085 #ifdef TCC_TARGET_I386
3086 case TOK_REGPARM1:
3087 case TOK_REGPARM2:
3088 skip('(');
3089 n = expr_const();
3090 if (n > 3)
3091 n = 3;
3092 else if (n < 0)
3093 n = 0;
3094 if (n > 0)
3095 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3096 skip(')');
3097 break;
3098 case TOK_FASTCALL1:
3099 case TOK_FASTCALL2:
3100 case TOK_FASTCALL3:
3101 ad->a.func_call = FUNC_FASTCALLW;
3102 break;
3103 #endif
3104 case TOK_MODE:
3105 skip('(');
3106 switch(tok) {
3107 case TOK_MODE_DI:
3108 ad->a.mode = VT_LLONG + 1;
3109 break;
3110 case TOK_MODE_QI:
3111 ad->a.mode = VT_BYTE + 1;
3112 break;
3113 case TOK_MODE_HI:
3114 ad->a.mode = VT_SHORT + 1;
3115 break;
3116 case TOK_MODE_SI:
3117 case TOK_MODE_word:
3118 ad->a.mode = VT_INT + 1;
3119 break;
3120 default:
3121 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3122 break;
3124 next();
3125 skip(')');
3126 break;
3127 case TOK_DLLEXPORT:
3128 ad->a.func_export = 1;
3129 break;
3130 case TOK_DLLIMPORT:
3131 ad->a.func_import = 1;
3132 break;
3133 default:
3134 if (tcc_state->warn_unsupported)
3135 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3136 /* skip parameters */
3137 if (tok == '(') {
3138 int parenthesis = 0;
3139 do {
3140 if (tok == '(')
3141 parenthesis++;
3142 else if (tok == ')')
3143 parenthesis--;
3144 next();
3145 } while (parenthesis && tok != -1);
3147 break;
3149 if (tok != ',')
3150 break;
3151 next();
3153 skip(')');
3154 skip(')');
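/* Illustrative sketch (hypothetical declarations, ".mydata" is an arbitrary
   section name): attributes accepted by parse_attribute() above.

       static int buf[64] __attribute__((aligned(16), section(".mydata")));
       void die(const char *msg) __attribute__((noreturn));
       struct hdr { char tag; int len; } __attribute__((packed));
       int sum3(int, int, int) __attribute__((regparm(3)));   // i386 only
*/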
3158 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3159 static void struct_decl(CType *type, AttributeDef *ad, int u)
3161 int a, v, size, align, maxalign, c, offset, flexible, extra_bytes;
3162 int bit_size, bit_pos, bsize, bt, lbit_pos, prevbt;
3163 Sym *s, *ss, *ass, **ps;
3164 AttributeDef ad1;
3165 CType type1, btype;
3167 a = tok; /* save decl type */
3168 next();
3169 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3170 parse_attribute(ad);
3171 next();
3173 if (tok != '{') {
3174 v = tok;
3175 next();
3176 /* struct already defined ? return it */
3177 if (v < TOK_IDENT)
3178 expect("struct/union/enum name");
3179 s = struct_find(v);
3180 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3181 if (s->type.t != a)
3182 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3183 goto do_decl;
3185 } else {
3186 v = anon_sym++;
3188 type1.t = a;
3189 type1.ref = NULL;
3190 /* we put an undefined size for struct/union */
3191 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3192 s->r = 0; /* default alignment is zero as gcc */
3193 /* put struct/union/enum name in type */
3194 do_decl:
3195 type->t = u;
3196 type->ref = s;
3198 if (tok == '{') {
3199 next();
3200 if (s->c != -1)
3201 tcc_error("struct/union/enum already defined");
3202 /* cannot be empty */
3203 c = 0;
3204 /* empty enums are not allowed */
3205 if (a == TOK_ENUM) {
3206 for(;;) {
3207 v = tok;
3208 if (v < TOK_UIDENT)
3209 expect("identifier");
3210 ss = sym_find(v);
3211 if (ss && !local_stack)
3212 tcc_error("redefinition of enumerator '%s'",
3213 get_tok_str(v, NULL));
3214 next();
3215 if (tok == '=') {
3216 next();
3217 c = expr_const();
3219 /* enum symbols have static storage */
3220 ss = sym_push(v, &int_type, VT_CONST, c);
3221 ss->type.t |= VT_STATIC;
3222 if (tok != ',')
3223 break;
3224 next();
3225 c++;
3226 /* NOTE: we accept a trailing comma */
3227 if (tok == '}')
3228 break;
3230 s->c = type_size(&int_type, &align);
3231 skip('}');
3232 } else {
3233 maxalign = 1;
3234 ps = &s->next;
3235 prevbt = VT_INT;
3236 bit_pos = 0;
3237 offset = 0;
3238 flexible = 0;
3239 while (tok != '}') {
3240 parse_btype(&btype, &ad1);
3241 while (1) {
3242 extra_bytes = 0;
3243 if (flexible)
3244 tcc_error("flexible array member '%s' not at the end of struct",
3245 get_tok_str(v, NULL));
3246 bit_size = -1;
3247 v = 0;
3248 type1 = btype;
3249 if (tok != ':') {
3250 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3251 if (v == 0) {
3252 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3253 expect("identifier");
3254 else {
3255 int v = btype.ref->v;
3256 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3257 if (tcc_state->ms_extensions == 0)
3258 expect("identifier");
3262 if (type_size(&type1, &align) < 0) {
3263 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3264 flexible = 1;
3265 else
3266 tcc_error("field '%s' has incomplete type",
3267 get_tok_str(v, NULL));
3269 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3270 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3271 tcc_error("invalid type for '%s'",
3272 get_tok_str(v, NULL));
3274 if (tok == ':') {
3275 next();
3276 bit_size = expr_const();
3277 /* XXX: handle v = 0 case for messages */
3278 if (bit_size < 0)
3279 tcc_error("negative width in bit-field '%s'",
3280 get_tok_str(v, NULL));
3281 if (v && bit_size == 0)
3282 tcc_error("zero width for bit-field '%s'",
3283 get_tok_str(v, NULL));
3285 size = type_size(&type1, &align);
3286 if (ad1.a.aligned) {
3287 if (align < ad1.a.aligned)
3288 align = ad1.a.aligned;
3289 } else if (ad1.a.packed) {
3290 align = 1;
3291 } else if (*tcc_state->pack_stack_ptr) {
3292 if (align > *tcc_state->pack_stack_ptr)
3293 align = *tcc_state->pack_stack_ptr;
3295 lbit_pos = 0;
3296 if (bit_size >= 0) {
3297 bt = type1.t & VT_BTYPE;
3298 if (bt != VT_INT &&
3299 bt != VT_BYTE &&
3300 bt != VT_SHORT &&
3301 bt != VT_BOOL &&
3302 bt != VT_ENUM &&
3303 bt != VT_LLONG)
3304 tcc_error("bitfields must have scalar type");
3305 bsize = size * 8;
3306 if (bit_size > bsize) {
3307 tcc_error("width of '%s' exceeds its type",
3308 get_tok_str(v, NULL));
3309 } else if (bit_size == bsize) {
3310 /* no need for bit fields */
3311 bit_pos = 0;
3312 } else if (bit_size == 0) {
3313 /* XXX: what to do if only padding in a
3314 structure ? */
3315 /* zero size: means to pad */
3316 bit_pos = 0;
3317 } else {
3318 /* if the type changes, this is a union, or the field
3319 * would overrun the alignment slot, start at a newly
3320 * aligned slot */
3321 if ((bit_pos + bit_size) > bsize ||
3322 bt != prevbt || a == TOK_UNION)
3323 bit_pos = 0;
3324 lbit_pos = bit_pos;
3325 /* XXX: handle LSB first */
3326 type1.t |= VT_BITFIELD |
3327 (bit_pos << VT_STRUCT_SHIFT) |
3328 (bit_size << (VT_STRUCT_SHIFT + 6));
3329 bit_pos += bit_size;
3330 /* without ms-bitfields, allocate the
3331 * minimum number of bytes necessary,
3332 * adding single bytes as needed */
3333 if (!tcc_state->ms_bitfields) {
3334 if (lbit_pos == 0)
3335 /* minimum bytes for new bitfield */
3336 size = (bit_size + 7) / 8;
3337 else {
3338 /* enough spare bits already allocated? */
3339 bit_size = (lbit_pos - 1) % 8 + 1 + bit_size;
3340 if (bit_size > 8) /* doesn't fit */
3341 extra_bytes = (bit_size - 1) / 8;
3345 prevbt = bt;
3346 } else {
3347 bit_pos = 0;
3349 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3350 /* add new memory data only if starting bit
3351 field or adding bytes to existing bit field */
3352 if (extra_bytes) c += extra_bytes;
3353 else if (lbit_pos == 0) {
3354 if (a == TOK_STRUCT) {
3355 c = (c + align - 1) & -align;
3356 offset = c;
3357 if (size > 0)
3358 c += size;
3359 } else {
3360 offset = 0;
3361 if (size > c)
3362 c = size;
3364 if (align > maxalign)
3365 maxalign = align;
3367 #if 0
3368 printf("add field %s offset=%d",
3369 get_tok_str(v, NULL), offset);
3370 if (type1.t & VT_BITFIELD) {
3371 printf(" pos=%d size=%d",
3372 (type1.t >> VT_STRUCT_SHIFT) & 0x3f,
3373 (type1.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3375 printf("\n");
3376 #endif
3378 if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
3379 ass = type1.ref;
3380 while ((ass = ass->next) != NULL) {
3381 ss = sym_push(ass->v, &ass->type, 0, offset + ass->c);
3382 *ps = ss;
3383 ps = &ss->next;
3385 } else if (v) {
3386 ss = sym_push(v | SYM_FIELD, &type1, 0, offset);
3387 *ps = ss;
3388 ps = &ss->next;
3390 if (tok == ';' || tok == TOK_EOF)
3391 break;
3392 skip(',');
3394 skip(';');
3396 skip('}');
3397 /* store size and alignment */
3398 s->c = (c + maxalign - 1) & -maxalign;
3399 s->r = maxalign;
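/* Illustrative sketch (hypothetical struct): how the bitfield layout loop
   above packs members.

       struct F {
           unsigned a : 3;       // starts a slot at bit 0
           unsigned b : 5;       // same slot, bits 3..7
           int      c;           // not a bitfield: placed at the next aligned offset
           unsigned d : 9, : 0;  // anonymous zero-width field forces a restart at bit 0
           unsigned e : 2;       // begins a fresh, newly allocated slot
       };
*/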
3404 /* return 1 if basic type is a type size (short, long, long long) */
3405 ST_FUNC int is_btype_size(int bt)
3407 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3410 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3411 are added to the element type, copied because it could be a typedef. */
3412 static void parse_btype_qualify(CType *type, int qualifiers)
3414 while (type->t & VT_ARRAY) {
3415 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3416 type = &type->ref->type;
3418 type->t |= qualifiers;
3421 /* return 0 if no type declaration. otherwise, return the basic type
3422 and skip it.
3423 */
3424 static int parse_btype(CType *type, AttributeDef *ad)
3426 int t, u, bt_size, complete, type_found, typespec_found;
3427 Sym *s;
3428 CType type1;
3430 memset(ad, 0, sizeof(AttributeDef));
3431 complete = 0;
3432 type_found = 0;
3433 typespec_found = 0;
3434 t = 0;
3435 while(1) {
3436 switch(tok) {
3437 case TOK_EXTENSION:
3438 /* currently, we really ignore extension */
3439 next();
3440 continue;
3442 /* basic types */
3443 case TOK_CHAR:
3444 u = VT_BYTE;
3445 basic_type:
3446 next();
3447 basic_type1:
3448 if (complete)
3449 tcc_error("too many basic types");
3450 t |= u;
3451 bt_size = is_btype_size (u & VT_BTYPE);
3452 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3453 complete = 1;
3454 typespec_found = 1;
3455 break;
3456 case TOK_VOID:
3457 u = VT_VOID;
3458 goto basic_type;
3459 case TOK_SHORT:
3460 u = VT_SHORT;
3461 goto basic_type;
3462 case TOK_INT:
3463 u = VT_INT;
3464 goto basic_type;
3465 case TOK_LONG:
3466 next();
3467 if ((t & VT_BTYPE) == VT_DOUBLE) {
3468 #ifndef TCC_TARGET_PE
3469 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3470 #endif
3471 } else if ((t & VT_BTYPE) == VT_LONG) {
3472 t = (t & ~VT_BTYPE) | VT_LLONG;
3473 } else {
3474 u = VT_LONG;
3475 goto basic_type1;
3477 break;
3478 #ifdef TCC_TARGET_ARM64
3479 case TOK_UINT128:
3480 /* GCC's __uint128_t appears in some Linux header files. Make it a
3481 synonym for long double to get the size and alignment right. */
3482 u = VT_LDOUBLE;
3483 goto basic_type;
3484 #endif
3485 case TOK_BOOL:
3486 u = VT_BOOL;
3487 goto basic_type;
3488 case TOK_FLOAT:
3489 u = VT_FLOAT;
3490 goto basic_type;
3491 case TOK_DOUBLE:
3492 next();
3493 if ((t & VT_BTYPE) == VT_LONG) {
3494 #ifdef TCC_TARGET_PE
3495 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3496 #else
3497 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3498 #endif
3499 } else {
3500 u = VT_DOUBLE;
3501 goto basic_type1;
3503 break;
3504 case TOK_ENUM:
3505 struct_decl(&type1, ad, VT_ENUM);
3506 basic_type2:
3507 u = type1.t;
3508 type->ref = type1.ref;
3509 goto basic_type1;
3510 case TOK_STRUCT:
3511 case TOK_UNION:
3512 struct_decl(&type1, ad, VT_STRUCT);
3513 goto basic_type2;
3515 /* type modifiers */
3516 case TOK_CONST1:
3517 case TOK_CONST2:
3518 case TOK_CONST3:
3519 type->t = t;
3520 parse_btype_qualify(type, VT_CONSTANT);
3521 t = type->t;
3522 next();
3523 break;
3524 case TOK_VOLATILE1:
3525 case TOK_VOLATILE2:
3526 case TOK_VOLATILE3:
3527 type->t = t;
3528 parse_btype_qualify(type, VT_VOLATILE);
3529 t = type->t;
3530 next();
3531 break;
3532 case TOK_SIGNED1:
3533 case TOK_SIGNED2:
3534 case TOK_SIGNED3:
3535 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3536 tcc_error("signed and unsigned modifier");
3537 typespec_found = 1;
3538 t |= VT_DEFSIGN;
3539 next();
3540 break;
3541 case TOK_REGISTER:
3542 case TOK_AUTO:
3543 case TOK_RESTRICT1:
3544 case TOK_RESTRICT2:
3545 case TOK_RESTRICT3:
3546 next();
3547 break;
3548 case TOK_UNSIGNED:
3549 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3550 tcc_error("signed and unsigned modifier");
3551 t |= VT_DEFSIGN | VT_UNSIGNED;
3552 next();
3553 typespec_found = 1;
3554 break;
3556 /* storage */
3557 case TOK_EXTERN:
3558 t |= VT_EXTERN;
3559 next();
3560 break;
3561 case TOK_STATIC:
3562 t |= VT_STATIC;
3563 next();
3564 break;
3565 case TOK_TYPEDEF:
3566 t |= VT_TYPEDEF;
3567 next();
3568 break;
3569 case TOK_INLINE1:
3570 case TOK_INLINE2:
3571 case TOK_INLINE3:
3572 t |= VT_INLINE;
3573 next();
3574 break;
3576 /* GNUC attribute */
3577 case TOK_ATTRIBUTE1:
3578 case TOK_ATTRIBUTE2:
3579 parse_attribute(ad);
3580 if (ad->a.mode) {
3581 u = ad->a.mode -1;
3582 t = (t & ~VT_BTYPE) | u;
3584 break;
3585 /* GNUC typeof */
3586 case TOK_TYPEOF1:
3587 case TOK_TYPEOF2:
3588 case TOK_TYPEOF3:
3589 next();
3590 parse_expr_type(&type1);
3591 /* remove all storage modifiers except typedef */
3592 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3593 goto basic_type2;
3594 default:
3595 if (typespec_found)
3596 goto the_end;
3597 s = sym_find(tok);
3598 if (!s || !(s->type.t & VT_TYPEDEF))
3599 goto the_end;
3601 type->t = ((s->type.t & ~VT_TYPEDEF) |
3602 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3603 type->ref = s->type.ref;
3604 if (t & (VT_CONSTANT | VT_VOLATILE))
3605 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3606 t = type->t;
3608 if (s->r) {
3609 /* get attributes from typedef */
3610 if (0 == ad->a.aligned)
3611 ad->a.aligned = s->a.aligned;
3612 if (0 == ad->a.func_call)
3613 ad->a.func_call = s->a.func_call;
3614 ad->a.packed |= s->a.packed;
3616 next();
3617 typespec_found = 1;
3618 break;
3620 type_found = 1;
3622 the_end:
3623 if (tcc_state->char_is_unsigned) {
3624 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3625 t |= VT_UNSIGNED;
3628 /* 'long' is never kept as its own type: it becomes int or long long */
3629 if ((t & VT_BTYPE) == VT_LONG)
3630 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3631 defined TCC_TARGET_PE
3632 t = (t & ~VT_BTYPE) | VT_INT;
3633 #else
3634 t = (t & ~VT_BTYPE) | VT_LLONG;
3635 #endif
3636 type->t = t;
3637 return type_found;
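/* Illustrative sketch (hypothetical declarations): specifier combinations
   resolved by parse_btype() above.

       unsigned long long int x;   // 'long' + 'long' -> VT_LLONG; 'unsigned' -> VT_DEFSIGN|VT_UNSIGNED
       long double ld;             // 'long' then 'double' -> VT_LDOUBLE (plain double under PE)
       typedef volatile short vs_t;
       const vs_t y;               // qualifiers written here are merged onto the typedef'ed type
*/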
3640 /* convert a function parameter type (array to pointer and function to
3641 function pointer) */
3642 static inline void convert_parameter_type(CType *pt)
3644 /* remove const and volatile qualifiers (XXX: const could be used
3645 to indicate a const function parameter) */
3646 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3647 /* array must be transformed to pointer according to ANSI C */
3648 pt->t &= ~VT_ARRAY;
3649 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3650 mk_pointer(pt);
3654 ST_FUNC void parse_asm_str(CString *astr)
3656 skip('(');
3657 /* read the string */
3658 if (tok != TOK_STR)
3659 expect("string constant");
3660 cstr_new(astr);
3661 while (tok == TOK_STR) {
3662 /* XXX: add \0 handling too ? */
3663 cstr_cat(astr, tokc.str.data, -1);
3664 next();
3666 cstr_ccat(astr, '\0');
3669 /* Parse an asm label and return the token */
3670 static int asm_label_instr(void)
3672 int v;
3673 CString astr;
3675 next();
3676 parse_asm_str(&astr);
3677 skip(')');
3678 #ifdef ASM_DEBUG
3679 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3680 #endif
3681 v = tok_alloc(astr.data, astr.size - 1)->tok;
3682 cstr_free(&astr);
3683 return v;
3686 static void post_type(CType *type, AttributeDef *ad)
3688 int n, l, t1, arg_size, align;
3689 Sym **plast, *s, *first;
3690 AttributeDef ad1;
3691 CType pt;
3693 if (tok == '(') {
3694 /* function declaration */
3695 next();
3696 l = 0;
3697 first = NULL;
3698 plast = &first;
3699 arg_size = 0;
3700 if (tok != ')') {
3701 for(;;) {
3702 /* read param name and compute offset */
3703 if (l != FUNC_OLD) {
3704 if (!parse_btype(&pt, &ad1)) {
3705 if (l) {
3706 tcc_error("invalid type");
3707 } else {
3708 l = FUNC_OLD;
3709 goto old_proto;
3712 l = FUNC_NEW;
3713 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3714 break;
3715 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3716 if ((pt.t & VT_BTYPE) == VT_VOID)
3717 tcc_error("parameter declared as void");
3718 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3719 } else {
3720 old_proto:
3721 n = tok;
3722 if (n < TOK_UIDENT)
3723 expect("identifier");
3724 pt.t = VT_INT;
3725 next();
3727 convert_parameter_type(&pt);
3728 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3729 *plast = s;
3730 plast = &s->next;
3731 if (tok == ')')
3732 break;
3733 skip(',');
3734 if (l == FUNC_NEW && tok == TOK_DOTS) {
3735 l = FUNC_ELLIPSIS;
3736 next();
3737 break;
3741 /* if no parameters, then old type prototype */
3742 if (l == 0)
3743 l = FUNC_OLD;
3744 skip(')');
3745 /* NOTE: const is ignored in returned type as it has a special
3746 meaning in gcc / C++ */
3747 type->t &= ~VT_CONSTANT;
3748 /* some ancient pre-K&R C allows a function to return an array
3749 and the array brackets to be put after the arguments, such
3750 that "int c()[]" means something like "int[] c()" */
3751 if (tok == '[') {
3752 next();
3753 skip(']'); /* only handle simple "[]" */
3754 type->t |= VT_PTR;
3756 /* we push an anonymous symbol which will contain the function prototype */
3757 ad->a.func_args = arg_size;
3758 s = sym_push(SYM_FIELD, type, 0, l);
3759 s->a = ad->a;
3760 s->next = first;
3761 type->t = VT_FUNC;
3762 type->ref = s;
3763 } else if (tok == '[') {
3764 /* array definition */
3765 next();
3766 if (tok == TOK_RESTRICT1)
3767 next();
3768 n = -1;
3769 t1 = 0;
3770 if (tok != ']') {
3771 if (!local_stack || nocode_wanted)
3772 vpushi(expr_const());
3773 else gexpr();
3774 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
3775 n = vtop->c.i;
3776 if (n < 0)
3777 tcc_error("invalid array size");
3778 } else {
3779 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
3780 tcc_error("size of variable length array should be an integer");
3781 t1 = VT_VLA;
3784 skip(']');
3785 /* parse next post type */
3786 post_type(type, ad);
3787 if (type->t == VT_FUNC)
3788 tcc_error("declaration of an array of functions");
3789 t1 |= type->t & VT_VLA;
3791 if (t1 & VT_VLA) {
3792 loc -= type_size(&int_type, &align);
3793 loc &= -align;
3794 n = loc;
3796 vla_runtime_type_size(type, &align);
3797 gen_op('*');
3798 vset(&int_type, VT_LOCAL|VT_LVAL, n);
3799 vswap();
3800 vstore();
3802 if (n != -1)
3803 vpop();
3805 /* we push an anonymous symbol which will contain the array
3806 element type */
3807 s = sym_push(SYM_FIELD, type, 0, n);
3808 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
3809 type->ref = s;
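/* Illustrative sketch (hypothetical declarations): declarators handled by
   post_type() above.

       int (*handlers[4])(void);   // array of 4 pointers to function returning int
       int n = 8;
       char buf[n];                // non-constant bound: VT_VLA, sized on the stack at run time
       int old_style();            // empty parameter list -> FUNC_OLD prototype
*/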
3813 /* Parse a type declaration (except basic type), and return the type
3814 in 'type'. 'td' is a bitmask indicating which kind of type decl is
3815 expected. 'type' should contain the basic type. 'ad' is the
3816 attribute definition of the basic type. It can be modified by
3817 type_decl().
3818 */
3819 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
3821 Sym *s;
3822 CType type1, *type2;
3823 int qualifiers, storage;
3825 while (tok == '*') {
3826 qualifiers = 0;
3827 redo:
3828 next();
3829 switch(tok) {
3830 case TOK_CONST1:
3831 case TOK_CONST2:
3832 case TOK_CONST3:
3833 qualifiers |= VT_CONSTANT;
3834 goto redo;
3835 case TOK_VOLATILE1:
3836 case TOK_VOLATILE2:
3837 case TOK_VOLATILE3:
3838 qualifiers |= VT_VOLATILE;
3839 goto redo;
3840 case TOK_RESTRICT1:
3841 case TOK_RESTRICT2:
3842 case TOK_RESTRICT3:
3843 goto redo;
3845 mk_pointer(type);
3846 type->t |= qualifiers;
3849 /* XXX: clarify attribute handling */
3850 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3851 parse_attribute(ad);
3853 /* recursive type */
3854 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
3855 type1.t = 0; /* XXX: same as int */
3856 if (tok == '(') {
3857 next();
3858 /* XXX: this is not correct to modify 'ad' at this point, but
3859 the syntax is not clear */
3860 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3861 parse_attribute(ad);
3862 type_decl(&type1, ad, v, td);
3863 skip(')');
3864 } else {
3865 /* type identifier */
3866 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
3867 *v = tok;
3868 next();
3869 } else {
3870 if (!(td & TYPE_ABSTRACT))
3871 expect("identifier");
3872 *v = 0;
3875 storage = type->t & VT_STORAGE;
3876 type->t &= ~VT_STORAGE;
3877 if (storage & VT_STATIC) {
3878 int saved_nocode_wanted = nocode_wanted;
3879 nocode_wanted = 1;
3880 post_type(type, ad);
3881 nocode_wanted = saved_nocode_wanted;
3882 } else
3883 post_type(type, ad);
3884 type->t |= storage;
3885 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3886 parse_attribute(ad);
3888 if (!type1.t)
3889 return;
3890 /* append type at the end of type1 */
3891 type2 = &type1;
3892 for(;;) {
3893 s = type2->ref;
3894 type2 = &s->type;
3895 if (!type2->t) {
3896 *type2 = *type;
3897 break;
3900 *type = type1;
3903 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
3904 ST_FUNC int lvalue_type(int t)
3906 int bt, r;
3907 r = VT_LVAL;
3908 bt = t & VT_BTYPE;
3909 if (bt == VT_BYTE || bt == VT_BOOL)
3910 r |= VT_LVAL_BYTE;
3911 else if (bt == VT_SHORT)
3912 r |= VT_LVAL_SHORT;
3913 else
3914 return r;
3915 if (t & VT_UNSIGNED)
3916 r |= VT_LVAL_UNSIGNED;
3917 return r;
3920 /* indirection with full error checking and bound check */
3921 ST_FUNC void indir(void)
3923 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
3924 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
3925 return;
3926 expect("pointer");
3928 if ((vtop->r & VT_LVAL) && !nocode_wanted)
3929 gv(RC_INT);
3930 vtop->type = *pointed_type(&vtop->type);
3931 /* Arrays and functions are never lvalues */
3932 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
3933 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
3934 vtop->r |= lvalue_type(vtop->type.t);
3935 /* if bound checking, the referenced pointer must be checked */
3936 #ifdef CONFIG_TCC_BCHECK
3937 if (tcc_state->do_bounds_check)
3938 vtop->r |= VT_MUSTBOUND;
3939 #endif
3943 /* pass a parameter to a function and do type checking and casting */
3944 static void gfunc_param_typed(Sym *func, Sym *arg)
3946 int func_type;
3947 CType type;
3949 func_type = func->c;
3950 if (func_type == FUNC_OLD ||
3951 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
3952 /* default casting : only need to convert float to double */
3953 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
3954 type.t = VT_DOUBLE;
3955 gen_cast(&type);
3956 } else if (vtop->type.t & VT_BITFIELD) {
3957 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3958 gen_cast(&type);
3960 } else if (arg == NULL) {
3961 tcc_error("too many arguments to function");
3962 } else {
3963 type = arg->type;
3964 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
3965 gen_assign_cast(&type);
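/* Illustrative sketch (hypothetical call): for an old-style or variadic
   callee, gfunc_param_typed() only applies the default promotion from float
   to double; otherwise the argument is cast to the declared parameter type.

       int printf(const char *fmt, ...);
       printf("%f %d", 1.5f, 'x');   // 1.5f is promoted to double; 'x' is already an int
       void h(short s);
       h(100000);                    // cast to short via gen_assign_cast()
*/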
3969 /* parse an expression of the form '(type)' or '(expr)' and return its
3970 type */
3971 static void parse_expr_type(CType *type)
3973 int n;
3974 AttributeDef ad;
3976 skip('(');
3977 if (parse_btype(type, &ad)) {
3978 type_decl(type, &ad, &n, TYPE_ABSTRACT);
3979 } else {
3980 expr_type(type);
3982 skip(')');
3985 static void parse_type(CType *type)
3987 AttributeDef ad;
3988 int n;
3990 if (!parse_btype(type, &ad)) {
3991 expect("type");
3993 type_decl(type, &ad, &n, TYPE_ABSTRACT);
3996 static void vpush_tokc(int t)
3998 CType type;
3999 type.t = t;
4000 type.ref = 0;
4001 vsetc(&type, VT_CONST, &tokc);
4004 ST_FUNC void unary(void)
4006 int n, t, align, size, r, sizeof_caller;
4007 CType type;
4008 Sym *s;
4009 AttributeDef ad;
4011 sizeof_caller = in_sizeof;
4012 in_sizeof = 0;
4013 /* XXX: GCC 2.95.3 does not generate a table although it should be
4014 better here */
4015 tok_next:
4016 switch(tok) {
4017 case TOK_EXTENSION:
4018 next();
4019 goto tok_next;
4020 case TOK_CINT:
4021 case TOK_CCHAR:
4022 case TOK_LCHAR:
4023 vpushi(tokc.i);
4024 next();
4025 break;
4026 case TOK_CUINT:
4027 vpush_tokc(VT_INT | VT_UNSIGNED);
4028 next();
4029 break;
4030 case TOK_CLLONG:
4031 vpush_tokc(VT_LLONG);
4032 next();
4033 break;
4034 case TOK_CULLONG:
4035 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4036 next();
4037 break;
4038 case TOK_CFLOAT:
4039 vpush_tokc(VT_FLOAT);
4040 next();
4041 break;
4042 case TOK_CDOUBLE:
4043 vpush_tokc(VT_DOUBLE);
4044 next();
4045 break;
4046 case TOK_CLDOUBLE:
4047 vpush_tokc(VT_LDOUBLE);
4048 next();
4049 break;
4050 case TOK___FUNCTION__:
4051 if (!gnu_ext)
4052 goto tok_identifier;
4053 /* fall thru */
4054 case TOK___FUNC__:
4056 void *ptr;
4057 int len;
4058 /* special function name identifier */
4059 len = strlen(funcname) + 1;
4060 /* generate char[len] type */
4061 type.t = VT_BYTE;
4062 mk_pointer(&type);
4063 type.t |= VT_ARRAY;
4064 type.ref->c = len;
4065 vpush_ref(&type, data_section, data_section->data_offset, len);
4066 ptr = section_ptr_add(data_section, len);
4067 memcpy(ptr, funcname, len);
4068 next();
4070 break;
4071 case TOK_LSTR:
4072 #ifdef TCC_TARGET_PE
4073 t = VT_SHORT | VT_UNSIGNED;
4074 #else
4075 t = VT_INT;
4076 #endif
4077 goto str_init;
4078 case TOK_STR:
4079 /* string parsing */
4080 t = VT_BYTE;
4081 str_init:
4082 if (tcc_state->warn_write_strings)
4083 t |= VT_CONSTANT;
4084 type.t = t;
4085 mk_pointer(&type);
4086 type.t |= VT_ARRAY;
4087 memset(&ad, 0, sizeof(AttributeDef));
4088 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4089 break;
4090 case '(':
4091 next();
4092 /* cast ? */
4093 if (parse_btype(&type, &ad)) {
4094 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4095 skip(')');
4096 /* check ISOC99 compound literal */
4097 if (tok == '{') {
4098 /* data is allocated locally by default */
4099 if (global_expr)
4100 r = VT_CONST;
4101 else
4102 r = VT_LOCAL;
4103 /* all except arrays are lvalues */
4104 if (!(type.t & VT_ARRAY))
4105 r |= lvalue_type(type.t);
4106 memset(&ad, 0, sizeof(AttributeDef));
4107 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4108 } else {
4109 if (sizeof_caller) {
4110 vpush(&type);
4111 return;
4113 unary();
4114 gen_cast(&type);
4116 } else if (tok == '{') {
4117 if (const_wanted)
4118 tcc_error("expected constant");
4119 /* save all registers */
4120 if (!nocode_wanted)
4121 save_regs(0);
4122 /* statement expression : we do not accept break/continue
4123 inside as GCC does */
4124 block(NULL, NULL, 1);
4125 skip(')');
4126 } else {
4127 gexpr();
4128 skip(')');
4130 break;
4131 case '*':
4132 next();
4133 unary();
4134 indir();
4135 break;
4136 case '&':
4137 next();
4138 unary();
4139 /* function names must be treated as function pointers,
4140 except for unary '&' and sizeof. Since we consider that
4141 functions are not lvalues, we only have to handle it
4142 there and in function calls. */
4143 /* arrays can also be used although they are not lvalues */
4144 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4145 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4146 test_lvalue();
4147 mk_pointer(&vtop->type);
4148 gaddrof();
4149 break;
4150 case '!':
4151 next();
4152 unary();
4153 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4154 CType boolean;
4155 boolean.t = VT_BOOL;
4156 gen_cast(&boolean);
4157 vtop->c.i = !vtop->c.i;
4158 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4159 vtop->c.i ^= 1;
4160 else {
4161 save_regs(1);
4162 vseti(VT_JMP, gvtst(1, 0));
4164 break;
4165 case '~':
4166 next();
4167 unary();
4168 vpushi(-1);
4169 gen_op('^');
4170 break;
4171 case '+':
4172 next();
4173 unary();
4174 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4175 tcc_error("pointer not accepted for unary plus");
4176 /* In order to force cast, we add zero, except for floating point
4177 where we really need a noop (otherwise -0.0 will be transformed
4178 into +0.0). */
4179 if (!is_float(vtop->type.t)) {
4180 vpushi(0);
4181 gen_op('+');
4183 break;
4184 case TOK_SIZEOF:
4185 case TOK_ALIGNOF1:
4186 case TOK_ALIGNOF2:
4187 t = tok;
4188 next();
4189 in_sizeof++;
4190 unary_type(&type); // also performs in_sizeof = 0
4191 size = type_size(&type, &align);
4192 if (t == TOK_SIZEOF) {
4193 if (!(type.t & VT_VLA)) {
4194 if (size < 0)
4195 tcc_error("sizeof applied to an incomplete type");
4196 vpushs(size);
4197 } else {
4198 vla_runtime_type_size(&type, &align);
4200 } else {
4201 vpushs(align);
4203 vtop->type.t |= VT_UNSIGNED;
4204 break;
4206 case TOK_builtin_expect:
4208 /* __builtin_expect is a no-op for now */
4209 int saved_nocode_wanted;
4210 next();
4211 skip('(');
4212 expr_eq();
4213 skip(',');
4214 saved_nocode_wanted = nocode_wanted;
4215 nocode_wanted = 1;
4216 expr_lor_const();
4217 vpop();
4218 nocode_wanted = saved_nocode_wanted;
4219 skip(')');
4221 break;
4222 case TOK_builtin_types_compatible_p:
4224 CType type1, type2;
4225 next();
4226 skip('(');
4227 parse_type(&type1);
4228 skip(',');
4229 parse_type(&type2);
4230 skip(')');
4231 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4232 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4233 vpushi(is_compatible_types(&type1, &type2));
4235 break;
4236 case TOK_builtin_constant_p:
4238 int saved_nocode_wanted, res;
4239 next();
4240 skip('(');
4241 saved_nocode_wanted = nocode_wanted;
4242 nocode_wanted = 1;
4243 gexpr();
4244 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4245 vpop();
4246 nocode_wanted = saved_nocode_wanted;
4247 skip(')');
4248 vpushi(res);
4250 break;
4251 case TOK_builtin_frame_address:
4252 case TOK_builtin_return_address:
4254 int tok1 = tok;
4255 int level;
4256 CType type;
4257 next();
4258 skip('(');
4259 if (tok != TOK_CINT) {
4260 tcc_error("%s only takes positive integers",
4261 tok1 == TOK_builtin_return_address ?
4262 "__builtin_return_address" :
4263 "__builtin_frame_address");
4265 level = (uint32_t)tokc.i;
4266 next();
4267 skip(')');
4268 type.t = VT_VOID;
4269 mk_pointer(&type);
4270 vset(&type, VT_LOCAL, 0); /* local frame */
4271 while (level--) {
4272 mk_pointer(&vtop->type);
4273 indir(); /* -> parent frame */
4275 if (tok1 == TOK_builtin_return_address) {
4276 // assume return address is just above frame pointer on stack
4277 vpushi(PTR_SIZE);
4278 gen_op('+');
4279 mk_pointer(&vtop->type);
4280 indir();
4283 break;
4284 #ifdef TCC_TARGET_X86_64
4285 #ifdef TCC_TARGET_PE
4286 case TOK_builtin_va_start:
4288 next();
4289 skip('(');
4290 expr_eq();
4291 skip(',');
4292 expr_eq();
4293 skip(')');
4294 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4295 tcc_error("__builtin_va_start expects a local variable");
4296 vtop->r &= ~(VT_LVAL | VT_REF);
4297 vtop->type = char_pointer_type;
4298 vtop->c.i += 8;
4299 vstore();
4301 break;
4302 #else
4303 case TOK_builtin_va_arg_types:
4305 CType type;
4306 next();
4307 skip('(');
4308 parse_type(&type);
4309 skip(')');
4310 vpushi(classify_x86_64_va_arg(&type));
4312 break;
4313 #endif
4314 #endif
4316 #ifdef TCC_TARGET_ARM64
4317 case TOK___va_start: {
4318 if (nocode_wanted)
4319 tcc_error("statement in global scope");
4320 next();
4321 skip('(');
4322 expr_eq();
4323 skip(',');
4324 expr_eq();
4325 skip(')');
4326 //xx check types
4327 gen_va_start();
4328 vpushi(0);
4329 vtop->type.t = VT_VOID;
4330 break;
4332 case TOK___va_arg: {
4333 CType type;
4334 if (nocode_wanted)
4335 tcc_error("statement in global scope");
4336 next();
4337 skip('(');
4338 expr_eq();
4339 skip(',');
4340 parse_type(&type);
4341 skip(')');
4342 //xx check types
4343 gen_va_arg(&type);
4344 vtop->type = type;
4345 break;
4347 case TOK___arm64_clear_cache: {
4348 next();
4349 skip('(');
4350 expr_eq();
4351 skip(',');
4352 expr_eq();
4353 skip(')');
4354 gen_clear_cache();
4355 vpushi(0);
4356 vtop->type.t = VT_VOID;
4357 break;
4359 #endif
4360 /* pre operations */
4361 case TOK_INC:
4362 case TOK_DEC:
4363 t = tok;
4364 next();
4365 unary();
4366 inc(0, t);
4367 break;
4368 case '-':
4369 next();
4370 unary();
4371 t = vtop->type.t & VT_BTYPE;
4372 if (is_float(t)) {
4373 /* In IEEE negate(x) isn't subtract(0,x), but rather
4374 subtract(-0, x). */
4375 vpush(&vtop->type);
4376 if (t == VT_FLOAT)
4377 vtop->c.f = -0.0f;
4378 else if (t == VT_DOUBLE)
4379 vtop->c.d = -0.0;
4380 else
4381 vtop->c.ld = -0.0;
4382 } else
4383 vpushi(0);
4384 vswap();
4385 gen_op('-');
4386 break;
4387 case TOK_LAND:
4388 if (!gnu_ext)
4389 goto tok_identifier;
4390 next();
4391 /* allow to take the address of a label */
4392 if (tok < TOK_UIDENT)
4393 expect("label identifier");
4394 s = label_find(tok);
4395 if (!s) {
4396 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4397 } else {
4398 if (s->r == LABEL_DECLARED)
4399 s->r = LABEL_FORWARD;
4401 if (!s->type.t) {
4402 s->type.t = VT_VOID;
4403 mk_pointer(&s->type);
4404 s->type.t |= VT_STATIC;
4406 vpushsym(&s->type, s);
4407 next();
4408 break;
4410 // special qnan, snan and infinity values
4411 case TOK___NAN__:
4412 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4413 next();
4414 break;
4415 case TOK___SNAN__:
4416 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4417 next();
4418 break;
4419 case TOK___INF__:
4420 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4421 next();
4422 break;
4424 default:
4425 tok_identifier:
4426 t = tok;
4427 next();
4428 if (t < TOK_UIDENT)
4429 expect("identifier");
4430 s = sym_find(t);
4431 if (!s) {
4432 const char *name = get_tok_str(t, NULL);
4433 if (tok != '(')
4434 tcc_error("'%s' undeclared", name);
4435 /* for simple function calls, we tolerate undeclared
4436 external reference to int() function */
4437 if (tcc_state->warn_implicit_function_declaration
4438 #ifdef TCC_TARGET_PE
4439 /* people must be warned about using undeclared WINAPI functions
4440 (which usually start with uppercase letter) */
4441 || (name[0] >= 'A' && name[0] <= 'Z')
4442 #endif
4443 )
4444 tcc_warning("implicit declaration of function '%s'", name);
4445 s = external_global_sym(t, &func_old_type, 0);
4446 }
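/* e.g. compiling  int main(void) { return somefn(1); }  with no
   prototype in scope: tcc warns about the implicit declaration and
   creates an external symbol for 'somefn' with the old-style type
   'int()' (func_old_type), so the call still compiles ('somefn' is
   just an illustrative name). */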
4447 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4448 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4449 /* if referencing an inline function, then we generate a
4450 symbol for it if not already done. This has the
4451 effect of generating code for it at the end of the
4452 compilation unit. Inline functions are always
4453 generated in the text section. */
4454 if (!s->c)
4455 put_extern_sym(s, text_section, 0, 0);
4456 r = VT_SYM | VT_CONST;
4457 } else {
4458 r = s->r;
4459 }
4460 vset(&s->type, r, s->c);
4461 /* if forward reference, we must point to s */
4462 if (vtop->r & VT_SYM) {
4463 vtop->sym = s;
4464 vtop->c.i = 0;
4465 }
4466 break;
4467 }
4469 /* post operations */
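/* Postfix operators are folded in left to right, each one rewriting
   the value on top of the vstack: x++ / x--, member access ('.' and
   '->', compiled as a cast to 'char *' plus the field offset and a
   retype to the member type), indexing (a[i] parsed as *(a + i)),
   and function calls. */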
4470 while (1) {
4471 if (tok == TOK_INC || tok == TOK_DEC) {
4472 inc(1, tok);
4473 next();
4474 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4475 int qualifiers;
4476 /* field */
4477 if (tok == TOK_ARROW)
4478 indir();
4479 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4480 test_lvalue();
4481 gaddrof();
4482 /* expect a pointer to a structure */
4483 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4484 expect("struct or union");
4485 if (tok == TOK_CDOUBLE)
4486 expect("field name");
4487 next();
4488 if (tok == TOK_CINT || tok == TOK_CUINT)
4489 expect("field name");
4490 s = vtop->type.ref;
4491 /* find field */
4492 tok |= SYM_FIELD;
4493 while ((s = s->next) != NULL) {
4494 if (s->v == tok)
4495 break;
4496 }
4497 if (!s)
4498 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4499 /* add field offset to pointer */
4500 vtop->type = char_pointer_type; /* change type to 'char *' */
4501 vpushi(s->c);
4502 gen_op('+');
4503 /* change type to field type, and set to lvalue */
4504 vtop->type = s->type;
4505 vtop->type.t |= qualifiers;
4506 /* an array is never an lvalue */
4507 if (!(vtop->type.t & VT_ARRAY)) {
4508 vtop->r |= lvalue_type(vtop->type.t);
4509 #ifdef CONFIG_TCC_BCHECK
4510 /* if bounds checking is enabled, the referenced pointer must be checked */
4511 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4512 vtop->r |= VT_MUSTBOUND;
4513 #endif
4514 }
4515 next();
4516 } else if (tok == '[') {
4517 next();
4518 gexpr();
4519 gen_op('+');
4520 indir();
4521 skip(']');
4522 } else if (tok == '(') {
4523 SValue ret;
4524 Sym *sa;
4525 int nb_args, ret_nregs, ret_align, regsize, variadic;
4527 /* function call */
4528 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4529 /* pointer test (no array accepted) */
4530 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4531 vtop->type = *pointed_type(&vtop->type);
4532 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4533 goto error_func;
4534 } else {
4535 error_func:
4536 expect("function pointer");
4537 }
4538 } else {
4539 vtop->r &= ~VT_LVAL; /* no lvalue */
4540 }
4541 /* get return type */
4542 s = vtop->type.ref;
4543 next();
4544 sa = s->next; /* first parameter */
4545 nb_args = 0;
4546 ret.r2 = VT_CONST;
4547 /* compute first implicit argument if a structure is returned */
4548 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4549 variadic = (s->c == FUNC_ELLIPSIS);
4550 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4551 &ret_align, &regsize);
4552 if (!ret_nregs) {
4553 /* get some space for the returned structure */
4554 size = type_size(&s->type, &align);
4555 #ifdef TCC_TARGET_ARM64
4556 /* On arm64, a small struct is returned in registers.
4557 It is much easier to write it to memory if we know
4558 that we are allowed to write some extra bytes, so
4559 round the allocated space up to a power of 2: */
4560 if (size < 16)
4561 while (size & (size - 1))
4562 size = (size | (size - 1)) + 1;
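/* Each pass sets all trailing zero bits (size | (size - 1)) and then
   adds one, so the low bits clear and a carry moves up; the loop ends
   when size & (size - 1) == 0, i.e. when size is a power of two.
   E.g. 12 -> 15 + 1 = 16, and 5 -> 6 -> 8 in two passes. */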
4563 #endif
4564 loc = (loc - size) & -align;
4565 ret.type = s->type;
4566 ret.r = VT_LOCAL | VT_LVAL;
4567 /* pass it as 'int' to avoid structure arg passing
4568 problems */
4569 vseti(VT_LOCAL, loc);
4570 ret.c = vtop->c;
4571 nb_args++;
4572 }
4573 } else {
4574 ret_nregs = 1;
4575 ret.type = s->type;
4576 }
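/* If the function returns a struct and gfunc_sret() says it cannot go
   in registers (ret_nregs == 0), space has been carved from the
   caller's frame and its address pushed as a hidden first argument;
   otherwise ret_nregs tells how many registers carry the (possibly
   retyped) result, which is picked up after the call below. */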
4578 if (ret_nregs) {
4579 /* return in register */
4580 if (is_float(ret.type.t)) {
4581 ret.r = reg_fret(ret.type.t);
4582 #ifdef TCC_TARGET_X86_64
4583 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4584 ret.r2 = REG_QRET;
4585 #endif
4586 } else {
4587 #ifndef TCC_TARGET_ARM64
4588 #ifdef TCC_TARGET_X86_64
4589 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4590 #else
4591 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4592 #endif
4593 ret.r2 = REG_LRET;
4594 #endif
4595 ret.r = REG_IRET;
4596 }
4597 ret.c.i = 0;
4598 }
4599 if (tok != ')') {
4600 for(;;) {
4601 expr_eq();
4602 gfunc_param_typed(s, sa);
4603 nb_args++;
4604 if (sa)
4605 sa = sa->next;
4606 if (tok == ')')
4607 break;
4608 skip(',');
4609 }
4610 }
4611 if (sa)
4612 tcc_error("too few arguments to function");
4613 skip(')');
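/* Each argument was parsed with expr_eq() and left on the vstack;
   gfunc_param_typed() converts it to the declared parameter type when
   a prototype is in scope, and for old-style or variadic tails only
   applies the default float -> double promotion. */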
4614 if (!nocode_wanted) {
4615 gfunc_call(nb_args);
4616 } else {
4617 vtop -= (nb_args + 1);
4618 }
4620 /* return value */
4621 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4622 vsetc(&ret.type, r, &ret.c);
4623 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4624 }
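/* One SValue is pushed per return register: ret_nregs is usually 1,
   and r2 supplies the second half when a value comes back in a
   register pair (e.g. long long on 32-bit targets, or VT_QLONG /
   VT_QFLOAT on x86-64).  When the struct was returned through the
   hidden pointer, ret.r is the local lvalue set up before the call
   and the loop runs once. */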
4626 /* handle packed struct return */
4627 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4628 int addr, offset;
4630 size = type_size(&s->type, &align);
4631 /* We're often writing whole registers, so make sure there's
4632 enough space. Assume the register size is a power of 2. */
4633 if (regsize > align)
4634 align = regsize;
4635 loc = (loc - size) & -align;
4636 addr = loc;
4637 offset = 0;
4638 for (;;) {
4639 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4640 vswap();
4641 vstore();
4642 vtop--;
4643 if (--ret_nregs == 0)
4644 break;
4645 offset += regsize;
4646 }
4647 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4648 }
4649 } else {
4650 break;
4651 }
4652 }
4653 }
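/* The binary-expression parsers below implement classic recursive
   descent by precedence level: each function parses the next tighter
   level, then folds its own left-associative operators with gen_op().
   E.g. for "1 + 2 * 3", expr_sum() calls expr_prod(), which consumes
   2 * 3 and emits the multiply, before expr_sum() emits the add. */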
4655 ST_FUNC void expr_prod(void)
4656 {
4657 int t;
4659 unary();
4660 while (tok == '*' || tok == '/' || tok == '%') {
4661 t = tok;
4662 next();
4663 unary();
4664 gen_op(t);
4665 }
4666 }
4668 ST_FUNC void expr_sum(void)
4669 {
4670 int t;
4672 expr_prod();
4673 while (tok == '+' || tok == '-') {
4674 t = tok;
4675 next();
4676 expr_prod();
4677 gen_op(t);
4678 }
4679 }
4681 static void expr_shift(void)
4682 {
4683 int t;
4685 expr_sum();
4686 while (tok == TOK_SHL || tok == TOK_SAR) {
4687 t = tok;
4688 next();
4689 expr_sum();
4690 gen_op(t);
4691 }
4692 }
4694 static void expr_cmp(void)
4695 {
4696 int t;
4698 expr_shift();
4699 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4700 tok == TOK_ULT || tok == TOK_UGE) {
4701 t = tok;
4702 next();
4703 expr_shift();
4704 gen_op(t);
4705 }
4706 }
4708 static void expr_cmpeq(void)
4709 {
4710 int t;
4712 expr_cmp();
4713 while (tok == TOK_EQ || tok == TOK_NE) {
4714 t = tok;
4715 next();
4716 expr_cmp();
4717 gen_op(t);