[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
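/* Illustration (assuming the little-endian layout the XXX above refers to):
   p[1] holds the sign, the 11 exponent bits and the top of the mantissa.
   OR-ing with 0x800fffff forces everything except the exponent to 1, so the
   +1 wraps the 32-bit word only when the exponent is all ones (Inf/NaN).
   E.g. for 1.0 (exponent 0x3ff) the sum is 0xc0000000 and the function
   returns 1; for +Inf (exponent 0x7ff) the sum wraps to 0 and it returns 0. */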
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
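/* Usage sketch: flip the #if above to 1 and call e.g. pv("gen_opl A", 0, 2)
   (see the commented-out calls in gen_opl below) to dump the top two vstack
   entries while debugging value-stack problems. */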
142 /* ------------------------------------------------------------------------- */
143 /* start of translation unit info */
144 ST_FUNC void tcc_debug_start(TCCState *s1)
146 if (s1->do_debug) {
147 char buf[512];
149 /* file info: full path + filename */
150 section_sym = put_elf_sym(symtab_section, 0, 0,
151 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
152 text_section->sh_num, NULL);
153 getcwd(buf, sizeof(buf));
154 #ifdef _WIN32
155 normalize_slashes(buf);
156 #endif
157 pstrcat(buf, sizeof(buf), "/");
158 put_stabs_r(buf, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 put_stabs_r(file->filename, N_SO, 0, 0,
161 text_section->data_offset, text_section, section_sym);
162 last_ind = 0;
163 last_line_num = 0;
166 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
167 symbols can be safely used */
168 put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
170 SHN_ABS, file->filename);
173 /* put end of translation unit info */
174 ST_FUNC void tcc_debug_end(TCCState *s1)
176 if (!s1->do_debug)
177 return;
178 put_stabs_r(NULL, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
183 /* generate line number info */
184 ST_FUNC void tcc_debug_line(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 if ((last_line_num != file->line_num || last_ind != ind)) {
189 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
190 last_ind = ind;
191 last_line_num = file->line_num;
195 /* put function symbol */
196 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
198 char buf[512];
200 if (!s1->do_debug)
201 return;
203 /* stabs info */
204 /* XXX: we put here a dummy type */
205 snprintf(buf, sizeof(buf), "%s:%c1",
206 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
207 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
208 cur_text_section, sym->c);
209 /* //gr gdb wants a line at the function */
210 put_stabn(N_SLINE, 0, file->line_num, 0);
212 last_ind = 0;
213 last_line_num = 0;
216 /* put function size */
217 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
219 if (!s1->do_debug)
220 return;
221 put_stabn(N_FUN, 0, 0, size);
224 /* ------------------------------------------------------------------------- */
225 ST_FUNC void tccgen_start(TCCState *s1)
227 cur_text_section = NULL;
228 funcname = "";
229 anon_sym = SYM_FIRST_ANOM;
230 section_sym = 0;
231 const_wanted = 0;
232 nocode_wanted = 1;
234 /* define some often used types */
235 int_type.t = VT_INT;
236 char_pointer_type.t = VT_BYTE;
237 mk_pointer(&char_pointer_type);
238 #if PTR_SIZE == 4
239 size_type.t = VT_INT;
240 #else
241 size_type.t = VT_LLONG;
242 #endif
243 func_old_type.t = VT_FUNC;
244 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
246 tcc_debug_start(s1);
248 #ifdef TCC_TARGET_ARM
249 arm_init(s1);
250 #endif
253 ST_FUNC void tccgen_end(TCCState *s1)
255 gen_inline_functions(s1);
256 check_vstack();
257 /* end of translation unit info */
258 tcc_debug_end(s1);
261 /* ------------------------------------------------------------------------- */
262 /* apply storage attributes to ELF symbol */
264 static void update_storage(Sym *sym)
266 int t;
267 ElfW(Sym) *esym;
269 if (0 == sym->c)
270 return;
272 t = sym->type.t;
273 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
275 if (t & VT_VIS_MASK)
276 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
277 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
279 if (t & VT_WEAK)
280 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
282 #ifdef TCC_TARGET_PE
283 if (t & VT_EXPORT)
284 esym->st_other |= ST_PE_EXPORT;
285 #endif
288 /* ------------------------------------------------------------------------- */
289 /* update sym->c so that it points to an external symbol in section
290 'section' with value 'value' */
292 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
293 addr_t value, unsigned long size,
294 int can_add_underscore)
296 int sym_type, sym_bind, sh_num, info, other, t;
297 ElfW(Sym) *esym;
298 const char *name;
299 char buf1[256];
300 #ifdef CONFIG_TCC_BCHECK
301 char buf[32];
302 #endif
304 if (section == NULL)
305 sh_num = SHN_UNDEF;
306 else if (section == SECTION_ABS)
307 sh_num = SHN_ABS;
308 else if (section == SECTION_COMMON)
309 sh_num = SHN_COMMON;
310 else
311 sh_num = section->sh_num;
313 if (!sym->c) {
314 name = get_tok_str(sym->v, NULL);
315 #ifdef CONFIG_TCC_BCHECK
316 if (tcc_state->do_bounds_check) {
317 /* XXX: avoid doing that for statics ? */
318 /* if bound checking is activated, we change some function
319 names by adding the "__bound" prefix */
320 switch(sym->v) {
321 #ifdef TCC_TARGET_PE
322 /* XXX: we rely only on malloc hooks */
323 case TOK_malloc:
324 case TOK_free:
325 case TOK_realloc:
326 case TOK_memalign:
327 case TOK_calloc:
328 #endif
329 case TOK_memcpy:
330 case TOK_memmove:
331 case TOK_memset:
332 case TOK_strlen:
333 case TOK_strcpy:
334 case TOK_alloca:
335 strcpy(buf, "__bound_");
336 strcat(buf, name);
337 name = buf;
338 break;
341 #endif
342 t = sym->type.t;
343 if ((t & VT_BTYPE) == VT_FUNC) {
344 sym_type = STT_FUNC;
345 } else if ((t & VT_BTYPE) == VT_VOID) {
346 sym_type = STT_NOTYPE;
347 } else {
348 sym_type = STT_OBJECT;
350 if (t & VT_STATIC)
351 sym_bind = STB_LOCAL;
352 else
353 sym_bind = STB_GLOBAL;
354 other = 0;
355 #ifdef TCC_TARGET_PE
356 if (sym_type == STT_FUNC && sym->type.ref) {
357 Sym *ref = sym->type.ref;
358 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
359 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
360 name = buf1;
361 other |= ST_PE_STDCALL;
362 can_add_underscore = 0;
365 if (t & VT_IMPORT)
366 other |= ST_PE_IMPORT;
367 #endif
368 if (tcc_state->leading_underscore && can_add_underscore) {
369 buf1[0] = '_';
370 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
371 name = buf1;
373 if (sym->asm_label)
374 name = get_tok_str(sym->asm_label, NULL);
375 info = ELFW(ST_INFO)(sym_bind, sym_type);
376 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
377 } else {
378 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
379 esym->st_value = value;
380 esym->st_size = size;
381 esym->st_shndx = sh_num;
383 update_storage(sym);
386 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
387 addr_t value, unsigned long size)
389 put_extern_sym2(sym, section, value, size, 1);
392 /* add a new relocation entry to symbol 'sym' in section 's' */
393 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
394 addr_t addend)
396 int c = 0;
398 if (nocode_wanted && s == cur_text_section)
399 return;
401 if (sym) {
402 if (0 == sym->c)
403 put_extern_sym(sym, NULL, 0, 0);
404 c = sym->c;
407 /* now we can add ELF relocation info */
408 put_elf_reloca(symtab_section, s, offset, type, c, addend);
411 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
413 greloca(s, sym, offset, type, 0);
416 /* ------------------------------------------------------------------------- */
417 /* symbol allocator */
418 static Sym *__sym_malloc(void)
420 Sym *sym_pool, *sym, *last_sym;
421 int i;
423 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
424 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
426 last_sym = sym_free_first;
427 sym = sym_pool;
428 for(i = 0; i < SYM_POOL_NB; i++) {
429 sym->next = last_sym;
430 last_sym = sym;
431 sym++;
433 sym_free_first = last_sym;
434 return last_sym;
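/* Note: each call above carves one pool of SYM_POOL_NB Syms out of a single
   tcc_malloc() and threads every element onto the free list headed by
   sym_free_first; sym_malloc() then pops from that list and sym_free()
   pushes back, so individual Syms are never returned to the heap one by one. */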
437 static inline Sym *sym_malloc(void)
439 Sym *sym;
440 #ifndef SYM_DEBUG
441 sym = sym_free_first;
442 if (!sym)
443 sym = __sym_malloc();
444 sym_free_first = sym->next;
445 return sym;
446 #else
447 sym = tcc_malloc(sizeof(Sym));
448 return sym;
449 #endif
452 ST_INLN void sym_free(Sym *sym)
454 #ifndef SYM_DEBUG
455 sym->next = sym_free_first;
456 sym_free_first = sym;
457 #else
458 tcc_free(sym);
459 #endif
462 /* push, without hashing */
463 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
465 Sym *s;
467 s = sym_malloc();
468 s->scope = 0;
469 s->v = v;
470 s->type.t = t;
471 s->type.ref = NULL;
472 #ifdef _WIN64
473 s->d = NULL;
474 #endif
475 s->c = c;
476 s->next = NULL;
477 /* add in stack */
478 s->prev = *ps;
479 *ps = s;
480 return s;
483 /* find a symbol and return its associated structure. 's' is the top
484 of the symbol stack */
485 ST_FUNC Sym *sym_find2(Sym *s, int v)
487 while (s) {
488 if (s->v == v)
489 return s;
490 else if (s->v == -1)
491 return NULL;
492 s = s->prev;
494 return NULL;
497 /* structure lookup */
498 ST_INLN Sym *struct_find(int v)
500 v -= TOK_IDENT;
501 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
502 return NULL;
503 return table_ident[v]->sym_struct;
506 /* find an identifier */
507 ST_INLN Sym *sym_find(int v)
509 v -= TOK_IDENT;
510 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
511 return NULL;
512 return table_ident[v]->sym_identifier;
515 /* push a given symbol on the symbol stack */
516 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
518 Sym *s, **ps;
519 TokenSym *ts;
521 if (local_stack)
522 ps = &local_stack;
523 else
524 ps = &global_stack;
525 s = sym_push2(ps, v, type->t, c);
526 s->type.ref = type->ref;
527 s->r = r;
528 /* don't record fields or anonymous symbols */
529 /* XXX: simplify */
530 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
531 /* record symbol in token array */
532 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
533 if (v & SYM_STRUCT)
534 ps = &ts->sym_struct;
535 else
536 ps = &ts->sym_identifier;
537 s->prev_tok = *ps;
538 *ps = s;
539 s->scope = local_scope;
540 if (s->prev_tok && s->prev_tok->scope == s->scope)
541 tcc_error("redeclaration of '%s'",
542 get_tok_str(v & ~SYM_STRUCT, NULL));
544 return s;
547 /* push a global identifier */
548 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
550 Sym *s, **ps;
551 s = sym_push2(&global_stack, v, t, c);
552 /* don't record anonymous symbol */
553 if (v < SYM_FIRST_ANOM) {
554 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
555 /* modify the top most local identifier, so that
556 sym_identifier will point to 's' when popped */
557 while (*ps != NULL)
558 ps = &(*ps)->prev_tok;
559 s->prev_tok = NULL;
560 *ps = s;
562 return s;
565 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
566 pop them yet from the list, but do remove them from the token array. */
567 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
569 Sym *s, *ss, **ps;
570 TokenSym *ts;
571 int v;
573 s = *ptop;
574 while(s != b) {
575 ss = s->prev;
576 v = s->v;
577 /* remove symbol in token array */
578 /* XXX: simplify */
579 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
580 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
581 if (v & SYM_STRUCT)
582 ps = &ts->sym_struct;
583 else
584 ps = &ts->sym_identifier;
585 *ps = s->prev_tok;
587 if (!keep)
588 sym_free(s);
589 s = ss;
591 if (!keep)
592 *ptop = b;
595 /* ------------------------------------------------------------------------- */
597 static void vsetc(CType *type, int r, CValue *vc)
599 int v;
601 if (vtop >= vstack + (VSTACK_SIZE - 1))
602 tcc_error("memory full (vstack)");
603 /* cannot leave the CPU flags live if other instructions are generated. Also
604 avoid leaving VT_JMP anywhere except on the top of the stack
605 because it would complicate the code generator.
607 Don't do this when nocode_wanted. vtop might come from
608 !nocode_wanted regions (see 88_codeopt.c) and transforming
609 it to a register without actually generating code is wrong
610 as their value might still be used for real. All values
611 we push under nocode_wanted will eventually be popped
612 again, so that the VT_CMP/VT_JMP value will be in vtop
613 when code is unsuppressed again.
615 Same logic below in vswap(); */
616 if (vtop >= vstack && !nocode_wanted) {
617 v = vtop->r & VT_VALMASK;
618 if (v == VT_CMP || (v & ~1) == VT_JMP)
619 gv(RC_INT);
622 vtop++;
623 vtop->type = *type;
624 vtop->r = r;
625 vtop->r2 = VT_CONST;
626 vtop->c = *vc;
627 vtop->sym = NULL;
630 ST_FUNC void vswap(void)
632 SValue tmp;
633 /* cannot vswap cpu flags. See comment at vsetc() above */
634 if (vtop >= vstack && !nocode_wanted) {
635 int v = vtop->r & VT_VALMASK;
636 if (v == VT_CMP || (v & ~1) == VT_JMP)
637 gv(RC_INT);
639 tmp = vtop[0];
640 vtop[0] = vtop[-1];
641 vtop[-1] = tmp;
644 /* pop stack value */
645 ST_FUNC void vpop(void)
647 int v;
648 v = vtop->r & VT_VALMASK;
649 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
650 /* for x86, we need to pop the FP stack */
651 if (v == TREG_ST0) {
652 o(0xd8dd); /* fstp %st(0) */
653 } else
654 #endif
655 if (v == VT_JMP || v == VT_JMPI) {
656 /* need to resolve the jump correctly if && or || was used without a test */
657 gsym(vtop->c.i);
659 vtop--;
662 /* push constant of type "type" with useless value */
663 ST_FUNC void vpush(CType *type)
665 CValue cval;
666 vsetc(type, VT_CONST, &cval);
669 /* push integer constant */
670 ST_FUNC void vpushi(int v)
672 CValue cval;
673 cval.i = v;
674 vsetc(&int_type, VT_CONST, &cval);
677 /* push a pointer sized constant */
678 static void vpushs(addr_t v)
680 CValue cval;
681 cval.i = v;
682 vsetc(&size_type, VT_CONST, &cval);
685 /* push arbitrary 64bit constant */
686 ST_FUNC void vpush64(int ty, unsigned long long v)
688 CValue cval;
689 CType ctype;
690 ctype.t = ty;
691 ctype.ref = NULL;
692 cval.i = v;
693 vsetc(&ctype, VT_CONST, &cval);
696 /* push long long constant */
697 static inline void vpushll(long long v)
699 vpush64(VT_LLONG, v);
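/* Example of how these push helpers combine with the code further below: in
   a constant expression such as 1 + 2, two vpushi() calls put VT_CONST
   values on the vstack and gen_op('+') folds them in gen_opic() without
   emitting any code. */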
702 ST_FUNC void vset(CType *type, int r, long v)
704 CValue cval;
706 cval.i = v;
707 vsetc(type, r, &cval);
710 static void vseti(int r, int v)
712 CType type;
713 type.t = VT_INT;
714 type.ref = 0;
715 vset(&type, r, v);
718 ST_FUNC void vpushv(SValue *v)
720 if (vtop >= vstack + (VSTACK_SIZE - 1))
721 tcc_error("memory full (vstack)");
722 vtop++;
723 *vtop = *v;
726 static void vdup(void)
728 vpushv(vtop);
731 /* rotate n first stack elements to the bottom
732 I1 ... In -> I2 ... In I1 [top is right] */
734 ST_FUNC void vrotb(int n)
736 int i;
737 SValue tmp;
739 tmp = vtop[-n + 1];
740 for(i=-n+1;i!=0;i++)
741 vtop[i] = vtop[i+1];
742 vtop[0] = tmp;
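/* e.g. with three entries A B C (C on top), vrotb(3) leaves B C A:
   the bottom element of the rotated group ends up on top. */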
745 /* rotate the n elements before entry e towards the top
746 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
748 ST_FUNC void vrote(SValue *e, int n)
750 int i;
751 SValue tmp;
753 tmp = *e;
754 for(i = 0;i < n - 1; i++)
755 e[-i] = e[-i - 1];
756 e[-n + 1] = tmp;
759 /* rotate n first stack elements to the top
760 I1 ... In -> In I1 ... I(n-1) [top is right] */
762 ST_FUNC void vrott(int n)
764 vrote(vtop, n);
767 /* push a symbol value of TYPE */
768 static inline void vpushsym(CType *type, Sym *sym)
770 CValue cval;
771 cval.i = 0;
772 vsetc(type, VT_CONST | VT_SYM, &cval);
773 vtop->sym = sym;
776 /* Return a static symbol pointing to a section */
777 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
779 int v;
780 Sym *sym;
782 v = anon_sym++;
783 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
784 sym->type.ref = type->ref;
785 sym->r = VT_CONST | VT_SYM;
786 put_extern_sym(sym, sec, offset, size);
787 return sym;
790 /* push a reference to a section offset by adding a dummy symbol */
791 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
793 vpushsym(type, get_sym_ref(type, sec, offset, size));
796 /* define a new external reference to a symbol 'v' of type 'u' */
797 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
799 Sym *s;
801 s = sym_find(v);
802 if (!s) {
803 /* push forward reference */
804 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
805 s->type.ref = type->ref;
806 s->r = r | VT_CONST | VT_SYM;
808 return s;
811 /* Merge some storage attributes. */
812 static void patch_storage(Sym *sym, CType *type)
814 int t;
815 if (!is_compatible_types(&sym->type, type))
816 tcc_error("incompatible types for redefinition of '%s'",
817 get_tok_str(sym->v, NULL));
818 t = type->t;
819 #ifdef TCC_TARGET_PE
820 if ((sym->type.t ^ t) & VT_IMPORT)
821 tcc_error("incompatible dll linkage for redefinition of '%s'",
822 get_tok_str(sym->v, NULL));
823 #endif
824 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
825 if (t & VT_VIS_MASK) {
826 int vis = sym->type.t & VT_VIS_MASK;
827 int vis2 = t & VT_VIS_MASK;
828 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
829 vis = vis2;
830 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
831 vis = (vis < vis2) ? vis : vis2;
832 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
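/* i.e. the numerically smaller (more restrictive) non-default visibility
   wins: combining declarations marked STV_HIDDEN and STV_PROTECTED keeps
   hidden, and a default-visibility declaration never weakens an explicit one. */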
836 /* define a new external reference to a symbol 'v' */
837 static Sym *external_sym(int v, CType *type, int r)
839 Sym *s;
840 s = sym_find(v);
841 if (!s) {
842 /* push forward reference */
843 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
844 s->type.t |= VT_EXTERN;
845 } else {
846 if (s->type.ref == func_old_type.ref) {
847 s->type.ref = type->ref;
848 s->r = r | VT_CONST | VT_SYM;
849 s->type.t |= VT_EXTERN;
851 patch_storage(s, type);
852 update_storage(s);
854 return s;
857 /* push a reference to global symbol v */
858 ST_FUNC void vpush_global_sym(CType *type, int v)
860 vpushsym(type, external_global_sym(v, type, 0));
863 /* save registers up to (vtop - n) stack entry */
864 ST_FUNC void save_regs(int n)
866 SValue *p, *p1;
867 for(p = vstack, p1 = vtop - n; p <= p1; p++)
868 save_reg(p->r);
871 /* save r to the memory stack, and mark it as being free */
872 ST_FUNC void save_reg(int r)
874 save_reg_upstack(r, 0);
877 /* save r to the memory stack, and mark it as being free,
878 if seen up to (vtop - n) stack entry */
879 ST_FUNC void save_reg_upstack(int r, int n)
881 int l, saved, size, align;
882 SValue *p, *p1, sv;
883 CType *type;
885 if ((r &= VT_VALMASK) >= VT_CONST)
886 return;
887 if (nocode_wanted)
888 return;
890 /* modify all stack values */
891 saved = 0;
892 l = 0;
893 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
894 if ((p->r & VT_VALMASK) == r ||
895 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
896 /* must save value on stack if not already done */
897 if (!saved) {
898 /* NOTE: must reload 'r' because r might be equal to r2 */
899 r = p->r & VT_VALMASK;
900 /* store register in the stack */
901 type = &p->type;
902 if ((p->r & VT_LVAL) ||
903 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
904 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
905 type = &char_pointer_type;
906 #else
907 type = &int_type;
908 #endif
909 if ((type->t & VT_BTYPE) == VT_FLOAT) {
910 /* cast to DOUBLE to avoid precision loss */
911 type->t = (type->t & ~VT_BTYPE) | VT_DOUBLE;
913 size = type_size(type, &align);
914 loc = (loc - size) & -align;
915 sv.type.t = type->t;
916 sv.r = VT_LOCAL | VT_LVAL;
917 sv.c.i = loc;
918 store(r, &sv);
919 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
920 /* x86 specific: need to pop fp register ST0 if saved */
921 if (r == TREG_ST0) {
922 o(0xd8dd); /* fstp %st(0) */
924 #endif
925 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
926 /* special long long case */
927 if ((type->t & VT_BTYPE) == VT_LLONG) {
928 sv.c.i += 4;
929 store(p->r2, &sv);
931 #endif
932 l = loc;
933 saved = 1;
935 /* mark that stack entry as being saved on the stack */
936 if (p->r & VT_LVAL) {
937 /* also clear the bounded flag because the
938 relocation address of the function was stored in
939 p->c.i */
940 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
941 } else {
942 p->r = lvalue_type(p->type.t) | VT_LOCAL;
944 p->r2 = VT_CONST;
945 p->c.i = l;
950 #ifdef TCC_TARGET_ARM
951 /* find a register of class 'rc2' with at most one reference on stack.
952 * If none, call get_reg(rc) */
953 ST_FUNC int get_reg_ex(int rc, int rc2)
955 int r;
956 SValue *p;
958 for(r=0;r<NB_REGS;r++) {
959 if (reg_classes[r] & rc2) {
960 int n;
961 n=0;
962 for(p = vstack; p <= vtop; p++) {
963 if ((p->r & VT_VALMASK) == r ||
964 (p->r2 & VT_VALMASK) == r)
965 n++;
967 if (n <= 1)
968 return r;
971 return get_reg(rc);
973 #endif
975 /* find a free register of class 'rc'. If none, save one register */
976 ST_FUNC int get_reg(int rc)
978 int r;
979 SValue *p;
981 /* find a free register */
982 for(r=0;r<NB_REGS;r++) {
983 if (reg_classes[r] & rc) {
984 if (nocode_wanted)
985 return r;
986 for(p=vstack;p<=vtop;p++) {
987 if ((p->r & VT_VALMASK) == r ||
988 (p->r2 & VT_VALMASK) == r)
989 goto notfound;
991 return r;
993 notfound: ;
996 /* no register left : free the first one on the stack (VERY
997 IMPORTANT to start from the bottom to ensure that we don't
998 spill registers used in gen_opi()) */
999 for(p=vstack;p<=vtop;p++) {
1000 /* look at second register (if long long) */
1001 r = p->r2 & VT_VALMASK;
1002 if (r < VT_CONST && (reg_classes[r] & rc))
1003 goto save_found;
1004 r = p->r & VT_VALMASK;
1005 if (r < VT_CONST && (reg_classes[r] & rc)) {
1006 save_found:
1007 save_reg(r);
1008 return r;
1011 /* Should never come here */
1012 return -1;
1015 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1016 if needed */
1017 static void move_reg(int r, int s, int t)
1019 SValue sv;
1021 if (r != s) {
1022 save_reg(r);
1023 sv.type.t = t;
1024 sv.type.ref = NULL;
1025 sv.r = s;
1026 sv.c.i = 0;
1027 load(r, &sv);
1031 /* get address of vtop (vtop MUST BE an lvalue) */
1032 ST_FUNC void gaddrof(void)
1034 if (vtop->r & VT_REF)
1035 gv(RC_INT);
1036 vtop->r &= ~VT_LVAL;
1037 /* tricky: if saved lvalue, then we can go back to lvalue */
1038 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1039 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1044 #ifdef CONFIG_TCC_BCHECK
1045 /* generate lvalue bound code */
1046 static void gbound(void)
1048 int lval_type;
1049 CType type1;
1051 vtop->r &= ~VT_MUSTBOUND;
1052 /* if lvalue, then use checking code before dereferencing */
1053 if (vtop->r & VT_LVAL) {
1054 /* if not VT_BOUNDED value, then make one */
1055 if (!(vtop->r & VT_BOUNDED)) {
1056 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1057 /* must save type because we must set it to int to get pointer */
1058 type1 = vtop->type;
1059 vtop->type.t = VT_PTR;
1060 gaddrof();
1061 vpushi(0);
1062 gen_bounded_ptr_add();
1063 vtop->r |= lval_type;
1064 vtop->type = type1;
1066 /* then check for dereferencing */
1067 gen_bounded_ptr_deref();
1070 #endif
1072 /* store vtop in a register belonging to class 'rc'. lvalues are
1073 converted to values. Cannot be used if the value cannot be
1074 converted to a register value (such as structures). */
1075 ST_FUNC int gv(int rc)
1077 int r, bit_pos, bit_size, size, align, i;
1078 int rc2;
1080 /* NOTE: get_reg can modify vstack[] */
1081 if (vtop->type.t & VT_BITFIELD) {
1082 CType type;
1083 int bits = 32;
1084 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1085 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1086 /* remove bit field info to avoid loops */
1087 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1088 /* cast to int to propagate signedness in following ops */
1089 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1090 type.t = VT_LLONG;
1091 bits = 64;
1092 } else
1093 type.t = VT_INT;
1094 if((vtop->type.t & VT_UNSIGNED) ||
1095 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1096 type.t |= VT_UNSIGNED;
1097 gen_cast(&type);
1098 /* generate shifts */
1099 vpushi(bits - (bit_pos + bit_size));
1100 gen_op(TOK_SHL);
1101 vpushi(bits - bit_size);
1102 /* NOTE: transformed to SHR if unsigned */
1103 gen_op(TOK_SAR);
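/* e.g. extracting a signed 3-bit field at bit_pos 2 from a 32-bit word
   shifts left by 32 - (2 + 3) = 27 and then arithmetic-shifts right by
   32 - 3 = 29, which also sign-extends the result. */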
1104 r = gv(rc);
1105 } else {
1106 if (is_float(vtop->type.t) &&
1107 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1108 Sym *sym;
1109 int *ptr;
1110 unsigned long offset;
1111 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1112 CValue check;
1113 #endif
1115 /* XXX: unify with initializers handling ? */
1116 /* CPUs usually cannot use float constants, so we store them
1117 generically in data segment */
1118 size = type_size(&vtop->type, &align);
1119 offset = (data_section->data_offset + align - 1) & -align;
1120 data_section->data_offset = offset;
1121 /* XXX: not portable yet */
1122 #if defined(__i386__) || defined(__x86_64__)
1123 /* Zero pad x87 tenbyte long doubles */
1124 if (size == LDOUBLE_SIZE) {
1125 vtop->c.tab[2] &= 0xffff;
1126 #if LDOUBLE_SIZE == 16
1127 vtop->c.tab[3] = 0;
1128 #endif
1130 #endif
1131 ptr = section_ptr_add(data_section, size);
1132 size = size >> 2;
1133 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1134 check.d = 1;
1135 if(check.tab[0])
1136 for(i=0;i<size;i++)
1137 ptr[i] = vtop->c.tab[size-1-i];
1138 else
1139 #endif
1140 for(i=0;i<size;i++)
1141 ptr[i] = vtop->c.tab[i];
1142 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1143 vtop->r |= VT_LVAL | VT_SYM;
1144 vtop->sym = sym;
1145 vtop->c.i = 0;
1147 #ifdef CONFIG_TCC_BCHECK
1148 if (vtop->r & VT_MUSTBOUND)
1149 gbound();
1150 #endif
1152 r = vtop->r & VT_VALMASK;
1153 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1154 #ifndef TCC_TARGET_ARM64
1155 if (rc == RC_IRET)
1156 rc2 = RC_LRET;
1157 #ifdef TCC_TARGET_X86_64
1158 else if (rc == RC_FRET)
1159 rc2 = RC_QRET;
1160 #endif
1161 #endif
1162 /* need to reload if:
1163 - constant
1164 - lvalue (need to dereference pointer)
1165 - already a register, but not in the right class */
1166 if (r >= VT_CONST
1167 || (vtop->r & VT_LVAL)
1168 || !(reg_classes[r] & rc)
1169 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1170 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1171 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1172 #else
1173 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1174 #endif
1177 r = get_reg(rc);
1178 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1179 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1180 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1181 #else
1182 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1183 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1184 unsigned long long ll;
1185 #endif
1186 int r2, original_type;
1187 original_type = vtop->type.t;
1188 /* two register type load : expand to two words
1189 temporarily */
1190 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1191 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1192 /* load constant */
1193 ll = vtop->c.i;
1194 vtop->c.i = ll; /* first word */
1195 load(r, vtop);
1196 vtop->r = r; /* save register value */
1197 vpushi(ll >> 32); /* second word */
1198 } else
1199 #endif
1200 if (vtop->r & VT_LVAL) {
1201 /* We do not want to modify the long long
1202 pointer here, so the safest (and least
1203 efficient) approach is to save all the other
1204 registers on the stack. XXX: totally inefficient. */
1205 #if 0
1206 save_regs(1);
1207 #else
1208 /* lvalue_save: save only if used further down the stack */
1209 save_reg_upstack(vtop->r, 1);
1210 #endif
1211 /* load from memory */
1212 vtop->type.t = load_type;
1213 load(r, vtop);
1214 vdup();
1215 vtop[-1].r = r; /* save register value */
1216 /* increment pointer to get second word */
1217 vtop->type.t = addr_type;
1218 gaddrof();
1219 vpushi(load_size);
1220 gen_op('+');
1221 vtop->r |= VT_LVAL;
1222 vtop->type.t = load_type;
1223 } else {
1224 /* move registers */
1225 load(r, vtop);
1226 vdup();
1227 vtop[-1].r = r; /* save register value */
1228 vtop->r = vtop[-1].r2;
1230 /* Allocate second register. Here we rely on the fact that
1231 get_reg() tries first to free r2 of an SValue. */
1232 r2 = get_reg(rc2);
1233 load(r2, vtop);
1234 vpop();
1235 /* write second register */
1236 vtop->r2 = r2;
1237 vtop->type.t = original_type;
1238 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1239 int t1, t;
1240 /* lvalue of scalar type : need to use lvalue type
1241 because of possible cast */
1242 t = vtop->type.t;
1243 t1 = t;
1244 /* compute memory access type */
1245 if (vtop->r & VT_REF)
1246 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1247 t = VT_PTR;
1248 #else
1249 t = VT_INT;
1250 #endif
1251 else if (vtop->r & VT_LVAL_BYTE)
1252 t = VT_BYTE;
1253 else if (vtop->r & VT_LVAL_SHORT)
1254 t = VT_SHORT;
1255 if (vtop->r & VT_LVAL_UNSIGNED)
1256 t |= VT_UNSIGNED;
1257 vtop->type.t = t;
1258 load(r, vtop);
1259 /* restore wanted type */
1260 vtop->type.t = t1;
1261 } else {
1262 /* one register type load */
1263 load(r, vtop);
1266 vtop->r = r;
1267 #ifdef TCC_TARGET_C67
1268 /* uses register pairs for doubles */
1269 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1270 vtop->r2 = r+1;
1271 #endif
1273 return r;
1276 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1277 ST_FUNC void gv2(int rc1, int rc2)
1279 int v;
1281 /* generate more generic register first. But VT_JMP or VT_CMP
1282 values must be generated first in all cases to avoid possible
1283 reload errors */
1284 v = vtop[0].r & VT_VALMASK;
1285 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1286 vswap();
1287 gv(rc1);
1288 vswap();
1289 gv(rc2);
1290 /* test if reload is needed for first register */
1291 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1292 vswap();
1293 gv(rc1);
1294 vswap();
1296 } else {
1297 gv(rc2);
1298 vswap();
1299 gv(rc1);
1300 vswap();
1301 /* test if reload is needed for first register */
1302 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1303 gv(rc2);
1308 #ifndef TCC_TARGET_ARM64
1309 /* wrapper around RC_FRET to return a register by type */
1310 static int rc_fret(int t)
1312 #ifdef TCC_TARGET_X86_64
1313 if (t == VT_LDOUBLE) {
1314 return RC_ST0;
1316 #endif
1317 return RC_FRET;
1319 #endif
1321 /* wrapper around REG_FRET to return a register by type */
1322 static int reg_fret(int t)
1324 #ifdef TCC_TARGET_X86_64
1325 if (t == VT_LDOUBLE) {
1326 return TREG_ST0;
1328 #endif
1329 return REG_FRET;
1332 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1333 /* expand 64bit on stack in two ints */
1334 static void lexpand(void)
1336 int u, v;
1337 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1338 v = vtop->r & (VT_VALMASK | VT_LVAL);
1339 if (v == VT_CONST) {
1340 vdup();
1341 vtop[0].c.i >>= 32;
1342 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1343 vdup();
1344 vtop[0].c.i += 4;
1345 } else {
1346 gv(RC_INT);
1347 vdup();
1348 vtop[0].r = vtop[-1].r2;
1349 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1351 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1353 #endif
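/* lexpand() example: a constant long long 0x100000002 becomes two VT_INT
   entries, the low word (2) below and the high word (1) on top; register
   values are expanded through their r/r2 pair instead. */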
1355 #ifdef TCC_TARGET_ARM
1356 /* expand long long on stack */
1357 ST_FUNC void lexpand_nr(void)
1359 int u,v;
1361 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1362 vdup();
1363 vtop->r2 = VT_CONST;
1364 vtop->type.t = VT_INT | u;
1365 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1366 if (v == VT_CONST) {
1367 vtop[-1].c.i = vtop->c.i;
1368 vtop->c.i = vtop->c.i >> 32;
1369 vtop->r = VT_CONST;
1370 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1371 vtop->c.i += 4;
1372 vtop->r = vtop[-1].r;
1373 } else if (v > VT_CONST) {
1374 vtop--;
1375 lexpand();
1376 } else
1377 vtop->r = vtop[-1].r2;
1378 vtop[-1].r2 = VT_CONST;
1379 vtop[-1].type.t = VT_INT | u;
1381 #endif
1383 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1384 /* build a long long from two ints */
1385 static void lbuild(int t)
1387 gv2(RC_INT, RC_INT);
1388 vtop[-1].r2 = vtop[0].r;
1389 vtop[-1].type.t = t;
1390 vpop();
1392 #endif
1394 /* convert stack entry to register and duplicate its value in another
1395 register */
1396 static void gv_dup(void)
1398 int rc, t, r, r1;
1399 SValue sv;
1401 t = vtop->type.t;
1402 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1403 if ((t & VT_BTYPE) == VT_LLONG) {
1404 lexpand();
1405 gv_dup();
1406 vswap();
1407 vrotb(3);
1408 gv_dup();
1409 vrotb(4);
1410 /* stack: H L L1 H1 */
1411 lbuild(t);
1412 vrotb(3);
1413 vrotb(3);
1414 vswap();
1415 lbuild(t);
1416 vswap();
1417 } else
1418 #endif
1420 /* duplicate value */
1421 rc = RC_INT;
1422 sv.type.t = VT_INT;
1423 if (is_float(t)) {
1424 rc = RC_FLOAT;
1425 #ifdef TCC_TARGET_X86_64
1426 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1427 rc = RC_ST0;
1429 #endif
1430 sv.type.t = t;
1432 r = gv(rc);
1433 r1 = get_reg(rc);
1434 sv.r = r;
1435 sv.c.i = 0;
1436 load(r1, &sv); /* move r to r1 */
1437 vdup();
1438 /* duplicates value */
1439 if (r != r1)
1440 vtop->r = r1;
1444 /* Generate value test
1446 * Generate a test for any value (jump, comparison and integers) */
1447 ST_FUNC int gvtst(int inv, int t)
1449 int v = vtop->r & VT_VALMASK;
1450 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1451 vpushi(0);
1452 gen_op(TOK_NE);
1454 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1455 /* constant jmp optimization */
1456 if ((vtop->c.i != 0) != inv)
1457 t = gjmp(t);
1458 vtop--;
1459 return t;
1461 return gtst(inv, t);
1464 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1465 /* generate CPU independent (unsigned) long long operations */
1466 static void gen_opl(int op)
1468 int t, a, b, op1, c, i;
1469 int func;
1470 unsigned short reg_iret = REG_IRET;
1471 unsigned short reg_lret = REG_LRET;
1472 SValue tmp;
1474 switch(op) {
1475 case '/':
1476 case TOK_PDIV:
1477 func = TOK___divdi3;
1478 goto gen_func;
1479 case TOK_UDIV:
1480 func = TOK___udivdi3;
1481 goto gen_func;
1482 case '%':
1483 func = TOK___moddi3;
1484 goto gen_mod_func;
1485 case TOK_UMOD:
1486 func = TOK___umoddi3;
1487 gen_mod_func:
1488 #ifdef TCC_ARM_EABI
1489 reg_iret = TREG_R2;
1490 reg_lret = TREG_R3;
1491 #endif
1492 gen_func:
1493 /* call generic long long function */
1494 vpush_global_sym(&func_old_type, func);
1495 vrott(3);
1496 gfunc_call(2);
1497 vpushi(0);
1498 vtop->r = reg_iret;
1499 vtop->r2 = reg_lret;
1500 break;
1501 case '^':
1502 case '&':
1503 case '|':
1504 case '*':
1505 case '+':
1506 case '-':
1507 //pv("gen_opl A",0,2);
1508 t = vtop->type.t;
1509 vswap();
1510 lexpand();
1511 vrotb(3);
1512 lexpand();
1513 /* stack: L1 H1 L2 H2 */
1514 tmp = vtop[0];
1515 vtop[0] = vtop[-3];
1516 vtop[-3] = tmp;
1517 tmp = vtop[-2];
1518 vtop[-2] = vtop[-3];
1519 vtop[-3] = tmp;
1520 vswap();
1521 /* stack: H1 H2 L1 L2 */
1522 //pv("gen_opl B",0,4);
1523 if (op == '*') {
1524 vpushv(vtop - 1);
1525 vpushv(vtop - 1);
1526 gen_op(TOK_UMULL);
1527 lexpand();
1528 /* stack: H1 H2 L1 L2 ML MH */
1529 for(i=0;i<4;i++)
1530 vrotb(6);
1531 /* stack: ML MH H1 H2 L1 L2 */
1532 tmp = vtop[0];
1533 vtop[0] = vtop[-2];
1534 vtop[-2] = tmp;
1535 /* stack: ML MH H1 L2 H2 L1 */
1536 gen_op('*');
1537 vrotb(3);
1538 vrotb(3);
1539 gen_op('*');
1540 /* stack: ML MH M1 M2 */
1541 gen_op('+');
1542 gen_op('+');
1543 } else if (op == '+' || op == '-') {
1544 /* XXX: add non carry method too (for MIPS or alpha) */
1545 if (op == '+')
1546 op1 = TOK_ADDC1;
1547 else
1548 op1 = TOK_SUBC1;
1549 gen_op(op1);
1550 /* stack: H1 H2 (L1 op L2) */
1551 vrotb(3);
1552 vrotb(3);
1553 gen_op(op1 + 1); /* TOK_xxxC2 */
1554 } else {
1555 gen_op(op);
1556 /* stack: H1 H2 (L1 op L2) */
1557 vrotb(3);
1558 vrotb(3);
1559 /* stack: (L1 op L2) H1 H2 */
1560 gen_op(op);
1561 /* stack: (L1 op L2) (H1 op H2) */
1563 /* stack: L H */
1564 lbuild(t);
1565 break;
1566 case TOK_SAR:
1567 case TOK_SHR:
1568 case TOK_SHL:
1569 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1570 t = vtop[-1].type.t;
1571 vswap();
1572 lexpand();
1573 vrotb(3);
1574 /* stack: L H shift */
1575 c = (int)vtop->c.i;
1576 /* constant: simpler */
1577 /* NOTE: all comments are for SHL. the other cases are
1578 done by swapping words */
1579 vpop();
1580 if (op != TOK_SHL)
1581 vswap();
1582 if (c >= 32) {
1583 /* stack: L H */
1584 vpop();
1585 if (c > 32) {
1586 vpushi(c - 32);
1587 gen_op(op);
1589 if (op != TOK_SAR) {
1590 vpushi(0);
1591 } else {
1592 gv_dup();
1593 vpushi(31);
1594 gen_op(TOK_SAR);
1596 vswap();
1597 } else {
1598 vswap();
1599 gv_dup();
1600 /* stack: H L L */
1601 vpushi(c);
1602 gen_op(op);
1603 vswap();
1604 vpushi(32 - c);
1605 if (op == TOK_SHL)
1606 gen_op(TOK_SHR);
1607 else
1608 gen_op(TOK_SHL);
1609 vrotb(3);
1610 /* stack: L L H */
1611 vpushi(c);
1612 if (op == TOK_SHL)
1613 gen_op(TOK_SHL);
1614 else
1615 gen_op(TOK_SHR);
1616 gen_op('|');
1618 if (op != TOK_SHL)
1619 vswap();
1620 lbuild(t);
1621 } else {
1622 /* XXX: should provide a faster fallback on x86 ? */
1623 switch(op) {
1624 case TOK_SAR:
1625 func = TOK___ashrdi3;
1626 goto gen_func;
1627 case TOK_SHR:
1628 func = TOK___lshrdi3;
1629 goto gen_func;
1630 case TOK_SHL:
1631 func = TOK___ashldi3;
1632 goto gen_func;
1635 break;
1636 default:
1637 /* compare operations */
1638 t = vtop->type.t;
1639 vswap();
1640 lexpand();
1641 vrotb(3);
1642 lexpand();
1643 /* stack: L1 H1 L2 H2 */
1644 tmp = vtop[-1];
1645 vtop[-1] = vtop[-2];
1646 vtop[-2] = tmp;
1647 /* stack: L1 L2 H1 H2 */
1648 /* compare high */
1649 op1 = op;
1650 /* when values are equal, we need to compare low words. since
1651 the jump is inverted, we invert the test too. */
1652 if (op1 == TOK_LT)
1653 op1 = TOK_LE;
1654 else if (op1 == TOK_GT)
1655 op1 = TOK_GE;
1656 else if (op1 == TOK_ULT)
1657 op1 = TOK_ULE;
1658 else if (op1 == TOK_UGT)
1659 op1 = TOK_UGE;
1660 a = 0;
1661 b = 0;
1662 gen_op(op1);
1663 if (op == TOK_NE) {
1664 b = gvtst(0, 0);
1665 } else {
1666 a = gvtst(1, 0);
1667 if (op != TOK_EQ) {
1668 /* generate non equal test */
1669 vpushi(TOK_NE);
1670 vtop->r = VT_CMP;
1671 b = gvtst(0, 0);
1674 /* compare low. Always unsigned */
1675 op1 = op;
1676 if (op1 == TOK_LT)
1677 op1 = TOK_ULT;
1678 else if (op1 == TOK_LE)
1679 op1 = TOK_ULE;
1680 else if (op1 == TOK_GT)
1681 op1 = TOK_UGT;
1682 else if (op1 == TOK_GE)
1683 op1 = TOK_UGE;
1684 gen_op(op1);
1685 a = gvtst(1, a);
1686 gsym(b);
1687 vseti(VT_JMPI, a);
1688 break;
1691 #endif
1693 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1695 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1696 return (a ^ b) >> 63 ? -x : x;
1699 static int gen_opic_lt(uint64_t a, uint64_t b)
1701 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
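/* These two helpers emulate signed 64-bit semantics on the unsigned uint64_t
   values used for constant folding: gen_opic_sdiv() divides the magnitudes
   and restores the sign from a ^ b, and gen_opic_lt() flips the sign bit of
   both operands so that an unsigned '<' matches signed order (e.g. -1 maps
   to 0x7fff...ffff and 0 maps to 0x8000...0000, so -1 < 0 holds). */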
1704 /* handle integer constant optimizations and various machine
1705 independent opt */
1706 static void gen_opic(int op)
1708 SValue *v1 = vtop - 1;
1709 SValue *v2 = vtop;
1710 int t1 = v1->type.t & VT_BTYPE;
1711 int t2 = v2->type.t & VT_BTYPE;
1712 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1713 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1714 uint64_t l1 = c1 ? v1->c.i : 0;
1715 uint64_t l2 = c2 ? v2->c.i : 0;
1716 int shm = (t1 == VT_LLONG) ? 63 : 31;
1718 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1719 l1 = ((uint32_t)l1 |
1720 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1721 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1722 l2 = ((uint32_t)l2 |
1723 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1725 if (c1 && c2) {
1726 switch(op) {
1727 case '+': l1 += l2; break;
1728 case '-': l1 -= l2; break;
1729 case '&': l1 &= l2; break;
1730 case '^': l1 ^= l2; break;
1731 case '|': l1 |= l2; break;
1732 case '*': l1 *= l2; break;
1734 case TOK_PDIV:
1735 case '/':
1736 case '%':
1737 case TOK_UDIV:
1738 case TOK_UMOD:
1739 /* if division by zero, generate explicit division */
1740 if (l2 == 0) {
1741 if (const_wanted)
1742 tcc_error("division by zero in constant");
1743 goto general_case;
1745 switch(op) {
1746 default: l1 = gen_opic_sdiv(l1, l2); break;
1747 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1748 case TOK_UDIV: l1 = l1 / l2; break;
1749 case TOK_UMOD: l1 = l1 % l2; break;
1751 break;
1752 case TOK_SHL: l1 <<= (l2 & shm); break;
1753 case TOK_SHR: l1 >>= (l2 & shm); break;
1754 case TOK_SAR:
1755 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1756 break;
1757 /* tests */
1758 case TOK_ULT: l1 = l1 < l2; break;
1759 case TOK_UGE: l1 = l1 >= l2; break;
1760 case TOK_EQ: l1 = l1 == l2; break;
1761 case TOK_NE: l1 = l1 != l2; break;
1762 case TOK_ULE: l1 = l1 <= l2; break;
1763 case TOK_UGT: l1 = l1 > l2; break;
1764 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1765 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1766 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1767 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1768 /* logical */
1769 case TOK_LAND: l1 = l1 && l2; break;
1770 case TOK_LOR: l1 = l1 || l2; break;
1771 default:
1772 goto general_case;
1774 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1775 l1 = ((uint32_t)l1 |
1776 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1777 v1->c.i = l1;
1778 vtop--;
1779 } else {
1780 /* if commutative ops, put c2 as constant */
1781 if (c1 && (op == '+' || op == '&' || op == '^' ||
1782 op == '|' || op == '*')) {
1783 vswap();
1784 c2 = c1; //c = c1, c1 = c2, c2 = c;
1785 l2 = l1; //l = l1, l1 = l2, l2 = l;
1787 if (!const_wanted &&
1788 c1 && ((l1 == 0 &&
1789 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1790 (l1 == -1 && op == TOK_SAR))) {
1791 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1792 vtop--;
1793 } else if (!const_wanted &&
1794 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1795 (l2 == -1 && op == '|') ||
1796 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1797 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1798 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1799 if (l2 == 1)
1800 vtop->c.i = 0;
1801 vswap();
1802 vtop--;
1803 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1804 op == TOK_PDIV) &&
1805 l2 == 1) ||
1806 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1807 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1808 l2 == 0) ||
1809 (op == '&' &&
1810 l2 == -1))) {
1811 /* filter out NOP operations like x*1, x-0, x&-1... */
1812 vtop--;
1813 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1814 /* try to use shifts instead of muls or divs */
1815 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1816 int n = -1;
1817 while (l2) {
1818 l2 >>= 1;
1819 n++;
1821 vtop->c.i = n;
1822 if (op == '*')
1823 op = TOK_SHL;
1824 else if (op == TOK_PDIV)
1825 op = TOK_SAR;
1826 else
1827 op = TOK_SHR;
1829 goto general_case;
1830 } else if (c2 && (op == '+' || op == '-') &&
1831 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1832 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1833 /* symbol + constant case */
1834 if (op == '-')
1835 l2 = -l2;
1836 l2 += vtop[-1].c.i;
1837 /* The backends can't always deal with addends to symbols
1838 larger than +-1<<31. Don't construct such. */
1839 if ((int)l2 != l2)
1840 goto general_case;
1841 vtop--;
1842 vtop->c.i = l2;
1843 } else {
1844 general_case:
1845 /* call low level op generator */
1846 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1847 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1848 gen_opl(op);
1849 else
1850 gen_opi(op);
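/* A few concrete folds performed above: 1 + 2 collapses to the constant 3,
   x * 8 is rewritten as x << 3, x & 0 and x * 0 are replaced by 0, x + 0 and
   x & -1 disappear entirely, and sym + 1 + 2 merges the constants into the
   symbol's addend (as long as the addend stays within 32 bits). */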
1855 /* generate a floating point operation with constant propagation */
1856 static void gen_opif(int op)
1858 int c1, c2;
1859 SValue *v1, *v2;
1860 long double f1, f2;
1862 v1 = vtop - 1;
1863 v2 = vtop;
1864 /* currently, we cannot do computations with forward symbols */
1865 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1866 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1867 if (c1 && c2) {
1868 if (v1->type.t == VT_FLOAT) {
1869 f1 = v1->c.f;
1870 f2 = v2->c.f;
1871 } else if (v1->type.t == VT_DOUBLE) {
1872 f1 = v1->c.d;
1873 f2 = v2->c.d;
1874 } else {
1875 f1 = v1->c.ld;
1876 f2 = v2->c.ld;
1879 /* NOTE: we only do constant propagation if finite number (not
1880 NaN or infinity) (ANSI spec) */
1881 if (!ieee_finite(f1) || !ieee_finite(f2))
1882 goto general_case;
1884 switch(op) {
1885 case '+': f1 += f2; break;
1886 case '-': f1 -= f2; break;
1887 case '*': f1 *= f2; break;
1888 case '/':
1889 if (f2 == 0.0) {
1890 if (const_wanted)
1891 tcc_error("division by zero in constant");
1892 goto general_case;
1894 f1 /= f2;
1895 break;
1896 /* XXX: also handles tests ? */
1897 default:
1898 goto general_case;
1900 /* XXX: overflow test ? */
1901 if (v1->type.t == VT_FLOAT) {
1902 v1->c.f = f1;
1903 } else if (v1->type.t == VT_DOUBLE) {
1904 v1->c.d = f1;
1905 } else {
1906 v1->c.ld = f1;
1908 vtop--;
1909 } else {
1910 general_case:
1911 gen_opf(op);
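/* e.g. 1.5 + 2.25 between floating-point constants folds to 3.75 at compile
   time; operands that are Inf/NaN, or not both constants, fall through to
   the target's gen_opf(). */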
1915 static int pointed_size(CType *type)
1917 int align;
1918 return type_size(pointed_type(type), &align);
1921 static void vla_runtime_pointed_size(CType *type)
1923 int align;
1924 vla_runtime_type_size(pointed_type(type), &align);
1927 static inline int is_null_pointer(SValue *p)
1929 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1930 return 0;
1931 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1932 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1933 ((p->type.t & VT_BTYPE) == VT_PTR &&
1934 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1937 static inline int is_integer_btype(int bt)
1939 return (bt == VT_BYTE || bt == VT_SHORT ||
1940 bt == VT_INT || bt == VT_LLONG);
1943 /* check types for comparison or subtraction of pointers */
1944 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1946 CType *type1, *type2, tmp_type1, tmp_type2;
1947 int bt1, bt2;
1949 /* null pointers are accepted for all comparisons, as in gcc */
1950 if (is_null_pointer(p1) || is_null_pointer(p2))
1951 return;
1952 type1 = &p1->type;
1953 type2 = &p2->type;
1954 bt1 = type1->t & VT_BTYPE;
1955 bt2 = type2->t & VT_BTYPE;
1956 /* accept comparison between pointer and integer with a warning */
1957 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1958 if (op != TOK_LOR && op != TOK_LAND )
1959 tcc_warning("comparison between pointer and integer");
1960 return;
1963 /* both must be pointers or implicit function pointers */
1964 if (bt1 == VT_PTR) {
1965 type1 = pointed_type(type1);
1966 } else if (bt1 != VT_FUNC)
1967 goto invalid_operands;
1969 if (bt2 == VT_PTR) {
1970 type2 = pointed_type(type2);
1971 } else if (bt2 != VT_FUNC) {
1972 invalid_operands:
1973 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1975 if ((type1->t & VT_BTYPE) == VT_VOID ||
1976 (type2->t & VT_BTYPE) == VT_VOID)
1977 return;
1978 tmp_type1 = *type1;
1979 tmp_type2 = *type2;
1980 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1981 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1982 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1983 /* gcc-like error if '-' is used */
1984 if (op == '-')
1985 goto invalid_operands;
1986 else
1987 tcc_warning("comparison of distinct pointer types lacks a cast");
1991 /* generic gen_op: handles types problems */
1992 ST_FUNC void gen_op(int op)
1994 int u, t1, t2, bt1, bt2, t;
1995 CType type1;
1997 redo:
1998 t1 = vtop[-1].type.t;
1999 t2 = vtop[0].type.t;
2000 bt1 = t1 & VT_BTYPE;
2001 bt2 = t2 & VT_BTYPE;
2003 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2004 tcc_error("operation on a struct");
2005 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2006 if (bt2 == VT_FUNC) {
2007 mk_pointer(&vtop->type);
2008 gaddrof();
2010 if (bt1 == VT_FUNC) {
2011 vswap();
2012 mk_pointer(&vtop->type);
2013 gaddrof();
2014 vswap();
2016 goto redo;
2017 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2018 /* at least one operand is a pointer */
2019 /* relational op: both operands must be pointers */
2020 if (op >= TOK_ULT && op <= TOK_LOR) {
2021 check_comparison_pointer_types(vtop - 1, vtop, op);
2022 /* pointers are handled as unsigned */
2023 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2024 t = VT_LLONG | VT_UNSIGNED;
2025 #else
2026 t = VT_INT | VT_UNSIGNED;
2027 #endif
2028 goto std_op;
2030 /* if both pointers, then it must be the '-' op */
2031 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2032 if (op != '-')
2033 tcc_error("cannot use pointers here");
2034 check_comparison_pointer_types(vtop - 1, vtop, op);
2035 /* XXX: check that types are compatible */
2036 if (vtop[-1].type.t & VT_VLA) {
2037 vla_runtime_pointed_size(&vtop[-1].type);
2038 } else {
2039 vpushi(pointed_size(&vtop[-1].type));
2041 vrott(3);
2042 gen_opic(op);
2043 /* set to integer type */
2044 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2045 vtop->type.t = VT_LLONG;
2046 #else
2047 vtop->type.t = VT_INT;
2048 #endif
2049 vswap();
2050 gen_op(TOK_PDIV);
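/* i.e. for int *p, *q the difference q - p is computed as the raw byte
   difference divided (TOK_PDIV) by the element size pushed above, e.g. 4
   for a 4-byte int. */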
2051 } else {
2052 /* exactly one pointer : must be '+' or '-'. */
2053 if (op != '-' && op != '+')
2054 tcc_error("cannot use pointers here");
2055 /* Put pointer as first operand */
2056 if (bt2 == VT_PTR) {
2057 vswap();
2058 t = t1, t1 = t2, t2 = t;
2060 #if PTR_SIZE == 4
2061 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2062 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2063 gen_cast(&int_type);
2064 #endif
2065 type1 = vtop[-1].type;
2066 type1.t &= ~VT_ARRAY;
2067 if (vtop[-1].type.t & VT_VLA)
2068 vla_runtime_pointed_size(&vtop[-1].type);
2069 else {
2070 u = pointed_size(&vtop[-1].type);
2071 if (u < 0)
2072 tcc_error("unknown array element size");
2073 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2074 vpushll(u);
2075 #else
2076 /* XXX: cast to int ? (long long case) */
2077 vpushi(u);
2078 #endif
2080 gen_op('*');
2081 #if 0
2082 /* #ifdef CONFIG_TCC_BCHECK
2083 The main reason for removing this code:
2084 #include <stdio.h>
2085 int main ()
2087 int v[10];
2088 int i = 10;
2089 int j = 9;
2090 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2091 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2093 When this code is enabled, the output looks like
2094 v+i-j = 0xfffffffe
2095 v+(i-j) = 0xbff84000 */
2097 /* if evaluating constant expression, no code should be
2098 generated, so no bound check */
2099 if (tcc_state->do_bounds_check && !const_wanted) {
2100 /* if bounded pointers, we generate a special code to
2101 test bounds */
2102 if (op == '-') {
2103 vpushi(0);
2104 vswap();
2105 gen_op('-');
2107 gen_bounded_ptr_add();
2108 } else
2109 #endif
2111 gen_opic(op);
2113 /* restore the type in case gen_opic() swapped the operands */
2114 vtop->type = type1;
2116 } else if (is_float(bt1) || is_float(bt2)) {
2117 /* compute bigger type and do implicit casts */
2118 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2119 t = VT_LDOUBLE;
2120 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2121 t = VT_DOUBLE;
2122 } else {
2123 t = VT_FLOAT;
2125 /* floats can only be used for a few operations */
2126 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2127 (op < TOK_ULT || op > TOK_GT))
2128 tcc_error("invalid operands for binary operation");
2129 goto std_op;
2130 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2131 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2132 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2133 t |= VT_UNSIGNED;
2134 goto std_op;
2135 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2136 /* cast to biggest op */
2137 t = VT_LLONG;
2138 /* convert to unsigned if it does not fit in a long long */
2139 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2140 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2141 t |= VT_UNSIGNED;
2142 goto std_op;
2143 } else {
2144 /* integer operations */
2145 t = VT_INT;
2146 /* convert to unsigned if it does not fit in an integer */
2147 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2148 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2149 t |= VT_UNSIGNED;
2150 std_op:
2151 /* XXX: currently, some unsigned operations are explicit, so
2152 we modify them here */
2153 if (t & VT_UNSIGNED) {
2154 if (op == TOK_SAR)
2155 op = TOK_SHR;
2156 else if (op == '/')
2157 op = TOK_UDIV;
2158 else if (op == '%')
2159 op = TOK_UMOD;
2160 else if (op == TOK_LT)
2161 op = TOK_ULT;
2162 else if (op == TOK_GT)
2163 op = TOK_UGT;
2164 else if (op == TOK_LE)
2165 op = TOK_ULE;
2166 else if (op == TOK_GE)
2167 op = TOK_UGE;
2169 vswap();
2170 type1.t = t;
2171 gen_cast(&type1);
2172 vswap();
2173 /* special case for shifts and long long: we keep the shift as
2174 an integer */
2175 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2176 type1.t = VT_INT;
2177 gen_cast(&type1);
2178 if (is_float(t))
2179 gen_opif(op);
2180 else
2181 gen_opic(op);
2182 if (op >= TOK_ULT && op <= TOK_GT) {
2183 /* relational op: the result is an int */
2184 vtop->type.t = VT_INT;
2185 } else {
2186 vtop->type.t = t;
2189 // Make sure that we have converted to an rvalue:
2190 if (vtop->r & VT_LVAL)
2191 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2194 #ifndef TCC_TARGET_ARM
2195 /* generic itof for unsigned long long case */
2196 static void gen_cvt_itof1(int t)
2198 #ifdef TCC_TARGET_ARM64
2199 gen_cvt_itof(t);
2200 #else
2201 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2202 (VT_LLONG | VT_UNSIGNED)) {
2204 if (t == VT_FLOAT)
2205 vpush_global_sym(&func_old_type, TOK___floatundisf);
2206 #if LDOUBLE_SIZE != 8
2207 else if (t == VT_LDOUBLE)
2208 vpush_global_sym(&func_old_type, TOK___floatundixf);
2209 #endif
2210 else
2211 vpush_global_sym(&func_old_type, TOK___floatundidf);
2212 vrott(2);
2213 gfunc_call(1);
2214 vpushi(0);
2215 vtop->r = reg_fret(t);
2216 } else {
2217 gen_cvt_itof(t);
2219 #endif
2221 #endif
2223 /* generic ftoi for unsigned long long case */
2224 static void gen_cvt_ftoi1(int t)
2226 #ifdef TCC_TARGET_ARM64
2227 gen_cvt_ftoi(t);
2228 #else
2229 int st;
2231 if (t == (VT_LLONG | VT_UNSIGNED)) {
2232 /* not handled natively */
2233 st = vtop->type.t & VT_BTYPE;
2234 if (st == VT_FLOAT)
2235 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2236 #if LDOUBLE_SIZE != 8
2237 else if (st == VT_LDOUBLE)
2238 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2239 #endif
2240 else
2241 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2242 vrott(2);
2243 gfunc_call(1);
2244 vpushi(0);
2245 vtop->r = REG_IRET;
2246 vtop->r2 = REG_LRET;
2247 } else {
2248 gen_cvt_ftoi(t);
2250 #endif
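/* Illustration (not part of the compiler): on 32-bit targets a conversion
   such as
       double d = 3.5;
       unsigned long long u = (unsigned long long)d;
   has no single native instruction, so gen_cvt_ftoi1() above emits a call
   to the runtime helper __fixunsdfdi (or __fixunssfdi / __fixunsxfdi for
   float / long double sources) and picks the result up from the
   REG_IRET/REG_LRET register pair.  The opposite direction, (double)u,
   goes through gen_cvt_itof1() and __floatundidf and friends. */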
2253 /* force char or short cast */
2254 static void force_charshort_cast(int t)
2256 int bits, dbt;
2257 dbt = t & VT_BTYPE;
2258 /* XXX: add optimization if lvalue : just change type and offset */
2259 if (dbt == VT_BYTE)
2260 bits = 8;
2261 else
2262 bits = 16;
2263 if (t & VT_UNSIGNED) {
2264 vpushi((1 << bits) - 1);
2265 gen_op('&');
2266 } else {
2267 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2268 bits = 64 - bits;
2269 else
2270 bits = 32 - bits;
2271 vpushi(bits);
2272 gen_op(TOK_SHL);
2273 /* result must be signed or the SAR is converted to an SHL
2274 This was not the case when "t" was a signed short
2275 and the last value on the stack was an unsigned int */
2276 vtop->type.t &= ~VT_UNSIGNED;
2277 vpushi(bits);
2278 gen_op(TOK_SAR);
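/* For illustration (not part of the compiler): force_charshort_cast()
   narrows a value to char/short width without a store/reload.  Assuming a
   32-bit int,
       int i = 0x12345678;
       unsigned char u = (unsigned char)i;   // AND with 0xff        -> 0x78
       signed char   c = (signed char)i;     // << 24 then >> 24     -> 0x78
   i.e. the unsigned case masks with (1 << bits) - 1 while the signed case
   uses a left shift followed by an arithmetic right shift, with the shift
   count adjusted to 64 - bits when the source is a long long. */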
2282 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2283 static void gen_cast(CType *type)
2285 int sbt, dbt, sf, df, c, p;
2287 /* special delayed cast for char/short */
2288 /* XXX: in some cases (multiple cascaded casts), it may still
2289 be incorrect */
2290 if (vtop->r & VT_MUSTCAST) {
2291 vtop->r &= ~VT_MUSTCAST;
2292 force_charshort_cast(vtop->type.t);
2295 /* bitfields first get cast to ints */
2296 if (vtop->type.t & VT_BITFIELD) {
2297 gv(RC_INT);
2300 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2301 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2303 if (sbt != dbt) {
2304 sf = is_float(sbt);
2305 df = is_float(dbt);
2306 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2307 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2308 if (c) {
2309 /* constant case: we can do it now */
2310 /* XXX: in ISO C, cannot do it if the conversion raises an error */
2311 if (sbt == VT_FLOAT)
2312 vtop->c.ld = vtop->c.f;
2313 else if (sbt == VT_DOUBLE)
2314 vtop->c.ld = vtop->c.d;
2316 if (df) {
2317 if ((sbt & VT_BTYPE) == VT_LLONG) {
2318 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2319 vtop->c.ld = vtop->c.i;
2320 else
2321 vtop->c.ld = -(long double)-vtop->c.i;
2322 } else if(!sf) {
2323 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2324 vtop->c.ld = (uint32_t)vtop->c.i;
2325 else
2326 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2329 if (dbt == VT_FLOAT)
2330 vtop->c.f = (float)vtop->c.ld;
2331 else if (dbt == VT_DOUBLE)
2332 vtop->c.d = (double)vtop->c.ld;
2333 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2334 vtop->c.i = vtop->c.ld;
2335 } else if (sf && dbt == VT_BOOL) {
2336 vtop->c.i = (vtop->c.ld != 0);
2337 } else {
2338 if(sf)
2339 vtop->c.i = vtop->c.ld;
2340 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2342 else if (sbt & VT_UNSIGNED)
2343 vtop->c.i = (uint32_t)vtop->c.i;
2344 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2345 else if (sbt == VT_PTR)
2347 #endif
2348 else if (sbt != VT_LLONG)
2349 vtop->c.i = ((uint32_t)vtop->c.i |
2350 -(vtop->c.i & 0x80000000));
2352 if (dbt == (VT_LLONG|VT_UNSIGNED))
2354 else if (dbt == VT_BOOL)
2355 vtop->c.i = (vtop->c.i != 0);
2356 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2357 else if (dbt == VT_PTR)
2359 #endif
2360 else if (dbt != VT_LLONG) {
2361 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2362 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2363 0xffffffff);
2364 vtop->c.i &= m;
2365 if (!(dbt & VT_UNSIGNED))
2366 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2369 } else if (p && dbt == VT_BOOL) {
2370 vtop->r = VT_CONST;
2371 vtop->c.i = 1;
2372 } else {
2373 /* non constant case: generate code */
2374 if (sf && df) {
2375 /* convert from fp to fp */
2376 gen_cvt_ftof(dbt);
2377 } else if (df) {
2378 /* convert int to fp */
2379 gen_cvt_itof1(dbt);
2380 } else if (sf) {
2381 /* convert fp to int */
2382 if (dbt == VT_BOOL) {
2383 vpushi(0);
2384 gen_op(TOK_NE);
2385 } else {
2386 if (sbt == VT_FLOAT) {
2387 /* cast to DOUBLE to avoid precision loss */
2388 gen_cvt_ftof(VT_DOUBLE);
2389 vtop->type.t = (vtop->type.t & ~VT_BTYPE) | VT_DOUBLE;
2391 /* we handle char/short/etc... with generic code */
2392 if (dbt != (VT_INT | VT_UNSIGNED) &&
2393 dbt != (VT_LLONG | VT_UNSIGNED) &&
2394 dbt != VT_LLONG)
2395 dbt = VT_INT;
2396 gen_cvt_ftoi1(dbt);
2397 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2398 /* additional cast for char/short... */
2399 vtop->type.t = dbt;
2400 gen_cast(type);
2403 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2404 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2405 if ((sbt & VT_BTYPE) != VT_LLONG) {
2406 /* scalar to long long */
2407 /* machine independent conversion */
2408 gv(RC_INT);
2409 /* generate high word */
2410 if (sbt == (VT_INT | VT_UNSIGNED)) {
2411 vpushi(0);
2412 gv(RC_INT);
2413 } else {
2414 if (sbt == VT_PTR) {
2415 /* cast from pointer to int before we apply
2416 shift operation, which pointers don't support */
2417 gen_cast(&int_type);
2419 gv_dup();
2420 vpushi(31);
2421 gen_op(TOK_SAR);
2423 /* patch second register */
2424 vtop[-1].r2 = vtop->r;
2425 vpop();
2427 #else
2428 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2429 (dbt & VT_BTYPE) == VT_PTR ||
2430 (dbt & VT_BTYPE) == VT_FUNC) {
2431 if ((sbt & VT_BTYPE) != VT_LLONG &&
2432 (sbt & VT_BTYPE) != VT_PTR &&
2433 (sbt & VT_BTYPE) != VT_FUNC) {
2434 /* need to convert from 32bit to 64bit */
2435 gv(RC_INT);
2436 if (sbt != (VT_INT | VT_UNSIGNED)) {
2437 #if defined(TCC_TARGET_ARM64)
2438 gen_cvt_sxtw();
2439 #elif defined(TCC_TARGET_X86_64)
2440 int r = gv(RC_INT);
2441 /* x86_64 specific: movslq */
2442 o(0x6348);
2443 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2444 #else
2445 #error
2446 #endif
2449 #endif
2450 } else if (dbt == VT_BOOL) {
2451 /* scalar to bool */
2452 vpushi(0);
2453 gen_op(TOK_NE);
2454 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2455 (dbt & VT_BTYPE) == VT_SHORT) {
2456 if (sbt == VT_PTR) {
2457 vtop->type.t = VT_INT;
2458 tcc_warning("nonportable conversion from pointer to char/short");
2460 force_charshort_cast(dbt);
2461 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2462 } else if ((dbt & VT_BTYPE) == VT_INT) {
2463 /* scalar to int */
2464 if ((sbt & VT_BTYPE) == VT_LLONG) {
2465 /* from long long: just take low order word */
2466 lexpand();
2467 vpop();
2469 /* if lvalue and single word type, nothing to do because
2470 the lvalue already contains the real type size (see
2471 VT_LVAL_xxx constants) */
2472 #endif
2475 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2476 /* if we are casting between pointer types,
2477 we must update the VT_LVAL_xxx size */
2478 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2479 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2481 vtop->type = *type;
2484 /* return type size as known at compile time. Put alignment at 'a' */
2485 ST_FUNC int type_size(CType *type, int *a)
2487 Sym *s;
2488 int bt;
2490 bt = type->t & VT_BTYPE;
2491 if (bt == VT_STRUCT) {
2492 /* struct/union */
2493 s = type->ref;
2494 *a = s->r;
2495 return s->c;
2496 } else if (bt == VT_PTR) {
2497 if (type->t & VT_ARRAY) {
2498 int ts;
2500 s = type->ref;
2501 ts = type_size(&s->type, a);
2503 if (ts < 0 && s->c < 0)
2504 ts = -ts;
2506 return ts * s->c;
2507 } else {
2508 *a = PTR_SIZE;
2509 return PTR_SIZE;
2511 } else if (bt == VT_LDOUBLE) {
2512 *a = LDOUBLE_ALIGN;
2513 return LDOUBLE_SIZE;
2514 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2515 #ifdef TCC_TARGET_I386
2516 #ifdef TCC_TARGET_PE
2517 *a = 8;
2518 #else
2519 *a = 4;
2520 #endif
2521 #elif defined(TCC_TARGET_ARM)
2522 #ifdef TCC_ARM_EABI
2523 *a = 8;
2524 #else
2525 *a = 4;
2526 #endif
2527 #else
2528 *a = 8;
2529 #endif
2530 return 8;
2531 } else if (bt == VT_INT || bt == VT_FLOAT) {
2532 *a = 4;
2533 return 4;
2534 } else if (bt == VT_SHORT) {
2535 *a = 2;
2536 return 2;
2537 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2538 *a = 8;
2539 return 16;
2540 } else if (bt == VT_ENUM) {
2541 *a = 4;
2542 /* Enums might be incomplete, so don't just return '4' here. */
2543 return type->ref->c;
2544 } else {
2545 /* char, void, function, _Bool */
2546 *a = 1;
2547 return 1;
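/* Worked examples (values depend on the target, per the #ifdefs above):
       char, _Bool          -> size 1, align 1
       short                -> size 2, align 2
       int, float           -> size 4, align 4
       double, long long    -> size 8, align 4 on i386/Linux, align 8 on
                               PE, ARM EABI, x86-64 and arm64
       pointers             -> PTR_SIZE for both size and alignment
   Struct/union sizes come back from struct_layout() via type->ref->c and
   type->ref->r, and array sizes are the element size times the count
   stored in the array's Sym. */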
2551 /* push type size as known at run time on top of the value stack. Put
2552 alignment at 'a' */
2553 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2555 if (type->t & VT_VLA) {
2556 type_size(&type->ref->type, a);
2557 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2558 } else {
2559 vpushi(type_size(type, a));
2563 static void vla_sp_restore(void) {
2564 if (vlas_in_scope) {
2565 gen_vla_sp_restore(vla_sp_loc);
2569 static void vla_sp_restore_root(void) {
2570 if (vlas_in_scope) {
2571 gen_vla_sp_restore(vla_sp_root_loc);
2575 /* return the pointed type of t */
2576 static inline CType *pointed_type(CType *type)
2578 return &type->ref->type;
2581 /* modify type so that it is a pointer to the original type. */
2582 ST_FUNC void mk_pointer(CType *type)
2584 Sym *s;
2585 s = sym_push(SYM_FIELD, type, 0, -1);
2586 type->t = VT_PTR | (type->t & ~VT_TYPE);
2587 type->ref = s;
2590 /* compare function types. OLD functions match any new functions */
2591 static int is_compatible_func(CType *type1, CType *type2)
2593 Sym *s1, *s2;
2595 s1 = type1->ref;
2596 s2 = type2->ref;
2597 if (!is_compatible_types(&s1->type, &s2->type))
2598 return 0;
2599 /* check func_call */
2600 if (s1->a.func_call != s2->a.func_call)
2601 return 0;
2602 /* XXX: not complete */
2603 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2604 return 1;
2605 if (s1->c != s2->c)
2606 return 0;
2607 while (s1 != NULL) {
2608 if (s2 == NULL)
2609 return 0;
2610 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2611 return 0;
2612 s1 = s1->next;
2613 s2 = s2->next;
2615 if (s2)
2616 return 0;
2617 return 1;
2620 /* return true if type1 and type2 are the same. If unqualified is
2621 true, qualifiers on the types are ignored.
2623 - enums are not checked as gcc __builtin_types_compatible_p ()
2625 static int compare_types(CType *type1, CType *type2, int unqualified)
2627 int bt1, t1, t2;
2629 t1 = type1->t & VT_TYPE;
2630 t2 = type2->t & VT_TYPE;
2631 if (unqualified) {
2632 /* strip qualifiers before comparing */
2633 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2634 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2636 /* Default vs. explicit signedness only matters for char */
2637 if ((t1 & VT_BTYPE) != VT_BYTE) {
2638 t1 &= ~VT_DEFSIGN;
2639 t2 &= ~VT_DEFSIGN;
2641 /* An enum is compatible with (unsigned) int. Ideally we would
2642 store the enums signedness in type->ref.a.<some_bit> and
2643 only accept unsigned enums with unsigned int and vice versa.
2644 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2645 from pointer target types, so we can't add it here either. */
2646 if ((t1 & VT_BTYPE) == VT_ENUM) {
2647 t1 = VT_INT;
2648 if (type1->ref->a.unsigned_enum)
2649 t1 |= VT_UNSIGNED;
2651 if ((t2 & VT_BTYPE) == VT_ENUM) {
2652 t2 = VT_INT;
2653 if (type2->ref->a.unsigned_enum)
2654 t2 |= VT_UNSIGNED;
2656 /* XXX: bitfields ? */
2657 if (t1 != t2)
2658 return 0;
2659 /* test more complicated cases */
2660 bt1 = t1 & VT_BTYPE;
2661 if (bt1 == VT_PTR) {
2662 type1 = pointed_type(type1);
2663 type2 = pointed_type(type2);
2664 return is_compatible_types(type1, type2);
2665 } else if (bt1 == VT_STRUCT) {
2666 return (type1->ref == type2->ref);
2667 } else if (bt1 == VT_FUNC) {
2668 return is_compatible_func(type1, type2);
2669 } else {
2670 return 1;
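/* Illustration (not part of the compiler) of the comparison rules above:
       int            vs  const int    -> equal only when 'unqualified' is set
       char           vs  signed char  -> different (explicit signedness is
                                          kept for char via VT_DEFSIGN)
       enum E { A }   vs  int          -> equal for a signed enum; an enum
                                          whose values are all non-negative
                                          compares equal to unsigned int
       struct A       vs  struct B     -> equal only when both refer to the
                                          same definition (same Sym)
   Pointer types recurse into the pointed-to types with full qualifier
   checking, and function types are handled by is_compatible_func(). */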
2674 /* return true if type1 and type2 are exactly the same (including
2675 qualifiers).
2677 static int is_compatible_types(CType *type1, CType *type2)
2679 return compare_types(type1,type2,0);
2682 /* return true if type1 and type2 are the same (ignoring qualifiers).
2684 static int is_compatible_parameter_types(CType *type1, CType *type2)
2686 return compare_types(type1,type2,1);
2689 /* print a type. If 'varstr' is not NULL, then the variable is also
2690 printed in the type */
2691 /* XXX: union */
2692 /* XXX: add array and function pointers */
2693 static void type_to_str(char *buf, int buf_size,
2694 CType *type, const char *varstr)
2696 int bt, v, t;
2697 Sym *s, *sa;
2698 char buf1[256];
2699 const char *tstr;
2701 t = type->t & VT_TYPE;
2702 bt = t & VT_BTYPE;
2703 buf[0] = '\0';
2704 if (t & VT_CONSTANT)
2705 pstrcat(buf, buf_size, "const ");
2706 if (t & VT_VOLATILE)
2707 pstrcat(buf, buf_size, "volatile ");
2708 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2709 pstrcat(buf, buf_size, "unsigned ");
2710 else if (t & VT_DEFSIGN)
2711 pstrcat(buf, buf_size, "signed ");
2712 switch(bt) {
2713 case VT_VOID:
2714 tstr = "void";
2715 goto add_tstr;
2716 case VT_BOOL:
2717 tstr = "_Bool";
2718 goto add_tstr;
2719 case VT_BYTE:
2720 tstr = "char";
2721 goto add_tstr;
2722 case VT_SHORT:
2723 tstr = "short";
2724 goto add_tstr;
2725 case VT_INT:
2726 tstr = "int";
2727 goto add_tstr;
2728 case VT_LONG:
2729 tstr = "long";
2730 goto add_tstr;
2731 case VT_LLONG:
2732 tstr = "long long";
2733 goto add_tstr;
2734 case VT_FLOAT:
2735 tstr = "float";
2736 goto add_tstr;
2737 case VT_DOUBLE:
2738 tstr = "double";
2739 goto add_tstr;
2740 case VT_LDOUBLE:
2741 tstr = "long double";
2742 add_tstr:
2743 pstrcat(buf, buf_size, tstr);
2744 break;
2745 case VT_ENUM:
2746 case VT_STRUCT:
2747 if (bt == VT_STRUCT)
2748 tstr = "struct ";
2749 else
2750 tstr = "enum ";
2751 pstrcat(buf, buf_size, tstr);
2752 v = type->ref->v & ~SYM_STRUCT;
2753 if (v >= SYM_FIRST_ANOM)
2754 pstrcat(buf, buf_size, "<anonymous>");
2755 else
2756 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2757 break;
2758 case VT_FUNC:
2759 s = type->ref;
2760 type_to_str(buf, buf_size, &s->type, varstr);
2761 pstrcat(buf, buf_size, "(");
2762 sa = s->next;
2763 while (sa != NULL) {
2764 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2765 pstrcat(buf, buf_size, buf1);
2766 sa = sa->next;
2767 if (sa)
2768 pstrcat(buf, buf_size, ", ");
2770 pstrcat(buf, buf_size, ")");
2771 goto no_var;
2772 case VT_PTR:
2773 s = type->ref;
2774 if (t & VT_ARRAY) {
2775 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2776 type_to_str(buf, buf_size, &s->type, buf1);
2777 goto no_var;
2779 pstrcpy(buf1, sizeof(buf1), "*");
2780 if (t & VT_CONSTANT)
2781 pstrcat(buf1, buf_size, "const ");
2782 if (t & VT_VOLATILE)
2783 pstrcat(buf1, buf_size, "volatile ");
2784 if (varstr)
2785 pstrcat(buf1, sizeof(buf1), varstr);
2786 type_to_str(buf, buf_size, &s->type, buf1);
2787 goto no_var;
2789 if (varstr) {
2790 pstrcat(buf, buf_size, " ");
2791 pstrcat(buf, buf_size, varstr);
2793 no_var: ;
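/* For illustration, typical results of type_to_str() (hypothetical calls):
       int                        -> "int"
       const unsigned char *p     -> "const unsigned char *p"
       int f(char, double)        -> "int f(char, double)"
       int a[10]                  -> "int a[10]"
   More involved declarators (arrays of pointers, pointers to functions)
   are only approximated, see the XXX notes above.  The output is used for
   diagnostics such as the "cannot cast '%s' to '%s'" error below. */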
2796 /* verify type compatibility to store vtop in 'dt' type, and generate
2797 casts if needed. */
2798 static void gen_assign_cast(CType *dt)
2800 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2801 char buf1[256], buf2[256];
2802 int dbt, sbt;
2804 st = &vtop->type; /* source type */
2805 dbt = dt->t & VT_BTYPE;
2806 sbt = st->t & VT_BTYPE;
2807 if (sbt == VT_VOID || dbt == VT_VOID) {
2808 if (sbt == VT_VOID && dbt == VT_VOID)
2809 ; /*
2810 It is OK if both are void
2811 A test program:
2812 void func1() {}
2813 void func2() {
2814 return func1();
2816 gcc accepts this program
2818 else
2819 tcc_error("cannot cast from/to void");
2821 if (dt->t & VT_CONSTANT)
2822 tcc_warning("assignment of read-only location");
2823 switch(dbt) {
2824 case VT_PTR:
2825 /* special cases for pointers */
2826 /* '0' can also be a pointer */
2827 if (is_null_pointer(vtop))
2828 goto type_ok;
2829 /* accept implicit integer to pointer cast with warning */
2830 if (is_integer_btype(sbt)) {
2831 tcc_warning("assignment makes pointer from integer without a cast");
2832 goto type_ok;
2834 type1 = pointed_type(dt);
2835 /* a function is implicitly a function pointer */
2836 if (sbt == VT_FUNC) {
2837 if ((type1->t & VT_BTYPE) != VT_VOID &&
2838 !is_compatible_types(pointed_type(dt), st))
2839 tcc_warning("assignment from incompatible pointer type");
2840 goto type_ok;
2842 if (sbt != VT_PTR)
2843 goto error;
2844 type2 = pointed_type(st);
2845 if ((type1->t & VT_BTYPE) == VT_VOID ||
2846 (type2->t & VT_BTYPE) == VT_VOID) {
2847 /* void * can match anything */
2848 } else {
2849 /* exact type match, except for qualifiers */
2850 tmp_type1 = *type1;
2851 tmp_type2 = *type2;
2852 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2853 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2854 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2855 /* Like GCC, don't warn by default for mere changes
2856 in pointer target signedness. Do warn for different
2857 base types, though, in particular for unsigned enums
2858 and signed int targets. */
2859 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2860 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2861 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2863 else
2864 tcc_warning("assignment from incompatible pointer type");
2867 /* check const and volatile */
2868 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2869 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2870 tcc_warning("assignment discards qualifiers from pointer target type");
2871 break;
2872 case VT_BYTE:
2873 case VT_SHORT:
2874 case VT_INT:
2875 case VT_LLONG:
2876 if (sbt == VT_PTR || sbt == VT_FUNC) {
2877 tcc_warning("assignment makes integer from pointer without a cast");
2878 } else if (sbt == VT_STRUCT) {
2879 goto case_VT_STRUCT;
2881 /* XXX: more tests */
2882 break;
2883 case VT_STRUCT:
2884 case_VT_STRUCT:
2885 tmp_type1 = *dt;
2886 tmp_type2 = *st;
2887 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2888 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2889 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2890 error:
2891 type_to_str(buf1, sizeof(buf1), st, NULL);
2892 type_to_str(buf2, sizeof(buf2), dt, NULL);
2893 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2895 break;
2897 type_ok:
2898 gen_cast(dt);
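/* Illustration (not part of the compiler) of the diagnostics above:
       int *p; int n;
       p = n;          // warning: assignment makes pointer from integer
                       //          without a cast
       n = p;          // warning: assignment makes integer from pointer
                       //          without a cast
       const char *s; char *q;
       q = s;          // warning: assignment discards qualifiers from
                       //          pointer target type
       struct A a; struct B b;
       a = b;          // error: cannot cast 'struct B' to 'struct A'
   When the check succeeds, gen_cast() converts the value on top of the
   value stack to the destination type. */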
2901 /* store vtop in lvalue pushed on stack */
2902 ST_FUNC void vstore(void)
2904 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2906 ft = vtop[-1].type.t;
2907 sbt = vtop->type.t & VT_BTYPE;
2908 dbt = ft & VT_BTYPE;
2909 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2910 (sbt == VT_INT && dbt == VT_SHORT))
2911 && !(vtop->type.t & VT_BITFIELD)) {
2912 /* optimize char/short casts */
2913 delayed_cast = VT_MUSTCAST;
2914 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2915 ((1 << VT_STRUCT_SHIFT) - 1));
2916 /* XXX: factorize */
2917 if (ft & VT_CONSTANT)
2918 tcc_warning("assignment of read-only location");
2919 } else {
2920 delayed_cast = 0;
2921 if (!(ft & VT_BITFIELD))
2922 gen_assign_cast(&vtop[-1].type);
2925 if (sbt == VT_STRUCT) {
2926 /* if structure, only generate pointer */
2927 /* structure assignment : generate memcpy */
2928 /* XXX: optimize if small size */
2929 size = type_size(&vtop->type, &align);
2931 /* destination */
2932 vswap();
2933 vtop->type.t = VT_PTR;
2934 gaddrof();
2936 /* address of memcpy() */
2937 #ifdef TCC_ARM_EABI
2938 if(!(align & 7))
2939 vpush_global_sym(&func_old_type, TOK_memcpy8);
2940 else if(!(align & 3))
2941 vpush_global_sym(&func_old_type, TOK_memcpy4);
2942 else
2943 #endif
2944 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2945 vpush_global_sym(&func_old_type, TOK_memmove);
2947 vswap();
2948 /* source */
2949 vpushv(vtop - 2);
2950 vtop->type.t = VT_PTR;
2951 gaddrof();
2952 /* type size */
2953 vpushi(size);
2954 gfunc_call(3);
2956 /* leave source on stack */
2957 } else if (ft & VT_BITFIELD) {
2958 /* bitfield store handling */
2960 /* save lvalue as expression result (example: s.b = s.a = n;) */
2961 vdup(), vtop[-1] = vtop[-2];
2963 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2964 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2965 /* remove bit field info to avoid loops */
2966 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2968 if((ft & VT_BTYPE) == VT_BOOL) {
2969 gen_cast(&vtop[-1].type);
2970 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2973 /* duplicate destination */
2974 vdup();
2975 vtop[-1] = vtop[-2];
2977 /* mask and shift source */
2978 if((ft & VT_BTYPE) != VT_BOOL) {
2979 if((ft & VT_BTYPE) == VT_LLONG) {
2980 vpushll((1ULL << bit_size) - 1ULL);
2981 } else {
2982 vpushi((1 << bit_size) - 1);
2984 gen_op('&');
2986 vpushi(bit_pos);
2987 gen_op(TOK_SHL);
2988 /* load destination, mask and or with source */
2989 vswap();
2990 if((ft & VT_BTYPE) == VT_LLONG) {
2991 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2992 } else {
2993 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2995 gen_op('&');
2996 gen_op('|');
2997 /* store result */
2998 vstore();
2999 /* ... and discard */
3000 vpop();
3002 } else {
3003 #ifdef CONFIG_TCC_BCHECK
3004 /* bound check case */
3005 if (vtop[-1].r & VT_MUSTBOUND) {
3006 vswap();
3007 gbound();
3008 vswap();
3010 #endif
3011 rc = RC_INT;
3012 if (is_float(ft)) {
3013 rc = RC_FLOAT;
3014 #ifdef TCC_TARGET_X86_64
3015 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3016 rc = RC_ST0;
3017 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3018 rc = RC_FRET;
3020 #endif
3022 r = gv(rc); /* generate value */
3023 /* if lvalue was saved on stack, must read it */
3024 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3025 SValue sv;
3026 t = get_reg(RC_INT);
3027 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3028 sv.type.t = VT_PTR;
3029 #else
3030 sv.type.t = VT_INT;
3031 #endif
3032 sv.r = VT_LOCAL | VT_LVAL;
3033 sv.c.i = vtop[-1].c.i;
3034 load(t, &sv);
3035 vtop[-1].r = t | VT_LVAL;
3037 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3038 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3039 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3040 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3041 #else
3042 if ((ft & VT_BTYPE) == VT_LLONG) {
3043 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3044 #endif
3045 vtop[-1].type.t = load_type;
3046 store(r, vtop - 1);
3047 vswap();
3048 /* convert to int to increment easily */
3049 vtop->type.t = addr_type;
3050 gaddrof();
3051 vpushi(load_size);
3052 gen_op('+');
3053 vtop->r |= VT_LVAL;
3054 vswap();
3055 vtop[-1].type.t = load_type;
3056 /* XXX: it works because r2 is spilled last ! */
3057 store(vtop->r2, vtop - 1);
3058 } else {
3059 store(r, vtop - 1);
3062 vswap();
3063 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3064 vtop->r |= delayed_cast;
3068 /* handle pre/post increment and decrement. 'post' is true for the postfix form, 'c' is the token ++ or -- */
3069 ST_FUNC void inc(int post, int c)
3071 test_lvalue();
3072 vdup(); /* save lvalue */
3073 if (post) {
3074 gv_dup(); /* duplicate value */
3075 vrotb(3);
3076 vrotb(3);
3078 /* add constant */
3079 vpushi(c - TOK_MID);
3080 gen_op('+');
3081 vstore(); /* store value */
3082 if (post)
3083 vpop(); /* if post op, return saved value */
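/* Illustration of inc(): for a post-increment like "x++" the lvalue is
   duplicated, the current value of x is duplicated and rotated below the
   working copies, 1 (or -1 for "--") is added and stored back, and the
   extra store result is popped so that the old value of x remains as the
   expression result.  For the prefix form the extra copy is skipped and
   the stored (new) value is the result.  'c - TOK_MID' evaluates to +1
   for TOK_INC and -1 for TOK_DEC. */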
3086 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3088 /* read the string */
3089 if (tok != TOK_STR)
3090 expect(msg);
3091 cstr_new(astr);
3092 while (tok == TOK_STR) {
3093 /* XXX: add \0 handling too ? */
3094 cstr_cat(astr, tokc.str.data, -1);
3095 next();
3097 cstr_ccat(astr, '\0');
3100 /* If I is >= 1 and a power of two, returns log2(i)+1.
3101 If I is 0 returns 0. */
3102 static int exact_log2p1(int i)
3104 int ret;
3105 if (!i)
3106 return 0;
3107 for (ret = 1; i >= 1 << 8; ret += 8)
3108 i >>= 8;
3109 if (i >= 1 << 4)
3110 ret += 4, i >>= 4;
3111 if (i >= 1 << 2)
3112 ret += 2, i >>= 2;
3113 if (i >= 1 << 1)
3114 ret++;
3115 return ret;
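/* Worked examples: exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(2) == 2, exact_log2p1(8) == 4, exact_log2p1(4096) == 13.
   This is the encoding used for the packed 'aligned' attribute field: an
   alignment of n bytes is stored as exact_log2p1(n) and recovered as
   1 << (aligned - 1), see parse_attribute() and struct_layout() below. */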
3118 /* Parse GNUC __attribute__ extension. Currently, the following
3119 extensions are recognized:
3120 - aligned(n) : set data/function alignment.
3121 - packed : force data alignment to 1
3122 - section(x) : generate data/code in this section.
3123 - unused : currently ignored, but may be used someday.
3124 - regparm(n) : pass function parameters in registers (i386 only)
3126 static void parse_attribute(AttributeDef *ad)
3128 int t, n;
3129 CString astr;
3131 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3132 next();
3133 skip('(');
3134 skip('(');
3135 while (tok != ')') {
3136 if (tok < TOK_IDENT)
3137 expect("attribute name");
3138 t = tok;
3139 next();
3140 switch(t) {
3141 case TOK_SECTION1:
3142 case TOK_SECTION2:
3143 skip('(');
3144 parse_mult_str(&astr, "section name");
3145 ad->section = find_section(tcc_state, (char *)astr.data);
3146 skip(')');
3147 cstr_free(&astr);
3148 break;
3149 case TOK_ALIAS1:
3150 case TOK_ALIAS2:
3151 skip('(');
3152 parse_mult_str(&astr, "alias(\"target\")");
3153 ad->alias_target = /* save string as token, for later */
3154 tok_alloc((char*)astr.data, astr.size-1)->tok;
3155 skip(')');
3156 cstr_free(&astr);
3157 break;
3158 case TOK_VISIBILITY1:
3159 case TOK_VISIBILITY2:
3160 skip('(');
3161 parse_mult_str(&astr,
3162 "visibility(\"default|hidden|internal|protected\")");
3163 if (!strcmp (astr.data, "default"))
3164 ad->a.visibility = STV_DEFAULT;
3165 else if (!strcmp (astr.data, "hidden"))
3166 ad->a.visibility = STV_HIDDEN;
3167 else if (!strcmp (astr.data, "internal"))
3168 ad->a.visibility = STV_INTERNAL;
3169 else if (!strcmp (astr.data, "protected"))
3170 ad->a.visibility = STV_PROTECTED;
3171 else
3172 expect("visibility(\"default|hidden|internal|protected\")");
3173 skip(')');
3174 cstr_free(&astr);
3175 break;
3176 case TOK_ALIGNED1:
3177 case TOK_ALIGNED2:
3178 if (tok == '(') {
3179 next();
3180 n = expr_const();
3181 if (n <= 0 || (n & (n - 1)) != 0)
3182 tcc_error("alignment must be a positive power of two");
3183 skip(')');
3184 } else {
3185 n = MAX_ALIGN;
3187 ad->a.aligned = exact_log2p1(n);
3188 if (n != 1 << (ad->a.aligned - 1))
3189 tcc_error("alignment of %d is larger than implemented", n);
3190 break;
3191 case TOK_PACKED1:
3192 case TOK_PACKED2:
3193 ad->a.packed = 1;
3194 break;
3195 case TOK_WEAK1:
3196 case TOK_WEAK2:
3197 ad->a.weak = 1;
3198 break;
3199 case TOK_UNUSED1:
3200 case TOK_UNUSED2:
3201 /* currently, no need to handle it because tcc does not
3202 track unused objects */
3203 break;
3204 case TOK_NORETURN1:
3205 case TOK_NORETURN2:
3206 /* currently ignored: tcc makes no use of noreturn information */
3208 break;
3209 case TOK_CDECL1:
3210 case TOK_CDECL2:
3211 case TOK_CDECL3:
3212 ad->a.func_call = FUNC_CDECL;
3213 break;
3214 case TOK_STDCALL1:
3215 case TOK_STDCALL2:
3216 case TOK_STDCALL3:
3217 ad->a.func_call = FUNC_STDCALL;
3218 break;
3219 #ifdef TCC_TARGET_I386
3220 case TOK_REGPARM1:
3221 case TOK_REGPARM2:
3222 skip('(');
3223 n = expr_const();
3224 if (n > 3)
3225 n = 3;
3226 else if (n < 0)
3227 n = 0;
3228 if (n > 0)
3229 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3230 skip(')');
3231 break;
3232 case TOK_FASTCALL1:
3233 case TOK_FASTCALL2:
3234 case TOK_FASTCALL3:
3235 ad->a.func_call = FUNC_FASTCALLW;
3236 break;
3237 #endif
3238 case TOK_MODE:
3239 skip('(');
3240 switch(tok) {
3241 case TOK_MODE_DI:
3242 ad->a.mode = VT_LLONG + 1;
3243 break;
3244 case TOK_MODE_QI:
3245 ad->a.mode = VT_BYTE + 1;
3246 break;
3247 case TOK_MODE_HI:
3248 ad->a.mode = VT_SHORT + 1;
3249 break;
3250 case TOK_MODE_SI:
3251 case TOK_MODE_word:
3252 ad->a.mode = VT_INT + 1;
3253 break;
3254 default:
3255 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3256 break;
3258 next();
3259 skip(')');
3260 break;
3261 case TOK_DLLEXPORT:
3262 ad->a.func_export = 1;
3263 break;
3264 case TOK_DLLIMPORT:
3265 ad->a.func_import = 1;
3266 break;
3267 default:
3268 if (tcc_state->warn_unsupported)
3269 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3270 /* skip parameters */
3271 if (tok == '(') {
3272 int parenthesis = 0;
3273 do {
3274 if (tok == '(')
3275 parenthesis++;
3276 else if (tok == ')')
3277 parenthesis--;
3278 next();
3279 } while (parenthesis && tok != -1);
3281 break;
3283 if (tok != ',')
3284 break;
3285 next();
3287 skip(')');
3288 skip(')');
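/* Illustration (not part of the compiler): declarations such as
       static int counter __attribute__((aligned(16), section(".mydata"), unused));
       void fatal(const char *msg) __attribute__((noreturn));
   are handled by the loop above: 'aligned(16)' stores exact_log2p1(16) in
   ad->a.aligned, 'section' records the target section in ad->section,
   'unused' and 'noreturn' are accepted but ignored, and unrecognized
   attributes are skipped together with any parenthesized arguments (with
   a warning when tcc_state->warn_unsupported is set). */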
3292 static Sym * find_field (CType *type, int v)
3294 Sym *s = type->ref;
3295 v |= SYM_FIELD;
3296 while ((s = s->next) != NULL) {
3297 if ((s->v & SYM_FIELD) &&
3298 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3299 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3300 Sym *ret = find_field (&s->type, v);
3301 if (ret)
3302 return ret;
3304 if (s->v == v)
3305 break;
3307 return s;
3310 static void struct_add_offset (Sym *s, int offset)
3312 while ((s = s->next) != NULL) {
3313 if ((s->v & SYM_FIELD) &&
3314 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3315 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3316 struct_add_offset(s->type.ref, offset);
3317 } else
3318 s->c += offset;
3322 static void struct_layout(CType *type, AttributeDef *ad)
3324 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3325 int pcc = !tcc_state->ms_bitfields;
3326 Sym *f;
3327 if (ad->a.aligned)
3328 maxalign = 1 << (ad->a.aligned - 1);
3329 else
3330 maxalign = 1;
3331 offset = 0;
3332 c = 0;
3333 bit_pos = 0;
3334 prevbt = VT_STRUCT; /* make it never match */
3335 prev_bit_size = 0;
3336 for (f = type->ref->next; f; f = f->next) {
3337 int typealign, bit_size;
3338 int size = type_size(&f->type, &typealign);
3339 if (f->type.t & VT_BITFIELD)
3340 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3341 else
3342 bit_size = -1;
3343 if (bit_size == 0 && pcc) {
3344 /* Zero-width bit-fields in PCC mode aren't affected
3345 by any packing (attribute or pragma). */
3346 align = typealign;
3347 } else if (f->r > 1) {
3348 align = f->r;
3349 } else if (ad->a.packed || f->r == 1) {
3350 align = 1;
3351 /* Packed fields or packed records don't let the base type
3352 influence the record's alignment. */
3353 typealign = 1;
3354 } else {
3355 align = typealign;
3357 if (type->ref->type.t != TOK_STRUCT) {
3358 if (pcc && bit_size >= 0)
3359 size = (bit_size + 7) >> 3;
3360 /* Bit position is already zero from our caller. */
3361 offset = 0;
3362 if (size > c)
3363 c = size;
3364 } else if (bit_size < 0) {
3365 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3366 prevbt = VT_STRUCT;
3367 prev_bit_size = 0;
3368 c = (c + addbytes + align - 1) & -align;
3369 offset = c;
3370 if (size > 0)
3371 c += size;
3372 bit_pos = 0;
3373 } else {
3374 /* A bit-field. Layout is more complicated. There are two
3375 options TCC implements: PCC compatible and MS compatible
3376 (PCC compatible is what GCC uses for almost all targets).
3377 In PCC layout the overall size of the struct (in c) is
3378 _excluding_ the current run of bit-fields (that is,
3379 there's at least additional bit_pos bits after c). In
3380 MS layout c does include the current run of bit-fields.
3382 This matters for calculating the natural alignment buckets
3383 in PCC mode. */
3385 /* 'align' will be used to influence records alignment,
3386 so it's the max of specified and type alignment, except
3387 in certain cases that depend on the mode. */
3388 if (align < typealign)
3389 align = typealign;
3390 if (pcc) {
3391 /* In PCC layout a non-packed bit-field is placed adjacent
3392 to the preceding bit-fields, except if it would overflow
3393 its container (depending on base type) or it's a zero-width
3394 bit-field. Packed non-zero-width bit-fields always are
3395 placed adjacent. */
3396 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3397 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3398 if (bit_size == 0 ||
3399 (typealign != 1 &&
3400 (ofs2 / (typealign * 8)) > (size/typealign))) {
3401 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3402 bit_pos = 0;
3404 offset = c;
3405 /* In PCC layout named bit-fields influence the alignment
3406 of the containing struct using the base types alignment,
3407 except for packed fields (which here have correct
3408 align/typealign). */
3409 if ((f->v & SYM_FIRST_ANOM))
3410 align = 1;
3411 } else {
3412 bt = f->type.t & VT_BTYPE;
3413 if ((bit_pos + bit_size > size * 8) ||
3414 (bit_size > 0) == (bt != prevbt)) {
3415 c = (c + typealign - 1) & -typealign;
3416 offset = c;
3417 bit_pos = 0;
3418 /* In MS bitfield mode a bit-field run always uses
3419 at least as many bits as the underlying type.
3420 To start a new run it's also required that this
3421 or the last bit-field had non-zero width. */
3422 if (bit_size || prev_bit_size)
3423 c += size;
3425 /* In MS layout the record's alignment is normally
3426 influenced by the field, except for a zero-width
3427 field at the start of a run (but by further zero-width
3428 fields it is again). */
3429 if (bit_size == 0 && prevbt != bt)
3430 align = 1;
3431 prevbt = bt;
3432 prev_bit_size = bit_size;
3434 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3435 | (bit_pos << VT_STRUCT_SHIFT);
3436 bit_pos += bit_size;
3437 if (pcc && bit_pos >= size * 8) {
3438 c += size;
3439 bit_pos -= size * 8;
3442 if (align > maxalign)
3443 maxalign = align;
3444 #if 0
3445 printf("set field %s offset=%d c=%d",
3446 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3447 if (f->type.t & VT_BITFIELD) {
3448 printf(" pos=%d size=%d",
3449 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3450 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3452 printf("\n");
3453 #endif
3455 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3456 Sym *ass;
3457 /* An anonymous struct/union. Adjust member offsets
3458 to reflect the real offset of our containing struct.
3459 Also set the offset of this anon member inside
3460 the outer struct to be zero. Via this it
3461 works when accessing the field offset directly
3462 (from base object), as well as when recursing
3463 members in initializer handling. */
3464 int v2 = f->type.ref->v;
3465 if (!(v2 & SYM_FIELD) &&
3466 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3467 Sym **pps;
3468 /* This happens only with MS extensions. The
3469 anon member has a named struct type, so it
3470 potentially is shared with other references.
3471 We need to unshare members so we can modify
3472 them. */
3473 ass = f->type.ref;
3474 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3475 &f->type.ref->type, 0,
3476 f->type.ref->c);
3477 pps = &f->type.ref->next;
3478 while ((ass = ass->next) != NULL) {
3479 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3480 pps = &((*pps)->next);
3482 *pps = NULL;
3484 struct_add_offset(f->type.ref, offset);
3485 f->c = 0;
3486 } else {
3487 f->c = offset;
3490 f->r = 0;
3492 /* store size and alignment */
3493 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3494 + maxalign - 1) & -maxalign;
3495 type->ref->r = maxalign;
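/* Illustration (not part of the compiler) of the two bit-field models:
       struct S { char a : 4; char b : 4; int c : 8; };
   With the default PCC-compatible layout (what GCC uses) the bit-fields
   are packed into the first bytes and sizeof(struct S) is 4 on a typical
   32-bit ABI (the named int bit-field contributes int alignment).  With
   MS-compatible layout (tcc_state->ms_bitfields set) a change of the
   underlying base type starts a new allocation unit, so 'c' is placed in
   a fresh, 4-byte-aligned int and the struct grows to 8 bytes.  Zero-width
   bit-fields and __attribute__((packed)) adjust the placement exactly as
   described in the comments above. */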
3498 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3499 static void struct_decl(CType *type, AttributeDef *ad, int u)
3501 int a, v, size, align, flexible, alignoverride;
3502 long c;
3503 int bit_size, bsize, bt;
3504 Sym *s, *ss, **ps;
3505 AttributeDef ad1;
3506 CType type1, btype;
3508 a = tok; /* save decl type */
3509 next();
3510 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3511 parse_attribute(ad);
3512 if (tok != '{') {
3513 v = tok;
3514 next();
3515 /* struct already defined ? return it */
3516 if (v < TOK_IDENT)
3517 expect("struct/union/enum name");
3518 s = struct_find(v);
3519 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3520 if (s->type.t != a)
3521 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3522 goto do_decl;
3524 } else {
3525 v = anon_sym++;
3527 /* Record the original enum/struct/union token. */
3528 type1.t = a;
3529 type1.ref = NULL;
3530 /* we put an undefined size for struct/union */
3531 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3532 s->r = 0; /* default alignment is zero as gcc */
3533 /* put struct/union/enum name in type */
3534 do_decl:
3535 type->t = u;
3536 type->ref = s;
3538 if (tok == '{') {
3539 next();
3540 if (s->c != -1)
3541 tcc_error("struct/union/enum already defined");
3542 /* cannot be empty */
3543 c = 0;
3544 /* empty enums are not allowed */
3545 if (a == TOK_ENUM) {
3546 int seen_neg = 0;
3547 int seen_wide = 0;
3548 for(;;) {
3549 CType *t = &int_type;
3550 v = tok;
3551 if (v < TOK_UIDENT)
3552 expect("identifier");
3553 ss = sym_find(v);
3554 if (ss && !local_stack)
3555 tcc_error("redefinition of enumerator '%s'",
3556 get_tok_str(v, NULL));
3557 next();
3558 if (tok == '=') {
3559 next();
3560 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3561 c = expr_const64();
3562 #else
3563 /* We really want to support long long enums
3564 on i386 as well, but the Sym structure only
3565 holds a 'long' for associated constants,
3566 and enlarging it would bump its size (no
3567 available padding). So punt for now. */
3568 c = expr_const();
3569 #endif
3571 if (c < 0)
3572 seen_neg = 1;
3573 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3574 seen_wide = 1, t = &size_type;
3575 /* enum symbols have static storage */
3576 ss = sym_push(v, t, VT_CONST, c);
3577 ss->type.t |= VT_STATIC;
3578 if (tok != ',')
3579 break;
3580 next();
3581 c++;
3582 /* NOTE: we accept a trailing comma */
3583 if (tok == '}')
3584 break;
3586 if (!seen_neg)
3587 s->a.unsigned_enum = 1;
3588 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3589 skip('}');
3590 } else {
3591 ps = &s->next;
3592 flexible = 0;
3593 while (tok != '}') {
3594 if (!parse_btype(&btype, &ad1)) {
3595 skip(';');
3596 continue;
3598 while (1) {
3599 if (flexible)
3600 tcc_error("flexible array member '%s' not at the end of struct",
3601 get_tok_str(v, NULL));
3602 bit_size = -1;
3603 v = 0;
3604 type1 = btype;
3605 if (tok != ':') {
3606 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3607 if (v == 0) {
3608 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3609 expect("identifier");
3610 else {
3611 int v = btype.ref->v;
3612 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3613 if (tcc_state->ms_extensions == 0)
3614 expect("identifier");
3618 if (type_size(&type1, &align) < 0) {
3619 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3620 flexible = 1;
3621 else
3622 tcc_error("field '%s' has incomplete type",
3623 get_tok_str(v, NULL));
3625 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3626 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3627 tcc_error("invalid type for '%s'",
3628 get_tok_str(v, NULL));
3630 if (tok == ':') {
3631 next();
3632 bit_size = expr_const();
3633 /* XXX: handle v = 0 case for messages */
3634 if (bit_size < 0)
3635 tcc_error("negative width in bit-field '%s'",
3636 get_tok_str(v, NULL));
3637 if (v && bit_size == 0)
3638 tcc_error("zero width for bit-field '%s'",
3639 get_tok_str(v, NULL));
3640 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3641 parse_attribute(&ad1);
3643 size = type_size(&type1, &align);
3644 /* Only remember non-default alignment. */
3645 alignoverride = 0;
3646 if (ad1.a.aligned) {
3647 int speca = 1 << (ad1.a.aligned - 1);
3648 alignoverride = speca;
3649 } else if (ad1.a.packed || ad->a.packed) {
3650 alignoverride = 1;
3651 } else if (*tcc_state->pack_stack_ptr) {
3652 if (align > *tcc_state->pack_stack_ptr)
3653 alignoverride = *tcc_state->pack_stack_ptr;
3655 if (bit_size >= 0) {
3656 bt = type1.t & VT_BTYPE;
3657 if (bt != VT_INT &&
3658 bt != VT_BYTE &&
3659 bt != VT_SHORT &&
3660 bt != VT_BOOL &&
3661 bt != VT_ENUM &&
3662 bt != VT_LLONG)
3663 tcc_error("bitfields must have scalar type");
3664 bsize = size * 8;
3665 if (bit_size > bsize) {
3666 tcc_error("width of '%s' exceeds its type",
3667 get_tok_str(v, NULL));
3668 } else if (bit_size == bsize) {
3669 /* no need for bit fields */
3671 } else {
3672 type1.t |= VT_BITFIELD |
3673 (0 << VT_STRUCT_SHIFT) |
3674 (bit_size << (VT_STRUCT_SHIFT + 6));
3677 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3678 /* Remember we've seen a real field to check
3679 for placement of flexible array member. */
3680 c = 1;
3682 /* If member is a struct or bit-field, enforce
3683 placing into the struct (as anonymous). */
3684 if (v == 0 &&
3685 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3686 bit_size >= 0)) {
3687 v = anon_sym++;
3689 if (v) {
3690 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3691 *ps = ss;
3692 ps = &ss->next;
3694 if (tok == ';' || tok == TOK_EOF)
3695 break;
3696 skip(',');
3698 skip(';');
3700 skip('}');
3701 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3702 parse_attribute(ad);
3703 struct_layout(type, ad);
3708 /* return 1 if the basic type is a size specifier (short, long, long long) */
3709 ST_FUNC int is_btype_size(int bt)
3711 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3714 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3715 are added to the element type, copied because it could be a typedef. */
3716 static void parse_btype_qualify(CType *type, int qualifiers)
3718 while (type->t & VT_ARRAY) {
3719 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3720 type = &type->ref->type;
3722 type->t |= qualifiers;
3725 /* return 0 if no type declaration. otherwise, return the basic type
3726 and skip it.
3728 static int parse_btype(CType *type, AttributeDef *ad)
3730 int t, u, bt_size, complete, type_found, typespec_found, g;
3731 Sym *s;
3732 CType type1;
3734 memset(ad, 0, sizeof(AttributeDef));
3735 complete = 0;
3736 type_found = 0;
3737 typespec_found = 0;
3738 t = 0;
3739 while(1) {
3740 switch(tok) {
3741 case TOK_EXTENSION:
3742 /* currently, we really ignore extension */
3743 next();
3744 continue;
3746 /* basic types */
3747 case TOK_CHAR:
3748 u = VT_BYTE;
3749 basic_type:
3750 next();
3751 basic_type1:
3752 if (complete)
3753 tcc_error("too many basic types");
3754 t |= u;
3755 bt_size = is_btype_size (u & VT_BTYPE);
3756 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3757 complete = 1;
3758 typespec_found = 1;
3759 break;
3760 case TOK_VOID:
3761 u = VT_VOID;
3762 goto basic_type;
3763 case TOK_SHORT:
3764 u = VT_SHORT;
3765 goto basic_type;
3766 case TOK_INT:
3767 u = VT_INT;
3768 goto basic_type;
3769 case TOK_LONG:
3770 next();
3771 if ((t & VT_BTYPE) == VT_DOUBLE) {
3772 #ifndef TCC_TARGET_PE
3773 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3774 #endif
3775 } else if ((t & VT_BTYPE) == VT_LONG) {
3776 t = (t & ~VT_BTYPE) | VT_LLONG;
3777 } else {
3778 u = VT_LONG;
3779 goto basic_type1;
3781 break;
3782 #ifdef TCC_TARGET_ARM64
3783 case TOK_UINT128:
3784 /* GCC's __uint128_t appears in some Linux header files. Make it a
3785 synonym for long double to get the size and alignment right. */
3786 u = VT_LDOUBLE;
3787 goto basic_type;
3788 #endif
3789 case TOK_BOOL:
3790 u = VT_BOOL;
3791 goto basic_type;
3792 case TOK_FLOAT:
3793 u = VT_FLOAT;
3794 goto basic_type;
3795 case TOK_DOUBLE:
3796 next();
3797 if ((t & VT_BTYPE) == VT_LONG) {
3798 #ifdef TCC_TARGET_PE
3799 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3800 #else
3801 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3802 #endif
3803 } else {
3804 u = VT_DOUBLE;
3805 goto basic_type1;
3807 break;
3808 case TOK_ENUM:
3809 struct_decl(&type1, ad, VT_ENUM);
3810 basic_type2:
3811 u = type1.t;
3812 type->ref = type1.ref;
3813 goto basic_type1;
3814 case TOK_STRUCT:
3815 case TOK_UNION:
3816 struct_decl(&type1, ad, VT_STRUCT);
3817 goto basic_type2;
3819 /* type modifiers */
3820 case TOK_CONST1:
3821 case TOK_CONST2:
3822 case TOK_CONST3:
3823 type->t = t;
3824 parse_btype_qualify(type, VT_CONSTANT);
3825 t = type->t;
3826 next();
3827 break;
3828 case TOK_VOLATILE1:
3829 case TOK_VOLATILE2:
3830 case TOK_VOLATILE3:
3831 type->t = t;
3832 parse_btype_qualify(type, VT_VOLATILE);
3833 t = type->t;
3834 next();
3835 break;
3836 case TOK_SIGNED1:
3837 case TOK_SIGNED2:
3838 case TOK_SIGNED3:
3839 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3840 tcc_error("signed and unsigned modifier");
3841 typespec_found = 1;
3842 t |= VT_DEFSIGN;
3843 next();
3844 break;
3845 case TOK_REGISTER:
3846 case TOK_AUTO:
3847 case TOK_RESTRICT1:
3848 case TOK_RESTRICT2:
3849 case TOK_RESTRICT3:
3850 next();
3851 break;
3852 case TOK_UNSIGNED:
3853 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3854 tcc_error("signed and unsigned modifier");
3855 t |= VT_DEFSIGN | VT_UNSIGNED;
3856 next();
3857 typespec_found = 1;
3858 break;
3860 /* storage */
3861 case TOK_EXTERN:
3862 g = VT_EXTERN;
3863 goto storage;
3864 case TOK_STATIC:
3865 g = VT_STATIC;
3866 goto storage;
3867 case TOK_TYPEDEF:
3868 g = VT_TYPEDEF;
3869 goto storage;
3870 storage:
3871 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3872 tcc_error("multiple storage classes");
3873 t |= g;
3874 next();
3875 break;
3876 case TOK_INLINE1:
3877 case TOK_INLINE2:
3878 case TOK_INLINE3:
3879 t |= VT_INLINE;
3880 next();
3881 break;
3883 /* GNUC attribute */
3884 case TOK_ATTRIBUTE1:
3885 case TOK_ATTRIBUTE2:
3886 parse_attribute(ad);
3887 if (ad->a.mode) {
3888 u = ad->a.mode -1;
3889 t = (t & ~VT_BTYPE) | u;
3891 break;
3892 /* GNUC typeof */
3893 case TOK_TYPEOF1:
3894 case TOK_TYPEOF2:
3895 case TOK_TYPEOF3:
3896 next();
3897 parse_expr_type(&type1);
3898 /* remove all storage modifiers except typedef */
3899 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3900 goto basic_type2;
3901 default:
3902 if (typespec_found)
3903 goto the_end;
3904 s = sym_find(tok);
3905 if (!s || !(s->type.t & VT_TYPEDEF))
3906 goto the_end;
3908 type->t = ((s->type.t & ~VT_TYPEDEF) |
3909 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3910 type->ref = s->type.ref;
3911 if (t & (VT_CONSTANT | VT_VOLATILE))
3912 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3913 t = type->t;
3915 if (s->r) {
3916 /* get attributes from typedef */
3917 if (0 == ad->a.aligned)
3918 ad->a.aligned = s->a.aligned;
3919 if (0 == ad->a.func_call)
3920 ad->a.func_call = s->a.func_call;
3921 ad->a.packed |= s->a.packed;
3923 next();
3924 typespec_found = 1;
3925 break;
3927 type_found = 1;
3929 the_end:
3930 if (tcc_state->char_is_unsigned) {
3931 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3932 t |= VT_UNSIGNED;
3935 /* long is never used as type */
3936 if ((t & VT_BTYPE) == VT_LONG)
3937 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3938 defined TCC_TARGET_PE
3939 t = (t & ~VT_BTYPE) | VT_INT;
3940 #else
3941 t = (t & ~VT_BTYPE) | VT_LLONG;
3942 #endif
3943 type->t = t;
3944 return type_found;
3947 /* convert a function parameter type (array to pointer and function to
3948 function pointer) */
3949 static inline void convert_parameter_type(CType *pt)
3951 /* remove const and volatile qualifiers (XXX: const could be used
3952 to indicate a const function parameter) */
3953 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3954 /* array must be transformed to pointer according to ANSI C */
3955 pt->t &= ~VT_ARRAY;
3956 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3957 mk_pointer(pt);
3961 ST_FUNC void parse_asm_str(CString *astr)
3963 skip('(');
3964 parse_mult_str(astr, "string constant");
3967 /* Parse an asm label and return the token */
3968 static int asm_label_instr(void)
3970 int v;
3971 CString astr;
3973 next();
3974 parse_asm_str(&astr);
3975 skip(')');
3976 #ifdef ASM_DEBUG
3977 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3978 #endif
3979 v = tok_alloc(astr.data, astr.size - 1)->tok;
3980 cstr_free(&astr);
3981 return v;
3984 static void post_type(CType *type, AttributeDef *ad, int storage)
3986 int n, l, t1, arg_size, align;
3987 Sym **plast, *s, *first;
3988 AttributeDef ad1;
3989 CType pt;
3991 if (tok == '(') {
3992 /* function declaration */
3993 next();
3994 l = 0;
3995 first = NULL;
3996 plast = &first;
3997 arg_size = 0;
3998 if (tok != ')') {
3999 for(;;) {
4000 /* read param name and compute offset */
4001 if (l != FUNC_OLD) {
4002 if (!parse_btype(&pt, &ad1)) {
4003 if (l) {
4004 tcc_error("invalid type");
4005 } else {
4006 l = FUNC_OLD;
4007 goto old_proto;
4010 l = FUNC_NEW;
4011 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4012 break;
4013 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4014 if ((pt.t & VT_BTYPE) == VT_VOID)
4015 tcc_error("parameter declared as void");
4016 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4017 } else {
4018 old_proto:
4019 n = tok;
4020 if (n < TOK_UIDENT)
4021 expect("identifier");
4022 pt.t = VT_INT;
4023 next();
4025 convert_parameter_type(&pt);
4026 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4027 *plast = s;
4028 plast = &s->next;
4029 if (tok == ')')
4030 break;
4031 skip(',');
4032 if (l == FUNC_NEW && tok == TOK_DOTS) {
4033 l = FUNC_ELLIPSIS;
4034 next();
4035 break;
4039 /* if no parameters, then old type prototype */
4040 if (l == 0)
4041 l = FUNC_OLD;
4042 skip(')');
4043 /* NOTE: const is ignored in returned type as it has a special
4044 meaning in gcc / C++ */
4045 type->t &= ~VT_CONSTANT;
4046 /* some ancient pre-K&R C allows a function to return an array
4047 and the array brackets to be put after the arguments, such
4048 that "int c()[]" means something like "int[] c()" */
4049 if (tok == '[') {
4050 next();
4051 skip(']'); /* only handle simple "[]" */
4052 type->t |= VT_PTR;
4054 /* we push an anonymous symbol which will contain the function prototype */
4055 ad->a.func_args = arg_size;
4056 s = sym_push(SYM_FIELD, type, 0, l);
4057 s->a = ad->a;
4058 s->next = first;
4059 type->t = VT_FUNC;
4060 type->ref = s;
4061 } else if (tok == '[') {
4062 int saved_nocode_wanted = nocode_wanted;
4063 /* array definition */
4064 next();
4065 if (tok == TOK_RESTRICT1)
4066 next();
4067 n = -1;
4068 t1 = 0;
4069 if (tok != ']') {
4070 if (!local_stack || (storage & VT_STATIC))
4071 vpushi(expr_const());
4072 else {
4073 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4074 length must always be evaluated, even under nocode_wanted,
4075 so that its size slot is initialized (e.g. under sizeof
4076 or typeof). */
4077 nocode_wanted = 0;
4078 gexpr();
4080 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4081 n = vtop->c.i;
4082 if (n < 0)
4083 tcc_error("invalid array size");
4084 } else {
4085 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4086 tcc_error("size of variable length array should be an integer");
4087 t1 = VT_VLA;
4090 skip(']');
4091 /* parse next post type */
4092 post_type(type, ad, storage);
4093 if (type->t == VT_FUNC)
4094 tcc_error("declaration of an array of functions");
4095 t1 |= type->t & VT_VLA;
4097 if (t1 & VT_VLA) {
4098 loc -= type_size(&int_type, &align);
4099 loc &= -align;
4100 n = loc;
4102 vla_runtime_type_size(type, &align);
4103 gen_op('*');
4104 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4105 vswap();
4106 vstore();
4108 if (n != -1)
4109 vpop();
4110 nocode_wanted = saved_nocode_wanted;
4112 /* we push an anonymous symbol which will contain the array
4113 element type */
4114 s = sym_push(SYM_FIELD, type, 0, n);
4115 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4116 type->ref = s;
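/* Illustration (not part of the compiler): post_type() parses what follows
   the declarator name.
       int f(int a, double b);   // prototype: parameter Syms chained from
                                 // 'first', s->c = FUNC_NEW
       int g();                  // old-style declaration: s->c = FUNC_OLD
       int h(int, ...);          // s->c = FUNC_ELLIPSIS
       int v[10];                // array: s->c = 10, type becomes
                                 // VT_ARRAY | VT_PTR
       int w[n];                 // VLA (local, non-static n): the length is
                                 // evaluated now into a stack slot and the
                                 // type becomes VT_VLA | VT_PTR
   In every case an anonymous Sym holding the prototype or the element
   type is pushed and hooked into type->ref. */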
4120 /* Parse a type declaration (except basic type), and return the type
4121 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4122 expected. 'type' should contain the basic type. 'ad' is the
4123 attribute definition of the basic type. It can be modified by
4124 type_decl().
4126 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4128 Sym *s;
4129 CType type1, *type2;
4130 int qualifiers, storage;
4132 while (tok == '*') {
4133 qualifiers = 0;
4134 redo:
4135 next();
4136 switch(tok) {
4137 case TOK_CONST1:
4138 case TOK_CONST2:
4139 case TOK_CONST3:
4140 qualifiers |= VT_CONSTANT;
4141 goto redo;
4142 case TOK_VOLATILE1:
4143 case TOK_VOLATILE2:
4144 case TOK_VOLATILE3:
4145 qualifiers |= VT_VOLATILE;
4146 goto redo;
4147 case TOK_RESTRICT1:
4148 case TOK_RESTRICT2:
4149 case TOK_RESTRICT3:
4150 goto redo;
4151 /* XXX: clarify attribute handling */
4152 case TOK_ATTRIBUTE1:
4153 case TOK_ATTRIBUTE2:
4154 parse_attribute(ad);
4155 break;
4157 mk_pointer(type);
4158 type->t |= qualifiers;
4161 /* recursive type */
4162 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4163 type1.t = 0; /* XXX: same as int */
4164 if (tok == '(') {
4165 next();
4166 /* XXX: it is not correct to modify 'ad' at this point, but
4167 the syntax is not clear */
4168 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4169 parse_attribute(ad);
4170 type_decl(&type1, ad, v, td);
4171 skip(')');
4172 } else {
4173 /* type identifier */
4174 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4175 *v = tok;
4176 next();
4177 } else {
4178 if (!(td & TYPE_ABSTRACT))
4179 expect("identifier");
4180 *v = 0;
4183 storage = type->t & VT_STORAGE;
4184 type->t &= ~VT_STORAGE;
4185 post_type(type, ad, storage);
4186 type->t |= storage;
4187 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4188 parse_attribute(ad);
4190 if (!type1.t)
4191 return;
4192 /* append type at the end of type1 */
4193 type2 = &type1;
4194 for(;;) {
4195 s = type2->ref;
4196 type2 = &s->type;
4197 if (!type2->t) {
4198 *type2 = *type;
4199 break;
4202 *type = type1;
4203 type->t |= storage;
4206 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4207 ST_FUNC int lvalue_type(int t)
4209 int bt, r;
4210 r = VT_LVAL;
4211 bt = t & VT_BTYPE;
4212 if (bt == VT_BYTE || bt == VT_BOOL)
4213 r |= VT_LVAL_BYTE;
4214 else if (bt == VT_SHORT)
4215 r |= VT_LVAL_SHORT;
4216 else
4217 return r;
4218 if (t & VT_UNSIGNED)
4219 r |= VT_LVAL_UNSIGNED;
4220 return r;
4223 /* indirection with full error checking and bound check */
4224 ST_FUNC void indir(void)
4226 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4227 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4228 return;
4229 expect("pointer");
4231 if (vtop->r & VT_LVAL)
4232 gv(RC_INT);
4233 vtop->type = *pointed_type(&vtop->type);
4234 /* Arrays and functions are never lvalues */
4235 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4236 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4237 vtop->r |= lvalue_type(vtop->type.t);
4238 /* if bound checking, the referenced pointer must be checked */
4239 #ifdef CONFIG_TCC_BCHECK
4240 if (tcc_state->do_bounds_check)
4241 vtop->r |= VT_MUSTBOUND;
4242 #endif
4246 /* pass a parameter to a function and do type checking and casting */
4247 static void gfunc_param_typed(Sym *func, Sym *arg)
4249 int func_type;
4250 CType type;
4252 func_type = func->c;
4253 if (func_type == FUNC_OLD ||
4254 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4255 /* default casting : only need to convert float to double */
4256 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4257 type.t = VT_DOUBLE;
4258 gen_cast(&type);
4259 } else if (vtop->type.t & VT_BITFIELD) {
4260 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4261 type.ref = vtop->type.ref;
4262 gen_cast(&type);
4264 } else if (arg == NULL) {
4265 tcc_error("too many arguments to function");
4266 } else {
4267 type = arg->type;
4268 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4269 gen_assign_cast(&type);
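/* Illustration (not part of the compiler) of the argument handling above:
       int printf(const char *, ...);
       float f = 1.5f;
       printf("%f", f);     // variadic part: f gets the default promotion
                            // to double
       void old_fn();       // old-style declaration, no prototype
       old_fn(f);           // same default float -> double promotion
       void new_fn(short x);
       new_fn(123456);      // prototyped: gen_assign_cast() converts the
                            // argument to the declared parameter type
   In the default-promotion case bit-field arguments are first widened to
   their underlying base type. */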
4273 /* parse an expression of the form '(type)' or '(expr)' and return its
4274 type */
4275 static void parse_expr_type(CType *type)
4277 int n;
4278 AttributeDef ad;
4280 skip('(');
4281 if (parse_btype(type, &ad)) {
4282 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4283 } else {
4284 expr_type(type);
4286 skip(')');
4289 static void parse_type(CType *type)
4291 AttributeDef ad;
4292 int n;
4294 if (!parse_btype(type, &ad)) {
4295 expect("type");
4297 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4300 static void vpush_tokc(int t)
4302 CType type;
4303 type.t = t;
4304 type.ref = 0;
4305 vsetc(&type, VT_CONST, &tokc);
4308 ST_FUNC void unary(void)
4310 int n, t, align, size, r, sizeof_caller;
4311 CType type;
4312 Sym *s;
4313 AttributeDef ad;
4315 sizeof_caller = in_sizeof;
4316 in_sizeof = 0;
4317 /* XXX: GCC 2.95.3 does not compile this switch into a jump table,
4318 although one would be better here */
4319 tok_next:
4320 switch(tok) {
4321 case TOK_EXTENSION:
4322 next();
4323 goto tok_next;
4324 case TOK_CINT:
4325 case TOK_CCHAR:
4326 case TOK_LCHAR:
4327 vpushi(tokc.i);
4328 next();
4329 break;
4330 case TOK_CUINT:
4331 vpush_tokc(VT_INT | VT_UNSIGNED);
4332 next();
4333 break;
4334 case TOK_CLLONG:
4335 vpush_tokc(VT_LLONG);
4336 next();
4337 break;
4338 case TOK_CULLONG:
4339 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4340 next();
4341 break;
4342 case TOK_CFLOAT:
4343 vpush_tokc(VT_FLOAT);
4344 next();
4345 break;
4346 case TOK_CDOUBLE:
4347 vpush_tokc(VT_DOUBLE);
4348 next();
4349 break;
4350 case TOK_CLDOUBLE:
4351 vpush_tokc(VT_LDOUBLE);
4352 next();
4353 break;
4354 case TOK___FUNCTION__:
4355 if (!gnu_ext)
4356 goto tok_identifier;
4357 /* fall thru */
4358 case TOK___FUNC__:
4359 {
4360 void *ptr;
4361 int len;
4362 /* special function name identifier */
4363 len = strlen(funcname) + 1;
4364 /* generate char[len] type */
4365 type.t = VT_BYTE;
4366 mk_pointer(&type);
4367 type.t |= VT_ARRAY;
4368 type.ref->c = len;
4369 vpush_ref(&type, data_section, data_section->data_offset, len);
4370 ptr = section_ptr_add(data_section, len);
4371 memcpy(ptr, funcname, len);
4372 next();
4373 }
4374 break;
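/* E.g. inside 'void foo(void)', __func__ (or __FUNCTION__ with GNU
   extensions) evaluates to the string "foo": the 4 bytes are copied
   into the data section and a char-array reference to them is
   pushed. */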
4375 case TOK_LSTR:
4376 #ifdef TCC_TARGET_PE
4377 t = VT_SHORT | VT_UNSIGNED;
4378 #else
4379 t = VT_INT;
4380 #endif
4381 goto str_init;
4382 case TOK_STR:
4383 /* string parsing */
4384 t = VT_BYTE;
4385 str_init:
4386 if (tcc_state->warn_write_strings)
4387 t |= VT_CONSTANT;
4388 type.t = t;
4389 mk_pointer(&type);
4390 type.t |= VT_ARRAY;
4391 memset(&ad, 0, sizeof(AttributeDef));
4392 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4393 break;
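/* E.g. the literal "abc" becomes a 4-element char array initialized by
   decl_initializer_alloc(); with -Wwrite-strings the element type also
   gets VT_CONSTANT, and a wide literal like L"abc" uses the wide
   character type selected just above instead of char. */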
4394 case '(':
4395 next();
4396 /* cast ? */
4397 if (parse_btype(&type, &ad)) {
4398 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4399 skip(')');
4400 /* check ISOC99 compound literal */
4401 if (tok == '{') {
4402 /* data is allocated locally by default */
4403 if (global_expr)
4404 r = VT_CONST;
4405 else
4406 r = VT_LOCAL;
4407 /* all except arrays are lvalues */
4408 if (!(type.t & VT_ARRAY))
4409 r |= lvalue_type(type.t);
4410 memset(&ad, 0, sizeof(AttributeDef));
4411 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4412 } else {
4413 if (sizeof_caller) {
4414 vpush(&type);
4415 return;
4416 }
4417 unary();
4418 gen_cast(&type);
4419 }
4420 } else if (tok == '{') {
4421 int saved_nocode_wanted = nocode_wanted;
4422 if (const_wanted)
4423 tcc_error("expected constant");
4424 /* save all registers */
4425 save_regs(0);
4426 /* statement expression : we do not accept break/continue
4427 inside as GCC does. We do retain the nocode_wanted state,
4428 as statement expressions can't ever be entered from the
4429 outside, so any reactivation of code emission (from labels
4430 or loop heads) can be disabled again after the end of it. */
4431 block(NULL, NULL, 1);
4432 nocode_wanted = saved_nocode_wanted;
4433 skip(')');
4434 } else {
4435 gexpr();
4436 skip(')');
4437 }
4438 break;
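/* For illustration, the '(' case above handles: a cast such as
   (char)x; an ISOC99 compound literal such as (int[]){1, 2, 3},
   allocated locally (or globally while parsing an initializer); a GNU
   statement expression such as ({ int i = f(); i; }), whose value is
   the last expression of the block; and otherwise a plain
   parenthesized expression. */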
4439 case '*':
4440 next();
4441 unary();
4442 indir();
4443 break;
4444 case '&':
4445 next();
4446 unary();
4447 /* function names must be treated as function pointers,
4448 except for unary '&' and sizeof. Since we consider that
4449 functions are not lvalues, we only have to handle it
4450 there and in function calls. */
4451 /* arrays can also be used although they are not lvalues */
4452 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4453 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4454 test_lvalue();
4455 mk_pointer(&vtop->type);
4456 gaddrof();
4457 break;
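/* E.g. given 'void f(void);', both '&f' and plain 'f' end up as the
   same function pointer value, and '&arr' applied to an array yields a
   pointer to the array object itself, even though neither functions
   nor arrays are lvalues here. */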
4458 case '!':
4459 next();
4460 unary();
4461 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4462 CType boolean;
4463 boolean.t = VT_BOOL;
4464 gen_cast(&boolean);
4465 vtop->c.i = !vtop->c.i;
4466 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4467 vtop->c.i ^= 1;
4468 else {
4469 save_regs(1);
4470 vseti(VT_JMP, gvtst(1, 0));
4471 }
4472 break;
4473 case '~':
4474 next();
4475 unary();
4476 vpushi(-1);
4477 gen_op('^');
4478 break;
4479 case '+':
4480 next();
4481 unary();
4482 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4483 tcc_error("pointer not accepted for unary plus");
4484 /* In order to force a cast, we add zero, except for floating point
4485 where we really need a noop (otherwise -0.0 would be transformed
4486 into +0.0). */
4487 if (!is_float(vtop->type.t)) {
4488 vpushi(0);
4489 gen_op('+');
4490 }
4491 break;
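/* E.g. '+i' for an integer i is compiled as 'i + 0', which forces the
   arithmetic conversions, while '+d' for a double emits no code at all
   so that the sign of -0.0 is preserved. */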
4492 case TOK_SIZEOF:
4493 case TOK_ALIGNOF1:
4494 case TOK_ALIGNOF2:
4495 t = tok;
4496 next();
4497 in_sizeof++;
4498 unary_type(&type); /* this resets in_sizeof to 0 */
4499 size = type_size(&type, &align);
4500 if (t == TOK_SIZEOF) {
4501 if (!(type.t & VT_VLA)) {
4502 if (size < 0)
4503 tcc_error("sizeof applied to an incomplete type");
4504 vpushs(size);
4505 } else {
4506 vla_runtime_type_size(&type, &align);
4507 }
4508 } else {
4509 vpushs(align);
4510 }
4511 vtop->type.t |= VT_UNSIGNED;
4512 break;
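/* E.g. 'sizeof(int[n])' on a VLA computes its size at run time via
   vla_runtime_type_size(), 'sizeof' on an incomplete type such as an
   undefined struct is rejected, and '__alignof__(double)' pushes the
   alignment; in every case the result is made unsigned. */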
4514 case TOK_builtin_expect:
4515 {
4516 /* __builtin_expect is a no-op for now */
4517 next();
4518 skip('(');
4519 expr_eq();
4520 skip(',');
4521 nocode_wanted++;
4522 expr_lor_const();
4523 vpop();
4524 nocode_wanted--;
4525 skip(')');
4526 }
4527 break;
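/* E.g. '__builtin_expect(x > 0, 1)' simply evaluates to 'x > 0': the
   expected value is parsed with code generation disabled and then
   discarded. */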
4528 case TOK_builtin_types_compatible_p:
4529 {
4530 CType type1, type2;
4531 next();
4532 skip('(');
4533 parse_type(&type1);
4534 skip(',');
4535 parse_type(&type2);
4536 skip(')');
4537 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4538 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4539 vpushi(is_compatible_types(&type1, &type2));
4540 }
4541 break;
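/* E.g. '__builtin_types_compatible_p(int, const int)' yields 1 because
   top-level qualifiers are stripped first, while
   '__builtin_types_compatible_p(int, unsigned int)' yields 0. */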
4542 case TOK_builtin_choose_expr:
4543 {
4544 int64_t c;
4545 next();
4546 skip('(');
4547 c = expr_const64();
4548 skip(',');
4549 if (!c) {
4550 nocode_wanted++;
4551 }
4552 expr_eq();
4553 if (!c) {
4554 vpop();
4555 nocode_wanted--;
4556 }
4557 skip(',');
4558 if (c) {
4559 nocode_wanted++;
4560 }
4561 expr_eq();
4562 if (c) {
4563 vpop();
4564 nocode_wanted--;
4565 }
4566 skip(')');
4567 }
4568 break;
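/* E.g. '__builtin_choose_expr(1, f(), g())' parses both operands but
   only keeps and generates code for 'f()'; the condition is read with
   expr_const64() and must therefore be an integer constant
   expression. */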
4569 case TOK_builtin_constant_p:
4570 {
4571 int res;
4572 next();
4573 skip('(');
4574 nocode_wanted++;
4575 gexpr();
4576 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4577 vpop();
4578 nocode_wanted--;
4579 skip(')');
4580 vpushi(res);
4581 }
4582 break;
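/* E.g. '__builtin_constant_p(3 * 4)' yields 1 while
   '__builtin_constant_p(x)' for an ordinary variable yields 0; the
   argument is evaluated with code generation disabled and popped
   immediately. */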
4583 case TOK_builtin_frame_address:
4584 case TOK_builtin_return_address:
4585 {
4586 int tok1 = tok;
4587 int level;
4588 CType type;
4589 next();
4590 skip('(');
4591 if (tok != TOK_CINT) {
4592 tcc_error("%s only takes positive integers",
4593 tok1 == TOK_builtin_return_address ?
4594 "__builtin_return_address" :
4595 "__builtin_frame_address");
4597 level = (uint32_t)tokc.i;
4598 next();
4599 skip(')');
4600 type.t = VT_VOID;
4601 mk_pointer(&type);
4602 vset(&type, VT_LOCAL, 0); /* local frame */
4603 while (level--) {
4604 mk_pointer(&vtop->type);
4605 indir(); /* -> parent frame */
4606 }
4607 if (tok1 == TOK_builtin_return_address) {
4608 // assume return address is just above frame pointer on stack
4609 vpushi(PTR_SIZE);
4610 gen_op('+');
4611 mk_pointer(&vtop->type);
4612 indir();
4613 }
4614 }
4615 break;
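/* Rough sketch of the frame walk above, assuming the usual layout in
   which the saved frame pointer is stored at offset 0 of the current
   frame and the return address just above it:
   __builtin_frame_address(0)  -> frame pointer
   __builtin_frame_address(1)  -> *(void **)frame_pointer
   __builtin_return_address(0) -> *(void **)((char *)frame_pointer + PTR_SIZE) */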
4616 #ifdef TCC_TARGET_X86_64
4617 #ifdef TCC_TARGET_PE
4618 case TOK_builtin_va_start:
4619 {
4620 next();
4621 skip('(');
4622 expr_eq();
4623 skip(',');
4624 expr_eq();
4625 skip(')');
4626 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4627 tcc_error("__builtin_va_start expects a local variable");
4628 vtop->r &= ~(VT_LVAL | VT_REF);
4629 vtop->type = char_pointer_type;
4630 vtop->c.i += 8;
4631 vstore();
4632 }
4633 break;
4634 #else
4635 case TOK_builtin_va_arg_types:
4636 {
4637 CType type;
4638 next();
4639 skip('(');
4640 parse_type(&type);
4641 skip(')');
4642 vpushi(classify_x86_64_va_arg(&type));
4643 }
4644 break;
4645 #endif
4646 #endif
4648 #ifdef TCC_TARGET_ARM64
4649 case TOK___va_start: {
4650 next();
4651 skip('(');
4652 expr_eq();
4653 skip(',');
4654 expr_eq();
4655 skip(')');
4656 //xx check types
4657 gen_va_start();
4658 vpushi(0);
4659 vtop->type.t = VT_VOID;
4660 break;
4661 }
4662 case TOK___va_arg: {
4663 CType type;
4664 next();
4665 skip('(');
4666 expr_eq();
4667 skip(',');
4668 parse_type(&type);
4669 skip(')');
4670 //xx check types
4671 gen_va_arg(&type);
4672 vtop->type = type;
4673 break;
4674 }
4675 case TOK___arm64_clear_cache: {
4676 next();
4677 skip('(');
4678 expr_eq();
4679 skip(',');
4680 expr_eq();
4681 skip(')');
4682 gen_clear_cache();
4683 vpushi(0);
4684 vtop->type.t = VT_VOID;
4685 break;
4686 }
4687 #endif
4688 /* pre operations */
4689 case TOK_INC:
4690 case TOK_DEC:
4691 t = tok;
4692 next();
4693 unary();
4694 inc(0, t);
4695 break;
4696 case '-':
4697 next();
4698 unary();
4699 t = vtop->type.t & VT_BTYPE;
4700 if (is_float(t)) {
4701 /* In IEEE negate(x) isn't subtract(0,x), but rather
4702 subtract(-0, x). */
4703 vpush(&vtop->type);
4704 if (t == VT_FLOAT)
4705 vtop->c.f = -1.0 * 0.0;
4706 else if (t == VT_DOUBLE)
4707 vtop->c.d = -1.0 * 0.0;
4708 else