[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack location of the variable that holds the saved stack pointer while VLAs modify it */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
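#if 0
/* Illustration only (not part of TCC): how the bit trick above behaves.  It
   assumes a little-endian IEEE-754 host, where p[1] holds the sign, exponent
   and high mantissa bits.  OR-ing with 0x800fffff sets every bit except the
   11 exponent bits, so the "+ 1" wraps to 0 exactly when the exponent is all
   ones, i.e. for Inf and NaN. */
static int ieee_finite_demo(void)
{
    double zero = 0.0;
    double vals[3];
    int i, ok = 1;
    vals[0] = 1.0;          /* finite */
    vals[1] = 1.0 / zero;   /* +Inf   */
    vals[2] = zero / zero;  /* NaN    */
    for (i = 0; i < 3; i++)
        ok &= ieee_finite(vals[i]) == (i == 0);
    return ok; /* expected to be 1 on such hosts */
}
#endif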
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
142 /* ------------------------------------------------------------------------- */
143 /* start of translation unit info */
144 ST_FUNC void tcc_debug_start(TCCState *s1)
146 if (s1->do_debug) {
147 char buf[512];
149 /* file info: full path + filename */
150 section_sym = put_elf_sym(symtab_section, 0, 0,
151 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
152 text_section->sh_num, NULL);
153 getcwd(buf, sizeof(buf));
154 #ifdef _WIN32
155 normalize_slashes(buf);
156 #endif
157 pstrcat(buf, sizeof(buf), "/");
158 put_stabs_r(buf, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 put_stabs_r(file->filename, N_SO, 0, 0,
161 text_section->data_offset, text_section, section_sym);
162 last_ind = 0;
163 last_line_num = 0;
166 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
167 symbols can be safely used */
168 put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
170 SHN_ABS, file->filename);
173 /* put end of translation unit info */
174 ST_FUNC void tcc_debug_end(TCCState *s1)
176 if (!s1->do_debug)
177 return;
178 put_stabs_r(NULL, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
183 /* generate line number info */
184 ST_FUNC void tcc_debug_line(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 if ((last_line_num != file->line_num || last_ind != ind)) {
189 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
190 last_ind = ind;
191 last_line_num = file->line_num;
195 /* put function symbol */
196 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
198 char buf[512];
200 if (!s1->do_debug)
201 return;
203 /* stabs info */
204 /* XXX: we put a dummy type here */
205 snprintf(buf, sizeof(buf), "%s:%c1",
206 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
207 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
208 cur_text_section, sym->c);
209 /* //gr gdb wants a line at the function */
210 put_stabn(N_SLINE, 0, file->line_num, 0);
212 last_ind = 0;
213 last_line_num = 0;
216 /* put function size */
217 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
219 if (!s1->do_debug)
220 return;
221 put_stabn(N_FUN, 0, 0, size);
224 /* ------------------------------------------------------------------------- */
225 ST_FUNC void tccgen_start(TCCState *s1)
227 cur_text_section = NULL;
228 funcname = "";
229 anon_sym = SYM_FIRST_ANOM;
230 section_sym = 0;
231 const_wanted = 0;
232 nocode_wanted = 1;
234 /* define some often used types */
235 int_type.t = VT_INT;
236 char_pointer_type.t = VT_BYTE;
237 mk_pointer(&char_pointer_type);
238 #if PTR_SIZE == 4
239 size_type.t = VT_INT;
240 #else
241 size_type.t = VT_LLONG;
242 #endif
243 func_old_type.t = VT_FUNC;
244 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
246 tcc_debug_start(s1);
248 #ifdef TCC_TARGET_ARM
249 arm_init(s1);
250 #endif
253 ST_FUNC void tccgen_end(TCCState *s1)
255 gen_inline_functions(s1);
256 check_vstack();
257 /* end of translation unit info */
258 tcc_debug_end(s1);
261 /* ------------------------------------------------------------------------- */
262 /* update sym->c so that it points to an external symbol in section
263 'section' with value 'value' */
265 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
266 addr_t value, unsigned long size,
267 int can_add_underscore)
269 int sym_type, sym_bind, sh_num, info, other;
270 ElfW(Sym) *esym;
271 const char *name;
272 char buf1[256];
274 #ifdef CONFIG_TCC_BCHECK
275 char buf[32];
276 #endif
278 if (section == NULL)
279 sh_num = SHN_UNDEF;
280 else if (section == SECTION_ABS)
281 sh_num = SHN_ABS;
282 else
283 sh_num = section->sh_num;
285 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
286 sym_type = STT_FUNC;
287 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
288 sym_type = STT_NOTYPE;
289 } else {
290 sym_type = STT_OBJECT;
293 if (sym->type.t & VT_STATIC)
294 sym_bind = STB_LOCAL;
295 else {
296 if (sym->type.t & VT_WEAK)
297 sym_bind = STB_WEAK;
298 else
299 sym_bind = STB_GLOBAL;
302 if (!sym->c) {
303 name = get_tok_str(sym->v, NULL);
304 #ifdef CONFIG_TCC_BCHECK
305 if (tcc_state->do_bounds_check) {
306 /* XXX: avoid doing that for statics ? */
307 /* if bounds checking is activated, we change some function
308 names by adding the "__bound" prefix */
309 switch(sym->v) {
310 #ifdef TCC_TARGET_PE
311 /* XXX: we rely only on malloc hooks */
312 case TOK_malloc:
313 case TOK_free:
314 case TOK_realloc:
315 case TOK_memalign:
316 case TOK_calloc:
317 #endif
318 case TOK_memcpy:
319 case TOK_memmove:
320 case TOK_memset:
321 case TOK_strlen:
322 case TOK_strcpy:
323 case TOK_alloca:
324 strcpy(buf, "__bound_");
325 strcat(buf, name);
326 name = buf;
327 break;
330 #endif
331 other = 0;
333 #ifdef TCC_TARGET_PE
334 if (sym->type.t & VT_EXPORT)
335 other |= ST_PE_EXPORT;
336 if (sym_type == STT_FUNC && sym->type.ref) {
337 Sym *ref = sym->type.ref;
338 if (ref->a.func_export)
339 other |= ST_PE_EXPORT;
340 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
341 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
342 name = buf1;
343 other |= ST_PE_STDCALL;
344 can_add_underscore = 0;
346 } else {
347 if (find_elf_sym(tcc_state->dynsymtab_section, name))
348 other |= ST_PE_IMPORT;
349 if (sym->type.t & VT_IMPORT)
350 other |= ST_PE_IMPORT;
352 #else
353 if (! (sym->type.t & VT_STATIC))
354 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
355 #endif
356 if (tcc_state->leading_underscore && can_add_underscore) {
357 buf1[0] = '_';
358 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
359 name = buf1;
361 if (sym->asm_label) {
362 name = get_tok_str(sym->asm_label, NULL);
364 info = ELFW(ST_INFO)(sym_bind, sym_type);
365 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
366 } else {
367 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
368 esym->st_value = value;
369 esym->st_size = size;
370 esym->st_shndx = sh_num;
374 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
375 addr_t value, unsigned long size)
377 put_extern_sym2(sym, section, value, size, 1);
380 /* add a new relocation entry to symbol 'sym' in section 's' */
381 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
382 addr_t addend)
384 int c = 0;
386 if (nocode_wanted && s == cur_text_section)
387 return;
389 if (sym) {
390 if (0 == sym->c)
391 put_extern_sym(sym, NULL, 0, 0);
392 c = sym->c;
395 /* now we can add ELF relocation info */
396 put_elf_reloca(symtab_section, s, offset, type, c, addend);
399 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
401 greloca(s, sym, offset, type, 0);
404 /* ------------------------------------------------------------------------- */
405 /* symbol allocator */
406 static Sym *__sym_malloc(void)
408 Sym *sym_pool, *sym, *last_sym;
409 int i;
411 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
412 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
414 last_sym = sym_free_first;
415 sym = sym_pool;
416 for(i = 0; i < SYM_POOL_NB; i++) {
417 sym->next = last_sym;
418 last_sym = sym;
419 sym++;
421 sym_free_first = last_sym;
422 return last_sym;
425 static inline Sym *sym_malloc(void)
427 Sym *sym;
428 #ifndef SYM_DEBUG
429 sym = sym_free_first;
430 if (!sym)
431 sym = __sym_malloc();
432 sym_free_first = sym->next;
433 return sym;
434 #else
435 sym = tcc_malloc(sizeof(Sym));
436 return sym;
437 #endif
440 ST_INLN void sym_free(Sym *sym)
442 #ifndef SYM_DEBUG
443 sym->next = sym_free_first;
444 sym_free_first = sym;
445 #else
446 tcc_free(sym);
447 #endif
450 /* push, without hashing */
451 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
453 Sym *s;
455 s = sym_malloc();
456 s->scope = 0;
457 s->v = v;
458 s->type.t = t;
459 s->type.ref = NULL;
460 #ifdef _WIN64
461 s->d = NULL;
462 #endif
463 s->c = c;
464 s->next = NULL;
465 /* add in stack */
466 s->prev = *ps;
467 *ps = s;
468 return s;
471 /* find a symbol and return its associated structure. 's' is the top
472 of the symbol stack */
473 ST_FUNC Sym *sym_find2(Sym *s, int v)
475 while (s) {
476 if (s->v == v)
477 return s;
478 else if (s->v == -1)
479 return NULL;
480 s = s->prev;
482 return NULL;
485 /* structure lookup */
486 ST_INLN Sym *struct_find(int v)
488 v -= TOK_IDENT;
489 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
490 return NULL;
491 return table_ident[v]->sym_struct;
494 /* find an identifier */
495 ST_INLN Sym *sym_find(int v)
497 v -= TOK_IDENT;
498 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
499 return NULL;
500 return table_ident[v]->sym_identifier;
503 /* push a given symbol on the symbol stack */
504 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
506 Sym *s, **ps;
507 TokenSym *ts;
509 if (local_stack)
510 ps = &local_stack;
511 else
512 ps = &global_stack;
513 s = sym_push2(ps, v, type->t, c);
514 s->type.ref = type->ref;
515 s->r = r;
516 /* don't record fields or anonymous symbols */
517 /* XXX: simplify */
518 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
519 /* record symbol in token array */
520 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
521 if (v & SYM_STRUCT)
522 ps = &ts->sym_struct;
523 else
524 ps = &ts->sym_identifier;
525 s->prev_tok = *ps;
526 *ps = s;
527 s->scope = local_scope;
528 if (s->prev_tok && s->prev_tok->scope == s->scope)
529 tcc_error("redeclaration of '%s'",
530 get_tok_str(v & ~SYM_STRUCT, NULL));
532 return s;
535 /* push a global identifier */
536 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
538 Sym *s, **ps;
539 s = sym_push2(&global_stack, v, t, c);
540 /* don't record anonymous symbol */
541 if (v < SYM_FIRST_ANOM) {
542 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
543 /* modify the top most local identifier, so that
544 sym_identifier will point to 's' when popped */
545 while (*ps != NULL)
546 ps = &(*ps)->prev_tok;
547 s->prev_tok = NULL;
548 *ps = s;
550 return s;
553 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
554 pop them yet from the list, but do remove them from the token array. */
555 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
557 Sym *s, *ss, **ps;
558 TokenSym *ts;
559 int v;
561 s = *ptop;
562 while(s != b) {
563 ss = s->prev;
564 v = s->v;
565 /* remove symbol in token array */
566 /* XXX: simplify */
567 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
568 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
569 if (v & SYM_STRUCT)
570 ps = &ts->sym_struct;
571 else
572 ps = &ts->sym_identifier;
573 *ps = s->prev_tok;
575 if (!keep)
576 sym_free(s);
577 s = ss;
579 if (!keep)
580 *ptop = b;
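/* Illustration only (not part of the original source): the sym_push()/sym_pop()
   pair is what implements block scope.  When compiling
       int x;              // recorded on global_stack
       void f(void) {
           int x;          // recorded on local_stack, shadows the global
       }                   // end of block: sym_pop(&local_stack, ...)
   sym_find() always returns the innermost declaration through the token's
   sym_identifier field, and sym_pop() unlinks it again so the outer 'x'
   becomes visible once the block closes. */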
583 static void weaken_symbol(Sym *sym)
585 sym->type.t |= VT_WEAK;
586 if (sym->c > 0) {
587 int esym_type;
588 ElfW(Sym) *esym;
590 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
591 esym_type = ELFW(ST_TYPE)(esym->st_info);
592 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
596 static void apply_visibility(Sym *sym, CType *type)
598 int vis = sym->type.t & VT_VIS_MASK;
599 int vis2 = type->t & VT_VIS_MASK;
600 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
601 vis = vis2;
602 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
604 else
605 vis = (vis < vis2) ? vis : vis2;
606 sym->type.t &= ~VT_VIS_MASK;
607 sym->type.t |= vis;
609 if (sym->c > 0) {
610 ElfW(Sym) *esym;
612 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
613 vis >>= VT_VIS_SHIFT;
614 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
618 /* ------------------------------------------------------------------------- */
620 static void vsetc(CType *type, int r, CValue *vc)
622 int v;
624 if (vtop >= vstack + (VSTACK_SIZE - 1))
625 tcc_error("memory full (vstack)");
626 /* cannot leave cpu flags live if other instructions are generated. Also
627 avoid leaving VT_JMP anywhere except on the top of the stack
628 because it would complicate the code generator.
630 Don't do this when nocode_wanted. vtop might come from
631 !nocode_wanted regions (see 88_codeopt.c) and transforming
632 it to a register without actually generating code is wrong
633 as their value might still be used for real. All values
634 we push under nocode_wanted will eventually be popped
635 again, so that the VT_CMP/VT_JMP value will be in vtop
636 when code is unsuppressed again.
638 Same logic below in vswap(); */
639 if (vtop >= vstack && !nocode_wanted) {
640 v = vtop->r & VT_VALMASK;
641 if (v == VT_CMP || (v & ~1) == VT_JMP)
642 gv(RC_INT);
645 vtop++;
646 vtop->type = *type;
647 vtop->r = r;
648 vtop->r2 = VT_CONST;
649 vtop->c = *vc;
650 vtop->sym = NULL;
653 ST_FUNC void vswap(void)
655 SValue tmp;
656 /* cannot vswap cpu flags. See comment at vsetc() above */
657 if (vtop >= vstack && !nocode_wanted) {
658 int v = vtop->r & VT_VALMASK;
659 if (v == VT_CMP || (v & ~1) == VT_JMP)
660 gv(RC_INT);
662 tmp = vtop[0];
663 vtop[0] = vtop[-1];
664 vtop[-1] = tmp;
667 /* pop stack value */
668 ST_FUNC void vpop(void)
670 int v;
671 v = vtop->r & VT_VALMASK;
672 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
673 /* for x86, we need to pop the FP stack */
674 if (v == TREG_ST0) {
675 o(0xd8dd); /* fstp %st(0) */
676 } else
677 #endif
678 if (v == VT_JMP || v == VT_JMPI) {
679 /* need to put correct jump if && or || without test */
680 gsym(vtop->c.i);
682 vtop--;
685 /* push constant of type "type" with useless value */
686 ST_FUNC void vpush(CType *type)
688 CValue cval;
689 vsetc(type, VT_CONST, &cval);
692 /* push integer constant */
693 ST_FUNC void vpushi(int v)
695 CValue cval;
696 cval.i = v;
697 vsetc(&int_type, VT_CONST, &cval);
700 /* push a pointer sized constant */
701 static void vpushs(addr_t v)
703 CValue cval;
704 cval.i = v;
705 vsetc(&size_type, VT_CONST, &cval);
708 /* push arbitrary 64bit constant */
709 ST_FUNC void vpush64(int ty, unsigned long long v)
711 CValue cval;
712 CType ctype;
713 ctype.t = ty;
714 ctype.ref = NULL;
715 cval.i = v;
716 vsetc(&ctype, VT_CONST, &cval);
719 /* push long long constant */
720 static inline void vpushll(long long v)
722 vpush64(VT_LLONG, v);
725 ST_FUNC void vset(CType *type, int r, long v)
727 CValue cval;
729 cval.i = v;
730 vsetc(type, r, &cval);
733 static void vseti(int r, int v)
735 CType type;
736 type.t = VT_INT;
737 type.ref = 0;
738 vset(&type, r, v);
741 ST_FUNC void vpushv(SValue *v)
743 if (vtop >= vstack + (VSTACK_SIZE - 1))
744 tcc_error("memory full (vstack)");
745 vtop++;
746 *vtop = *v;
749 static void vdup(void)
751 vpushv(vtop);
754 /* rotate the first n stack elements to the bottom
755 I1 ... In -> I2 ... In I1 [top is right]
757 ST_FUNC void vrotb(int n)
759 int i;
760 SValue tmp;
762 tmp = vtop[-n + 1];
763 for(i=-n+1;i!=0;i++)
764 vtop[i] = vtop[i+1];
765 vtop[0] = tmp;
768 /* rotate the n elements before entry e towards the top
769 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
771 ST_FUNC void vrote(SValue *e, int n)
773 int i;
774 SValue tmp;
776 tmp = *e;
777 for(i = 0;i < n - 1; i++)
778 e[-i] = e[-i - 1];
779 e[-n + 1] = tmp;
782 /* rotate the first n stack elements to the top
783 I1 ... In -> In I1 ... I(n-1) [top is right]
785 ST_FUNC void vrott(int n)
787 vrote(vtop, n);
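#if 0
/* Illustration only (not part of TCC): net effect of the rotation helpers on
   a value stack holding, from bottom to top, A B C.  gen_opl() relies on the
   same helpers to move a freshly pushed helper symbol below its arguments. */
static void vrot_demo(void)
{
    /* stack (bottom to top): A B C */
    vrotb(3);   /* -> B C A : the old bottom of the group becomes the top */
    vrott(3);   /* -> A B C : the inverse rotation restores the order */
}
#endif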
790 /* push a symbol value of TYPE */
791 static inline void vpushsym(CType *type, Sym *sym)
793 CValue cval;
794 cval.i = 0;
795 vsetc(type, VT_CONST | VT_SYM, &cval);
796 vtop->sym = sym;
799 /* Return a static symbol pointing to a section */
800 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
802 int v;
803 Sym *sym;
805 v = anon_sym++;
806 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
807 sym->type.ref = type->ref;
808 sym->r = VT_CONST | VT_SYM;
809 put_extern_sym(sym, sec, offset, size);
810 return sym;
813 /* push a reference to a section offset by adding a dummy symbol */
814 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
816 vpushsym(type, get_sym_ref(type, sec, offset, size));
819 /* define a new external reference to a symbol 'v' of type 'u' */
820 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
822 Sym *s;
824 s = sym_find(v);
825 if (!s) {
826 /* push forward reference */
827 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
828 s->type.ref = type->ref;
829 s->r = r | VT_CONST | VT_SYM;
831 return s;
834 /* define a new external reference to a symbol 'v' */
835 static Sym *external_sym(int v, CType *type, int r)
837 Sym *s;
839 s = sym_find(v);
840 if (!s) {
841 /* push forward reference */
842 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
843 s->type.t |= VT_EXTERN;
844 } else if (s->type.ref == func_old_type.ref) {
845 s->type.ref = type->ref;
846 s->r = r | VT_CONST | VT_SYM;
847 s->type.t |= VT_EXTERN;
848 } else if (!is_compatible_types(&s->type, type)) {
849 tcc_error("incompatible types for redefinition of '%s'",
850 get_tok_str(v, NULL));
852 /* Merge some storage attributes. */
853 if (type->t & VT_WEAK)
854 weaken_symbol(s);
856 if (type->t & VT_VIS_MASK)
857 apply_visibility(s, type);
859 return s;
862 /* push a reference to global symbol v */
863 ST_FUNC void vpush_global_sym(CType *type, int v)
865 vpushsym(type, external_global_sym(v, type, 0));
868 /* save registers up to (vtop - n) stack entry */
869 ST_FUNC void save_regs(int n)
871 SValue *p, *p1;
872 for(p = vstack, p1 = vtop - n; p <= p1; p++)
873 save_reg(p->r);
876 /* save r to the memory stack, and mark it as being free */
877 ST_FUNC void save_reg(int r)
879 save_reg_upstack(r, 0);
882 /* save r to the memory stack, and mark it as being free,
883 if seen up to (vtop - n) stack entry */
884 ST_FUNC void save_reg_upstack(int r, int n)
886 int l, saved, size, align;
887 SValue *p, *p1, sv;
888 CType *type;
890 if ((r &= VT_VALMASK) >= VT_CONST)
891 return;
892 if (nocode_wanted)
893 return;
895 /* modify all stack values */
896 saved = 0;
897 l = 0;
898 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
899 if ((p->r & VT_VALMASK) == r ||
900 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
901 /* must save value on stack if not already done */
902 if (!saved) {
903 /* NOTE: must reload 'r' because r might be equal to r2 */
904 r = p->r & VT_VALMASK;
905 /* store register in the stack */
906 type = &p->type;
907 if ((p->r & VT_LVAL) ||
908 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
909 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
910 type = &char_pointer_type;
911 #else
912 type = &int_type;
913 #endif
914 if ((type->t & VT_BTYPE) == VT_FLOAT) {
915 /* cast to DOUBLE to avoid precision loss */
916 type->t = (type->t & ~VT_BTYPE) | VT_DOUBLE;
918 size = type_size(type, &align);
919 loc = (loc - size) & -align;
920 sv.type.t = type->t;
921 sv.r = VT_LOCAL | VT_LVAL;
922 sv.c.i = loc;
923 store(r, &sv);
924 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
925 /* x86 specific: need to pop fp register ST0 if saved */
926 if (r == TREG_ST0) {
927 o(0xd8dd); /* fstp %st(0) */
929 #endif
930 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
931 /* special long long case */
932 if ((type->t & VT_BTYPE) == VT_LLONG) {
933 sv.c.i += 4;
934 store(p->r2, &sv);
936 #endif
937 l = loc;
938 saved = 1;
940 /* mark that stack entry as being saved on the stack */
941 if (p->r & VT_LVAL) {
942 /* also clear the bounded flag because the
943 relocation address of the function was stored in
944 p->c.i */
945 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
946 } else {
947 p->r = lvalue_type(p->type.t) | VT_LOCAL;
949 p->r2 = VT_CONST;
950 p->c.i = l;
955 #ifdef TCC_TARGET_ARM
956 /* find a register of class 'rc2' with at most one reference on stack.
957 * If none, call get_reg(rc) */
958 ST_FUNC int get_reg_ex(int rc, int rc2)
960 int r;
961 SValue *p;
963 for(r=0;r<NB_REGS;r++) {
964 if (reg_classes[r] & rc2) {
965 int n;
966 n=0;
967 for(p = vstack; p <= vtop; p++) {
968 if ((p->r & VT_VALMASK) == r ||
969 (p->r2 & VT_VALMASK) == r)
970 n++;
972 if (n <= 1)
973 return r;
976 return get_reg(rc);
978 #endif
980 /* find a free register of class 'rc'. If none, save one register */
981 ST_FUNC int get_reg(int rc)
983 int r;
984 SValue *p;
986 /* find a free register */
987 for(r=0;r<NB_REGS;r++) {
988 if (reg_classes[r] & rc) {
989 if (nocode_wanted)
990 return r;
991 for(p=vstack;p<=vtop;p++) {
992 if ((p->r & VT_VALMASK) == r ||
993 (p->r2 & VT_VALMASK) == r)
994 goto notfound;
996 return r;
998 notfound: ;
1001 /* no register left : free the first one on the stack (VERY
1002 IMPORTANT to start from the bottom to ensure that we don't
1003 spill registers used in gen_opi()) */
1004 for(p=vstack;p<=vtop;p++) {
1005 /* look at second register (if long long) */
1006 r = p->r2 & VT_VALMASK;
1007 if (r < VT_CONST && (reg_classes[r] & rc))
1008 goto save_found;
1009 r = p->r & VT_VALMASK;
1010 if (r < VT_CONST && (reg_classes[r] & rc)) {
1011 save_found:
1012 save_reg(r);
1013 return r;
1016 /* Should never come here */
1017 return -1;
1020 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1021 if needed */
1022 static void move_reg(int r, int s, int t)
1024 SValue sv;
1026 if (r != s) {
1027 save_reg(r);
1028 sv.type.t = t;
1029 sv.type.ref = NULL;
1030 sv.r = s;
1031 sv.c.i = 0;
1032 load(r, &sv);
1036 /* get address of vtop (vtop MUST BE an lvalue) */
1037 ST_FUNC void gaddrof(void)
1039 if (vtop->r & VT_REF)
1040 gv(RC_INT);
1041 vtop->r &= ~VT_LVAL;
1042 /* tricky: if saved lvalue, then we can go back to lvalue */
1043 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1044 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1049 #ifdef CONFIG_TCC_BCHECK
1050 /* generate lvalue bound code */
1051 static void gbound(void)
1053 int lval_type;
1054 CType type1;
1056 vtop->r &= ~VT_MUSTBOUND;
1057 /* if lvalue, then use checking code before dereferencing */
1058 if (vtop->r & VT_LVAL) {
1059 /* if not VT_BOUNDED value, then make one */
1060 if (!(vtop->r & VT_BOUNDED)) {
1061 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1062 /* must save type because we must set it to int to get pointer */
1063 type1 = vtop->type;
1064 vtop->type.t = VT_PTR;
1065 gaddrof();
1066 vpushi(0);
1067 gen_bounded_ptr_add();
1068 vtop->r |= lval_type;
1069 vtop->type = type1;
1071 /* then check for dereferencing */
1072 gen_bounded_ptr_deref();
1075 #endif
1077 /* store vtop in a register belonging to class 'rc'. lvalues are
1078 converted to values. Cannot be used for values that cannot be
1079 converted to a register value (such as structures). */
1080 ST_FUNC int gv(int rc)
1082 int r, bit_pos, bit_size, size, align, i;
1083 int rc2;
1085 /* NOTE: get_reg can modify vstack[] */
1086 if (vtop->type.t & VT_BITFIELD) {
1087 CType type;
1088 int bits = 32;
1089 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1090 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1091 /* remove bit field info to avoid loops */
1092 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1093 /* cast to int to propagate signedness in following ops */
1094 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1095 type.t = VT_LLONG;
1096 bits = 64;
1097 } else
1098 type.t = VT_INT;
1099 if((vtop->type.t & VT_UNSIGNED) ||
1100 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1101 type.t |= VT_UNSIGNED;
1102 gen_cast(&type);
1103 /* generate shifts */
1104 vpushi(bits - (bit_pos + bit_size));
1105 gen_op(TOK_SHL);
1106 vpushi(bits - bit_size);
1107 /* NOTE: transformed to SHR if unsigned */
1108 gen_op(TOK_SAR);
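/* Worked example (illustration): for a signed bitfield with bit_pos == 3 and
   bit_size == 5 in a 32-bit word, the two pushes above emit (v << 24) >> 27,
   which isolates the field and sign-extends it in one pass (the SAR becomes
   an SHR for unsigned fields). */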
1109 r = gv(rc);
1110 } else {
1111 if (is_float(vtop->type.t) &&
1112 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1113 Sym *sym;
1114 int *ptr;
1115 unsigned long offset;
1116 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1117 CValue check;
1118 #endif
1120 /* XXX: unify with initializers handling ? */
1121 /* CPUs usually cannot use float constants, so we store them
1122 generically in the data segment */
1123 size = type_size(&vtop->type, &align);
1124 offset = (data_section->data_offset + align - 1) & -align;
1125 data_section->data_offset = offset;
1126 /* XXX: not portable yet */
1127 #if defined(__i386__) || defined(__x86_64__)
1128 /* Zero pad x87 tenbyte long doubles */
1129 if (size == LDOUBLE_SIZE) {
1130 vtop->c.tab[2] &= 0xffff;
1131 #if LDOUBLE_SIZE == 16
1132 vtop->c.tab[3] = 0;
1133 #endif
1135 #endif
1136 ptr = section_ptr_add(data_section, size);
1137 size = size >> 2;
1138 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1139 check.d = 1;
1140 if(check.tab[0])
1141 for(i=0;i<size;i++)
1142 ptr[i] = vtop->c.tab[size-1-i];
1143 else
1144 #endif
1145 for(i=0;i<size;i++)
1146 ptr[i] = vtop->c.tab[i];
1147 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1148 vtop->r |= VT_LVAL | VT_SYM;
1149 vtop->sym = sym;
1150 vtop->c.i = 0;
1152 #ifdef CONFIG_TCC_BCHECK
1153 if (vtop->r & VT_MUSTBOUND)
1154 gbound();
1155 #endif
1157 r = vtop->r & VT_VALMASK;
1158 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1159 #ifndef TCC_TARGET_ARM64
1160 if (rc == RC_IRET)
1161 rc2 = RC_LRET;
1162 #ifdef TCC_TARGET_X86_64
1163 else if (rc == RC_FRET)
1164 rc2 = RC_QRET;
1165 #endif
1166 #endif
1168 /* need to reload if:
1169 - constant
1170 - lvalue (need to dereference pointer)
1171 - already a register, but not in the right class */
1172 if (r >= VT_CONST
1173 || (vtop->r & VT_LVAL)
1174 || !(reg_classes[r] & rc)
1175 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1176 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1177 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1178 #else
1179 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1180 #endif
1183 r = get_reg(rc);
1184 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1185 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1186 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1187 #else
1188 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1189 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1190 unsigned long long ll;
1191 #endif
1192 int r2, original_type;
1193 original_type = vtop->type.t;
1194 /* two register type load : expand to two words
1195 temporarily */
1196 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1197 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1198 /* load constant */
1199 ll = vtop->c.i;
1200 vtop->c.i = ll; /* first word */
1201 load(r, vtop);
1202 vtop->r = r; /* save register value */
1203 vpushi(ll >> 32); /* second word */
1204 } else
1205 #endif
1206 if (vtop->r & VT_LVAL) {
1207 /* We do not want to modify the long long
1208 pointer here, so the safest (and least
1209 efficient) approach is to save all the other
1210 registers on the stack. XXX: totally inefficient. */
1211 #if 0
1212 save_regs(1);
1213 #else
1214 /* lvalue_save: save only if used further down the stack */
1215 save_reg_upstack(vtop->r, 1);
1216 #endif
1217 /* load from memory */
1218 vtop->type.t = load_type;
1219 load(r, vtop);
1220 vdup();
1221 vtop[-1].r = r; /* save register value */
1222 /* increment pointer to get second word */
1223 vtop->type.t = addr_type;
1224 gaddrof();
1225 vpushi(load_size);
1226 gen_op('+');
1227 vtop->r |= VT_LVAL;
1228 vtop->type.t = load_type;
1229 } else {
1230 /* move registers */
1231 load(r, vtop);
1232 vdup();
1233 vtop[-1].r = r; /* save register value */
1234 vtop->r = vtop[-1].r2;
1236 /* Allocate second register. Here we rely on the fact that
1237 get_reg() tries first to free r2 of an SValue. */
1238 r2 = get_reg(rc2);
1239 load(r2, vtop);
1240 vpop();
1241 /* write second register */
1242 vtop->r2 = r2;
1243 vtop->type.t = original_type;
1244 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1245 int t1, t;
1246 /* lvalue of scalar type : need to use lvalue type
1247 because of possible cast */
1248 t = vtop->type.t;
1249 t1 = t;
1250 /* compute memory access type */
1251 if (vtop->r & VT_REF)
1252 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1253 t = VT_PTR;
1254 #else
1255 t = VT_INT;
1256 #endif
1257 else if (vtop->r & VT_LVAL_BYTE)
1258 t = VT_BYTE;
1259 else if (vtop->r & VT_LVAL_SHORT)
1260 t = VT_SHORT;
1261 if (vtop->r & VT_LVAL_UNSIGNED)
1262 t |= VT_UNSIGNED;
1263 vtop->type.t = t;
1264 load(r, vtop);
1265 /* restore wanted type */
1266 vtop->type.t = t1;
1267 } else {
1268 /* one register type load */
1269 load(r, vtop);
1272 vtop->r = r;
1273 #ifdef TCC_TARGET_C67
1274 /* uses register pairs for doubles */
1275 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1276 vtop->r2 = r+1;
1277 #endif
1279 return r;
1282 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1283 ST_FUNC void gv2(int rc1, int rc2)
1285 int v;
1287 /* generate more generic register first. But VT_JMP or VT_CMP
1288 values must be generated first in all cases to avoid possible
1289 reload errors */
1290 v = vtop[0].r & VT_VALMASK;
1291 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1292 vswap();
1293 gv(rc1);
1294 vswap();
1295 gv(rc2);
1296 /* test if reload is needed for first register */
1297 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1298 vswap();
1299 gv(rc1);
1300 vswap();
1302 } else {
1303 gv(rc2);
1304 vswap();
1305 gv(rc1);
1306 vswap();
1307 /* test if reload is needed for first register */
1308 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1309 gv(rc2);
1314 #ifndef TCC_TARGET_ARM64
1315 /* wrapper around RC_FRET to return a register by type */
1316 static int rc_fret(int t)
1318 #ifdef TCC_TARGET_X86_64
1319 if (t == VT_LDOUBLE) {
1320 return RC_ST0;
1322 #endif
1323 return RC_FRET;
1325 #endif
1327 /* wrapper around REG_FRET to return a register by type */
1328 static int reg_fret(int t)
1330 #ifdef TCC_TARGET_X86_64
1331 if (t == VT_LDOUBLE) {
1332 return TREG_ST0;
1334 #endif
1335 return REG_FRET;
1338 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1339 /* expand 64bit on stack in two ints */
1340 static void lexpand(void)
1342 int u, v;
1343 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1344 v = vtop->r & (VT_VALMASK | VT_LVAL);
1345 if (v == VT_CONST) {
1346 vdup();
1347 vtop[0].c.i >>= 32;
1348 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1349 vdup();
1350 vtop[0].c.i += 4;
1351 } else {
1352 gv(RC_INT);
1353 vdup();
1354 vtop[0].r = vtop[-1].r2;
1355 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1357 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1359 #endif
1361 #ifdef TCC_TARGET_ARM
1362 /* expand long long on stack */
1363 ST_FUNC void lexpand_nr(void)
1365 int u,v;
1367 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1368 vdup();
1369 vtop->r2 = VT_CONST;
1370 vtop->type.t = VT_INT | u;
1371 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1372 if (v == VT_CONST) {
1373 vtop[-1].c.i = vtop->c.i;
1374 vtop->c.i = vtop->c.i >> 32;
1375 vtop->r = VT_CONST;
1376 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1377 vtop->c.i += 4;
1378 vtop->r = vtop[-1].r;
1379 } else if (v > VT_CONST) {
1380 vtop--;
1381 lexpand();
1382 } else
1383 vtop->r = vtop[-1].r2;
1384 vtop[-1].r2 = VT_CONST;
1385 vtop[-1].type.t = VT_INT | u;
1387 #endif
1389 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1390 /* build a long long from two ints */
1391 static void lbuild(int t)
1393 gv2(RC_INT, RC_INT);
1394 vtop[-1].r2 = vtop[0].r;
1395 vtop[-1].type.t = t;
1396 vpop();
1398 #endif
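/* Illustration: on 32-bit targets a long long lives in one SValue using two
   registers (r = low word, r2 = high word).  lexpand() above splits it into
   two VT_INT stack entries (low word below the high word) and lbuild()
   reassembles them by storing the second register back into r2. */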
1400 /* convert stack entry to register and duplicate its value in another
1401 register */
1402 static void gv_dup(void)
1404 int rc, t, r, r1;
1405 SValue sv;
1407 t = vtop->type.t;
1408 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1409 if ((t & VT_BTYPE) == VT_LLONG) {
1410 lexpand();
1411 gv_dup();
1412 vswap();
1413 vrotb(3);
1414 gv_dup();
1415 vrotb(4);
1416 /* stack: H L L1 H1 */
1417 lbuild(t);
1418 vrotb(3);
1419 vrotb(3);
1420 vswap();
1421 lbuild(t);
1422 vswap();
1423 } else
1424 #endif
1426 /* duplicate value */
1427 rc = RC_INT;
1428 sv.type.t = VT_INT;
1429 if (is_float(t)) {
1430 rc = RC_FLOAT;
1431 #ifdef TCC_TARGET_X86_64
1432 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1433 rc = RC_ST0;
1435 #endif
1436 sv.type.t = t;
1438 r = gv(rc);
1439 r1 = get_reg(rc);
1440 sv.r = r;
1441 sv.c.i = 0;
1442 load(r1, &sv); /* move r to r1 */
1443 vdup();
1444 /* duplicates value */
1445 if (r != r1)
1446 vtop->r = r1;
1450 /* Generate value test
1452 * Generate a test for any value (jump, comparison and integers) */
1453 ST_FUNC int gvtst(int inv, int t)
1455 int v = vtop->r & VT_VALMASK;
1456 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1457 vpushi(0);
1458 gen_op(TOK_NE);
1460 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1461 /* constant jmp optimization */
1462 if ((vtop->c.i != 0) != inv)
1463 t = gjmp(t);
1464 vtop--;
1465 return t;
1467 return gtst(inv, t);
1470 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1471 /* generate CPU independent (unsigned) long long operations */
1472 static void gen_opl(int op)
1474 int t, a, b, op1, c, i;
1475 int func;
1476 unsigned short reg_iret = REG_IRET;
1477 unsigned short reg_lret = REG_LRET;
1478 SValue tmp;
1480 switch(op) {
1481 case '/':
1482 case TOK_PDIV:
1483 func = TOK___divdi3;
1484 goto gen_func;
1485 case TOK_UDIV:
1486 func = TOK___udivdi3;
1487 goto gen_func;
1488 case '%':
1489 func = TOK___moddi3;
1490 goto gen_mod_func;
1491 case TOK_UMOD:
1492 func = TOK___umoddi3;
1493 gen_mod_func:
1494 #ifdef TCC_ARM_EABI
1495 reg_iret = TREG_R2;
1496 reg_lret = TREG_R3;
1497 #endif
1498 gen_func:
1499 /* call generic long long function */
1500 vpush_global_sym(&func_old_type, func);
1501 vrott(3);
1502 gfunc_call(2);
1503 vpushi(0);
1504 vtop->r = reg_iret;
1505 vtop->r2 = reg_lret;
1506 break;
1507 case '^':
1508 case '&':
1509 case '|':
1510 case '*':
1511 case '+':
1512 case '-':
1513 //pv("gen_opl A",0,2);
1514 t = vtop->type.t;
1515 vswap();
1516 lexpand();
1517 vrotb(3);
1518 lexpand();
1519 /* stack: L1 H1 L2 H2 */
1520 tmp = vtop[0];
1521 vtop[0] = vtop[-3];
1522 vtop[-3] = tmp;
1523 tmp = vtop[-2];
1524 vtop[-2] = vtop[-3];
1525 vtop[-3] = tmp;
1526 vswap();
1527 /* stack: H1 H2 L1 L2 */
1528 //pv("gen_opl B",0,4);
1529 if (op == '*') {
1530 vpushv(vtop - 1);
1531 vpushv(vtop - 1);
1532 gen_op(TOK_UMULL);
1533 lexpand();
1534 /* stack: H1 H2 L1 L2 ML MH */
1535 for(i=0;i<4;i++)
1536 vrotb(6);
1537 /* stack: ML MH H1 H2 L1 L2 */
1538 tmp = vtop[0];
1539 vtop[0] = vtop[-2];
1540 vtop[-2] = tmp;
1541 /* stack: ML MH H1 L2 H2 L1 */
1542 gen_op('*');
1543 vrotb(3);
1544 vrotb(3);
1545 gen_op('*');
1546 /* stack: ML MH M1 M2 */
1547 gen_op('+');
1548 gen_op('+');
1549 } else if (op == '+' || op == '-') {
1550 /* XXX: add non carry method too (for MIPS or alpha) */
1551 if (op == '+')
1552 op1 = TOK_ADDC1;
1553 else
1554 op1 = TOK_SUBC1;
1555 gen_op(op1);
1556 /* stack: H1 H2 (L1 op L2) */
1557 vrotb(3);
1558 vrotb(3);
1559 gen_op(op1 + 1); /* TOK_xxxC2 */
1560 } else {
1561 gen_op(op);
1562 /* stack: H1 H2 (L1 op L2) */
1563 vrotb(3);
1564 vrotb(3);
1565 /* stack: (L1 op L2) H1 H2 */
1566 gen_op(op);
1567 /* stack: (L1 op L2) (H1 op H2) */
1569 /* stack: L H */
1570 lbuild(t);
1571 break;
1572 case TOK_SAR:
1573 case TOK_SHR:
1574 case TOK_SHL:
1575 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1576 t = vtop[-1].type.t;
1577 vswap();
1578 lexpand();
1579 vrotb(3);
1580 /* stack: L H shift */
1581 c = (int)vtop->c.i;
1582 /* constant: simpler */
1583 /* NOTE: all comments are for SHL. The other cases are
1584 done by swapping words */
1585 vpop();
1586 if (op != TOK_SHL)
1587 vswap();
1588 if (c >= 32) {
1589 /* stack: L H */
1590 vpop();
1591 if (c > 32) {
1592 vpushi(c - 32);
1593 gen_op(op);
1595 if (op != TOK_SAR) {
1596 vpushi(0);
1597 } else {
1598 gv_dup();
1599 vpushi(31);
1600 gen_op(TOK_SAR);
1602 vswap();
1603 } else {
1604 vswap();
1605 gv_dup();
1606 /* stack: H L L */
1607 vpushi(c);
1608 gen_op(op);
1609 vswap();
1610 vpushi(32 - c);
1611 if (op == TOK_SHL)
1612 gen_op(TOK_SHR);
1613 else
1614 gen_op(TOK_SHL);
1615 vrotb(3);
1616 /* stack: L L H */
1617 vpushi(c);
1618 if (op == TOK_SHL)
1619 gen_op(TOK_SHL);
1620 else
1621 gen_op(TOK_SHR);
1622 gen_op('|');
1624 if (op != TOK_SHL)
1625 vswap();
1626 lbuild(t);
1627 } else {
1628 /* XXX: should provide a faster fallback on x86 ? */
1629 switch(op) {
1630 case TOK_SAR:
1631 func = TOK___ashrdi3;
1632 goto gen_func;
1633 case TOK_SHR:
1634 func = TOK___lshrdi3;
1635 goto gen_func;
1636 case TOK_SHL:
1637 func = TOK___ashldi3;
1638 goto gen_func;
1641 break;
1642 default:
1643 /* compare operations */
1644 t = vtop->type.t;
1645 vswap();
1646 lexpand();
1647 vrotb(3);
1648 lexpand();
1649 /* stack: L1 H1 L2 H2 */
1650 tmp = vtop[-1];
1651 vtop[-1] = vtop[-2];
1652 vtop[-2] = tmp;
1653 /* stack: L1 L2 H1 H2 */
1654 /* compare high */
1655 op1 = op;
1656 /* when values are equal, we need to compare low words. since
1657 the jump is inverted, we invert the test too. */
1658 if (op1 == TOK_LT)
1659 op1 = TOK_LE;
1660 else if (op1 == TOK_GT)
1661 op1 = TOK_GE;
1662 else if (op1 == TOK_ULT)
1663 op1 = TOK_ULE;
1664 else if (op1 == TOK_UGT)
1665 op1 = TOK_UGE;
1666 a = 0;
1667 b = 0;
1668 gen_op(op1);
1669 if (op == TOK_NE) {
1670 b = gvtst(0, 0);
1671 } else {
1672 a = gvtst(1, 0);
1673 if (op != TOK_EQ) {
1674 /* generate non equal test */
1675 vpushi(TOK_NE);
1676 vtop->r = VT_CMP;
1677 b = gvtst(0, 0);
1680 /* compare low. Always unsigned */
1681 op1 = op;
1682 if (op1 == TOK_LT)
1683 op1 = TOK_ULT;
1684 else if (op1 == TOK_LE)
1685 op1 = TOK_ULE;
1686 else if (op1 == TOK_GT)
1687 op1 = TOK_UGT;
1688 else if (op1 == TOK_GE)
1689 op1 = TOK_UGE;
1690 gen_op(op1);
1691 a = gvtst(1, a);
1692 gsym(b);
1693 vseti(VT_JMPI, a);
1694 break;
1697 #endif
1699 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1701 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1702 return (a ^ b) >> 63 ? -x : x;
1705 static int gen_opic_lt(uint64_t a, uint64_t b)
1707 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
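#if 0
/* Illustration only (not part of TCC): gen_opic_lt() flips the sign bit of
   both operands, which maps signed order onto unsigned order, so a plain
   unsigned '<' then implements a signed comparison. */
static int opic_lt_demo(void)
{
    /* -1 < 0 when compared as signed, even though 0xffff... > 0 unsigned */
    return gen_opic_lt((uint64_t)-1, 0);   /* expected result: 1 */
}
#endif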
1710 /* handle integer constant optimizations and various machine
1711 independent opt */
1712 static void gen_opic(int op)
1714 SValue *v1 = vtop - 1;
1715 SValue *v2 = vtop;
1716 int t1 = v1->type.t & VT_BTYPE;
1717 int t2 = v2->type.t & VT_BTYPE;
1718 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1719 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1720 uint64_t l1 = c1 ? v1->c.i : 0;
1721 uint64_t l2 = c2 ? v2->c.i : 0;
1722 int shm = (t1 == VT_LLONG) ? 63 : 31;
1724 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1725 l1 = ((uint32_t)l1 |
1726 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1727 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1728 l2 = ((uint32_t)l2 |
1729 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1731 if (c1 && c2) {
1732 switch(op) {
1733 case '+': l1 += l2; break;
1734 case '-': l1 -= l2; break;
1735 case '&': l1 &= l2; break;
1736 case '^': l1 ^= l2; break;
1737 case '|': l1 |= l2; break;
1738 case '*': l1 *= l2; break;
1740 case TOK_PDIV:
1741 case '/':
1742 case '%':
1743 case TOK_UDIV:
1744 case TOK_UMOD:
1745 /* if division by zero, generate explicit division */
1746 if (l2 == 0) {
1747 if (const_wanted)
1748 tcc_error("division by zero in constant");
1749 goto general_case;
1751 switch(op) {
1752 default: l1 = gen_opic_sdiv(l1, l2); break;
1753 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1754 case TOK_UDIV: l1 = l1 / l2; break;
1755 case TOK_UMOD: l1 = l1 % l2; break;
1757 break;
1758 case TOK_SHL: l1 <<= (l2 & shm); break;
1759 case TOK_SHR: l1 >>= (l2 & shm); break;
1760 case TOK_SAR:
1761 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1762 break;
1763 /* tests */
1764 case TOK_ULT: l1 = l1 < l2; break;
1765 case TOK_UGE: l1 = l1 >= l2; break;
1766 case TOK_EQ: l1 = l1 == l2; break;
1767 case TOK_NE: l1 = l1 != l2; break;
1768 case TOK_ULE: l1 = l1 <= l2; break;
1769 case TOK_UGT: l1 = l1 > l2; break;
1770 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1771 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1772 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1773 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1774 /* logical */
1775 case TOK_LAND: l1 = l1 && l2; break;
1776 case TOK_LOR: l1 = l1 || l2; break;
1777 default:
1778 goto general_case;
1780 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1781 l1 = ((uint32_t)l1 |
1782 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1783 v1->c.i = l1;
1784 vtop--;
1785 } else {
1786 /* if commutative ops, put c2 as constant */
1787 if (c1 && (op == '+' || op == '&' || op == '^' ||
1788 op == '|' || op == '*')) {
1789 vswap();
1790 c2 = c1; //c = c1, c1 = c2, c2 = c;
1791 l2 = l1; //l = l1, l1 = l2, l2 = l;
1793 if (!const_wanted &&
1794 c1 && ((l1 == 0 &&
1795 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1796 (l1 == -1 && op == TOK_SAR))) {
1797 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1798 vtop--;
1799 } else if (!const_wanted &&
1800 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1801 (l2 == -1 && op == '|') ||
1802 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1803 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1804 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1805 if (l2 == 1)
1806 vtop->c.i = 0;
1807 vswap();
1808 vtop--;
1809 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1810 op == TOK_PDIV) &&
1811 l2 == 1) ||
1812 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1813 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1814 l2 == 0) ||
1815 (op == '&' &&
1816 l2 == -1))) {
1817 /* filter out NOP operations like x*1, x-0, x&-1... */
1818 vtop--;
1819 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1820 /* try to use shifts instead of muls or divs */
1821 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1822 int n = -1;
1823 while (l2) {
1824 l2 >>= 1;
1825 n++;
1827 vtop->c.i = n;
1828 if (op == '*')
1829 op = TOK_SHL;
1830 else if (op == TOK_PDIV)
1831 op = TOK_SAR;
1832 else
1833 op = TOK_SHR;
1835 goto general_case;
1836 } else if (c2 && (op == '+' || op == '-') &&
1837 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1838 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1839 /* symbol + constant case */
1840 if (op == '-')
1841 l2 = -l2;
1842 l2 += vtop[-1].c.i;
1843 /* The backends can't always deal with addends to symbols
1844 larger than +-1<<31. Don't construct such. */
1845 if ((int)l2 != l2)
1846 goto general_case;
1847 vtop--;
1848 vtop->c.i = l2;
1849 } else {
1850 general_case:
1851 /* call low level op generator */
1852 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1853 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1854 gen_opl(op);
1855 else
1856 gen_opi(op);
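#if 0
/* Illustration only (not part of TCC): when both operands on the value stack
   are integer constants, gen_opic() folds the operation at compile time
   instead of emitting code. */
static void const_fold_demo(void)
{
    vpushi(6);
    vpushi(7);
    gen_op('*');   /* gen_op() dispatches to gen_opic() for integers */
    /* vtop is now a single VT_CONST entry with c.i == 42; no code emitted */
}
#endif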
1861 /* generate a floating point operation with constant propagation */
1862 static void gen_opif(int op)
1864 int c1, c2;
1865 SValue *v1, *v2;
1866 long double f1, f2;
1868 v1 = vtop - 1;
1869 v2 = vtop;
1870 /* currently, we cannot do computations with forward symbols */
1871 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1872 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1873 if (c1 && c2) {
1874 if (v1->type.t == VT_FLOAT) {
1875 f1 = v1->c.f;
1876 f2 = v2->c.f;
1877 } else if (v1->type.t == VT_DOUBLE) {
1878 f1 = v1->c.d;
1879 f2 = v2->c.d;
1880 } else {
1881 f1 = v1->c.ld;
1882 f2 = v2->c.ld;
1885 /* NOTE: we only do constant propagation when both operands are finite
1886 numbers (not NaN or infinity) (ANSI spec) */
1887 if (!ieee_finite(f1) || !ieee_finite(f2))
1888 goto general_case;
1890 switch(op) {
1891 case '+': f1 += f2; break;
1892 case '-': f1 -= f2; break;
1893 case '*': f1 *= f2; break;
1894 case '/':
1895 if (f2 == 0.0) {
1896 if (const_wanted)
1897 tcc_error("division by zero in constant");
1898 goto general_case;
1900 f1 /= f2;
1901 break;
1902 /* XXX: also handles tests ? */
1903 default:
1904 goto general_case;
1906 /* XXX: overflow test ? */
1907 if (v1->type.t == VT_FLOAT) {
1908 v1->c.f = f1;
1909 } else if (v1->type.t == VT_DOUBLE) {
1910 v1->c.d = f1;
1911 } else {
1912 v1->c.ld = f1;
1914 vtop--;
1915 } else {
1916 general_case:
1917 gen_opf(op);
1921 static int pointed_size(CType *type)
1923 int align;
1924 return type_size(pointed_type(type), &align);
1927 static void vla_runtime_pointed_size(CType *type)
1929 int align;
1930 vla_runtime_type_size(pointed_type(type), &align);
1933 static inline int is_null_pointer(SValue *p)
1935 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1936 return 0;
1937 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1938 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1939 ((p->type.t & VT_BTYPE) == VT_PTR &&
1940 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1943 static inline int is_integer_btype(int bt)
1945 return (bt == VT_BYTE || bt == VT_SHORT ||
1946 bt == VT_INT || bt == VT_LLONG);
1949 /* check types for comparison or subtraction of pointers */
1950 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1952 CType *type1, *type2, tmp_type1, tmp_type2;
1953 int bt1, bt2;
1955 /* null pointers are accepted for all comparisons, as gcc does */
1956 if (is_null_pointer(p1) || is_null_pointer(p2))
1957 return;
1958 type1 = &p1->type;
1959 type2 = &p2->type;
1960 bt1 = type1->t & VT_BTYPE;
1961 bt2 = type2->t & VT_BTYPE;
1962 /* accept comparison between pointer and integer with a warning */
1963 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1964 if (op != TOK_LOR && op != TOK_LAND )
1965 tcc_warning("comparison between pointer and integer");
1966 return;
1969 /* both must be pointers or implicit function pointers */
1970 if (bt1 == VT_PTR) {
1971 type1 = pointed_type(type1);
1972 } else if (bt1 != VT_FUNC)
1973 goto invalid_operands;
1975 if (bt2 == VT_PTR) {
1976 type2 = pointed_type(type2);
1977 } else if (bt2 != VT_FUNC) {
1978 invalid_operands:
1979 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1981 if ((type1->t & VT_BTYPE) == VT_VOID ||
1982 (type2->t & VT_BTYPE) == VT_VOID)
1983 return;
1984 tmp_type1 = *type1;
1985 tmp_type2 = *type2;
1986 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1987 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1988 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1989 /* gcc-like error if '-' is used */
1990 if (op == '-')
1991 goto invalid_operands;
1992 else
1993 tcc_warning("comparison of distinct pointer types lacks a cast");
1997 /* generic gen_op: handles types problems */
1998 ST_FUNC void gen_op(int op)
2000 int u, t1, t2, bt1, bt2, t;
2001 CType type1;
2003 redo:
2004 t1 = vtop[-1].type.t;
2005 t2 = vtop[0].type.t;
2006 bt1 = t1 & VT_BTYPE;
2007 bt2 = t2 & VT_BTYPE;
2009 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2010 tcc_error("operation on a struct");
2011 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2012 if (bt2 == VT_FUNC) {
2013 mk_pointer(&vtop->type);
2014 gaddrof();
2016 if (bt1 == VT_FUNC) {
2017 vswap();
2018 mk_pointer(&vtop->type);
2019 gaddrof();
2020 vswap();
2022 goto redo;
2023 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2024 /* at least one operand is a pointer */
2025 /* relational op: both operands must be pointers */
2026 if (op >= TOK_ULT && op <= TOK_LOR) {
2027 check_comparison_pointer_types(vtop - 1, vtop, op);
2028 /* pointers are handled as unsigned */
2029 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2030 t = VT_LLONG | VT_UNSIGNED;
2031 #else
2032 t = VT_INT | VT_UNSIGNED;
2033 #endif
2034 goto std_op;
2036 /* if both pointers, then it must be the '-' op */
2037 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2038 if (op != '-')
2039 tcc_error("cannot use pointers here");
2040 check_comparison_pointer_types(vtop - 1, vtop, op);
2041 /* XXX: check that types are compatible */
2042 if (vtop[-1].type.t & VT_VLA) {
2043 vla_runtime_pointed_size(&vtop[-1].type);
2044 } else {
2045 vpushi(pointed_size(&vtop[-1].type));
2047 vrott(3);
2048 gen_opic(op);
2049 /* set to integer type */
2050 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2051 vtop->type.t = VT_LLONG;
2052 #else
2053 vtop->type.t = VT_INT;
2054 #endif
2055 vswap();
2056 gen_op(TOK_PDIV);
2057 } else {
2058 /* exactly one pointer : must be '+' or '-'. */
2059 if (op != '-' && op != '+')
2060 tcc_error("cannot use pointers here");
2061 /* Put pointer as first operand */
2062 if (bt2 == VT_PTR) {
2063 vswap();
2064 t = t1, t1 = t2, t2 = t;
2066 #if PTR_SIZE == 4
2067 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2068 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2069 gen_cast(&int_type);
2070 #endif
2071 type1 = vtop[-1].type;
2072 type1.t &= ~VT_ARRAY;
2073 if (vtop[-1].type.t & VT_VLA)
2074 vla_runtime_pointed_size(&vtop[-1].type);
2075 else {
2076 u = pointed_size(&vtop[-1].type);
2077 if (u < 0)
2078 tcc_error("unknown array element size");
2079 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2080 vpushll(u);
2081 #else
2082 /* XXX: cast to int ? (long long case) */
2083 vpushi(u);
2084 #endif
2086 gen_op('*');
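/* Worked example (illustration): for "int *p; ... p + 3" the integer operand
   is multiplied here by the pointed size, so on a target with 4-byte int the
   addition below effectively adds 12 bytes to the pointer. */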
2087 #if 0
2088 /* #ifdef CONFIG_TCC_BCHECK
2089 The main reason for removing this code:
2090 #include <stdio.h>
2091 int main ()
2093 int v[10];
2094 int i = 10;
2095 int j = 9;
2096 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2097 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2099 When this code is on, the output looks like
2100 v+i-j = 0xfffffffe
2101 v+(i-j) = 0xbff84000
2103 /* if evaluating constant expression, no code should be
2104 generated, so no bound check */
2105 if (tcc_state->do_bounds_check && !const_wanted) {
2106 /* if bounded pointers, we generate a special code to
2107 test bounds */
2108 if (op == '-') {
2109 vpushi(0);
2110 vswap();
2111 gen_op('-');
2113 gen_bounded_ptr_add();
2114 } else
2115 #endif
2117 gen_opic(op);
2119 /* restore the type in case gen_opic() swapped the operands */
2120 vtop->type = type1;
2122 } else if (is_float(bt1) || is_float(bt2)) {
2123 /* compute bigger type and do implicit casts */
2124 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2125 t = VT_LDOUBLE;
2126 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2127 t = VT_DOUBLE;
2128 } else {
2129 t = VT_FLOAT;
2131 /* floats can only be used for a few operations */
2132 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2133 (op < TOK_ULT || op > TOK_GT))
2134 tcc_error("invalid operands for binary operation");
2135 goto std_op;
2136 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2137 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2138 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2139 t |= VT_UNSIGNED;
2140 goto std_op;
2141 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2142 /* cast to biggest op */
2143 t = VT_LLONG;
2144 /* convert to unsigned if it does not fit in a long long */
2145 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2146 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2147 t |= VT_UNSIGNED;
2148 goto std_op;
2149 } else {
2150 /* integer operations */
2151 t = VT_INT;
2152 /* convert to unsigned if it does not fit in an integer */
2153 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2154 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2155 t |= VT_UNSIGNED;
2156 std_op:
2157 /* XXX: currently, some unsigned operations are explicit, so
2158 we modify them here */
2159 if (t & VT_UNSIGNED) {
2160 if (op == TOK_SAR)
2161 op = TOK_SHR;
2162 else if (op == '/')
2163 op = TOK_UDIV;
2164 else if (op == '%')
2165 op = TOK_UMOD;
2166 else if (op == TOK_LT)
2167 op = TOK_ULT;
2168 else if (op == TOK_GT)
2169 op = TOK_UGT;
2170 else if (op == TOK_LE)
2171 op = TOK_ULE;
2172 else if (op == TOK_GE)
2173 op = TOK_UGE;
2175 vswap();
2176 type1.t = t;
2177 gen_cast(&type1);
2178 vswap();
2179 /* special case for shifts and long long: we keep the shift as
2180 an integer */
2181 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2182 type1.t = VT_INT;
2183 gen_cast(&type1);
2184 if (is_float(t))
2185 gen_opif(op);
2186 else
2187 gen_opic(op);
2188 if (op >= TOK_ULT && op <= TOK_GT) {
2189 /* relational op: the result is an int */
2190 vtop->type.t = VT_INT;
2191 } else {
2192 vtop->type.t = t;
2195 // Make sure that we have converted to an rvalue:
2196 if (vtop->r & VT_LVAL)
2197 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2200 #ifndef TCC_TARGET_ARM
2201 /* generic itof for unsigned long long case */
2202 static void gen_cvt_itof1(int t)
2204 #ifdef TCC_TARGET_ARM64
2205 gen_cvt_itof(t);
2206 #else
2207 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2208 (VT_LLONG | VT_UNSIGNED)) {
2210 if (t == VT_FLOAT)
2211 vpush_global_sym(&func_old_type, TOK___floatundisf);
2212 #if LDOUBLE_SIZE != 8
2213 else if (t == VT_LDOUBLE)
2214 vpush_global_sym(&func_old_type, TOK___floatundixf);
2215 #endif
2216 else
2217 vpush_global_sym(&func_old_type, TOK___floatundidf);
2218 vrott(2);
2219 gfunc_call(1);
2220 vpushi(0);
2221 vtop->r = reg_fret(t);
2222 } else {
2223 gen_cvt_itof(t);
2225 #endif
2227 #endif
2229 /* generic ftoi for unsigned long long case */
2230 static void gen_cvt_ftoi1(int t)
2232 #ifdef TCC_TARGET_ARM64
2233 gen_cvt_ftoi(t);
2234 #else
2235 int st;
2237 if (t == (VT_LLONG | VT_UNSIGNED)) {
2238 /* not handled natively */
2239 st = vtop->type.t & VT_BTYPE;
2240 if (st == VT_FLOAT)
2241 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2242 #if LDOUBLE_SIZE != 8
2243 else if (st == VT_LDOUBLE)
2244 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2245 #endif
2246 else
2247 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2248 vrott(2);
2249 gfunc_call(1);
2250 vpushi(0);
2251 vtop->r = REG_IRET;
2252 vtop->r2 = REG_LRET;
2253 } else {
2254 gen_cvt_ftoi(t);
2256 #endif
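/* For illustration (not part of tccgen.c): on a 32-bit target a cast such as

       double d = 3.9;
       unsigned long long u = (unsigned long long)d;

   is not converted inline; gen_cvt_ftoi1() above turns it into a call to the
   runtime helper __fixunsdfdi (or __fixunssfdi / __fixunsxfdi for float and
   long double sources). */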
2259 /* force char or short cast */
2260 static void force_charshort_cast(int t)
2262 int bits, dbt;
2263 dbt = t & VT_BTYPE;
2264 /* XXX: add optimization if lvalue : just change type and offset */
2265 if (dbt == VT_BYTE)
2266 bits = 8;
2267 else
2268 bits = 16;
2269 if (t & VT_UNSIGNED) {
2270 vpushi((1 << bits) - 1);
2271 gen_op('&');
2272 } else {
2273 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2274 bits = 64 - bits;
2275 else
2276 bits = 32 - bits;
2277 vpushi(bits);
2278 gen_op(TOK_SHL);
2279 /* the result must be signed, otherwise the SAR below would be emitted
2280 as a logical shift. This was not the case when "t" was a signed short
2281 and the last value on the stack was an unsigned int */
2282 vtop->type.t &= ~VT_UNSIGNED;
2283 vpushi(bits);
2284 gen_op(TOK_SAR);
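/* For illustration (not part of tccgen.c): the delayed cast above is, in
   plain C terms and assuming a 32-bit int with arithmetic right shifts,

       unsigned char uc(int x) { return x & 0xff; }          // unsigned: AND mask
       signed char   sc(int x) { return (x << 24) >> 24; }   // signed: shl/sar pair

   i.e. unsigned narrow types are truncated with a mask while signed ones
   are sign-extended by a shift-left / arithmetic-shift-right pair. */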
2288 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2289 static void gen_cast(CType *type)
2291 int sbt, dbt, sf, df, c, p;
2293 /* special delayed cast for char/short */
2294 /* XXX: in some cases (multiple cascaded casts), it may still
2295 be incorrect */
2296 if (vtop->r & VT_MUSTCAST) {
2297 vtop->r &= ~VT_MUSTCAST;
2298 force_charshort_cast(vtop->type.t);
2301 /* bitfields first get cast to ints */
2302 if (vtop->type.t & VT_BITFIELD) {
2303 gv(RC_INT);
2306 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2307 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2309 if (sbt != dbt) {
2310 sf = is_float(sbt);
2311 df = is_float(dbt);
2312 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2313 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2314 if (c) {
2315 /* constant case: we can do it now */
2316 /* XXX: in ISOC, cannot do it if error in convert */
2317 if (sbt == VT_FLOAT)
2318 vtop->c.ld = vtop->c.f;
2319 else if (sbt == VT_DOUBLE)
2320 vtop->c.ld = vtop->c.d;
2322 if (df) {
2323 if ((sbt & VT_BTYPE) == VT_LLONG) {
2324 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2325 vtop->c.ld = vtop->c.i;
2326 else
2327 vtop->c.ld = -(long double)-vtop->c.i;
2328 } else if(!sf) {
2329 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2330 vtop->c.ld = (uint32_t)vtop->c.i;
2331 else
2332 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2335 if (dbt == VT_FLOAT)
2336 vtop->c.f = (float)vtop->c.ld;
2337 else if (dbt == VT_DOUBLE)
2338 vtop->c.d = (double)vtop->c.ld;
2339 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2340 vtop->c.i = vtop->c.ld;
2341 } else if (sf && dbt == VT_BOOL) {
2342 vtop->c.i = (vtop->c.ld != 0);
2343 } else {
2344 if(sf)
2345 vtop->c.i = vtop->c.ld;
2346 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2348 else if (sbt & VT_UNSIGNED)
2349 vtop->c.i = (uint32_t)vtop->c.i;
2350 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2351 else if (sbt == VT_PTR)
2353 #endif
2354 else if (sbt != VT_LLONG)
2355 vtop->c.i = ((uint32_t)vtop->c.i |
2356 -(vtop->c.i & 0x80000000));
2358 if (dbt == (VT_LLONG|VT_UNSIGNED))
2360 else if (dbt == VT_BOOL)
2361 vtop->c.i = (vtop->c.i != 0);
2362 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2363 else if (dbt == VT_PTR)
2365 #endif
2366 else if (dbt != VT_LLONG) {
2367 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2368 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2369 0xffffffff);
2370 vtop->c.i &= m;
2371 if (!(dbt & VT_UNSIGNED))
2372 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2375 } else if (p && dbt == VT_BOOL) {
2376 vtop->r = VT_CONST;
2377 vtop->c.i = 1;
2378 } else {
2379 /* non constant case: generate code */
2380 if (sf && df) {
2381 /* convert from fp to fp */
2382 gen_cvt_ftof(dbt);
2383 } else if (df) {
2384 /* convert int to fp */
2385 gen_cvt_itof1(dbt);
2386 } else if (sf) {
2387 /* convert fp to int */
2388 if (dbt == VT_BOOL) {
2389 vpushi(0);
2390 gen_op(TOK_NE);
2391 } else {
2392 if (sbt == VT_FLOAT) {
2393 /* cast to DOUBLE to avoid precision loss */
2394 gen_cvt_ftof(VT_DOUBLE);
2395 vtop->type.t = (vtop->type.t & ~VT_BTYPE) | VT_DOUBLE;
2397 /* we handle char/short/etc... with generic code */
2398 if (dbt != (VT_INT | VT_UNSIGNED) &&
2399 dbt != (VT_LLONG | VT_UNSIGNED) &&
2400 dbt != VT_LLONG)
2401 dbt = VT_INT;
2402 gen_cvt_ftoi1(dbt);
2403 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2404 /* additional cast for char/short... */
2405 vtop->type.t = dbt;
2406 gen_cast(type);
2409 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2410 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2411 if ((sbt & VT_BTYPE) != VT_LLONG) {
2412 /* scalar to long long */
2413 /* machine independent conversion */
2414 gv(RC_INT);
2415 /* generate high word */
2416 if (sbt == (VT_INT | VT_UNSIGNED)) {
2417 vpushi(0);
2418 gv(RC_INT);
2419 } else {
2420 if (sbt == VT_PTR) {
2421 /* cast from pointer to int before we apply
2422 shift operation, which pointers don't support */
2423 gen_cast(&int_type);
2425 gv_dup();
2426 vpushi(31);
2427 gen_op(TOK_SAR);
2429 /* patch second register */
2430 vtop[-1].r2 = vtop->r;
2431 vpop();
2433 #else
2434 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2435 (dbt & VT_BTYPE) == VT_PTR ||
2436 (dbt & VT_BTYPE) == VT_FUNC) {
2437 if ((sbt & VT_BTYPE) != VT_LLONG &&
2438 (sbt & VT_BTYPE) != VT_PTR &&
2439 (sbt & VT_BTYPE) != VT_FUNC) {
2440 /* need to convert from 32bit to 64bit */
2441 gv(RC_INT);
2442 if (sbt != (VT_INT | VT_UNSIGNED)) {
2443 #if defined(TCC_TARGET_ARM64)
2444 gen_cvt_sxtw();
2445 #elif defined(TCC_TARGET_X86_64)
2446 int r = gv(RC_INT);
2447 /* x86_64 specific: movslq */
2448 o(0x6348);
2449 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2450 #else
2451 #error
2452 #endif
2455 #endif
2456 } else if (dbt == VT_BOOL) {
2457 /* scalar to bool */
2458 vpushi(0);
2459 gen_op(TOK_NE);
2460 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2461 (dbt & VT_BTYPE) == VT_SHORT) {
2462 if (sbt == VT_PTR) {
2463 vtop->type.t = VT_INT;
2464 tcc_warning("nonportable conversion from pointer to char/short");
2466 force_charshort_cast(dbt);
2467 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2468 } else if ((dbt & VT_BTYPE) == VT_INT) {
2469 /* scalar to int */
2470 if ((sbt & VT_BTYPE) == VT_LLONG) {
2471 /* from long long: just take low order word */
2472 lexpand();
2473 vpop();
2475 /* if lvalue and single word type, nothing to do because
2476 the lvalue already contains the real type size (see
2477 VT_LVAL_xxx constants) */
2478 #endif
2481 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2482 /* if we are casting between pointer types,
2483 we must update the VT_LVAL_xxx size */
2484 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2485 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2487 vtop->type = *type;
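/* The constant branch above folds casts at compile time, e.g. (illustrative)

       (unsigned char)300   ->  44      (masked to 8 bits)
       (int)3.75            ->  3       (floating constant truncated)
       (_Bool)0.0           ->  0

   while the non-constant branch emits conversion code through the
   gen_cvt_* hooks of the target code generator. */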
2490 /* return type size as known at compile time. Put alignment at 'a' */
2491 ST_FUNC int type_size(CType *type, int *a)
2493 Sym *s;
2494 int bt;
2496 bt = type->t & VT_BTYPE;
2497 if (bt == VT_STRUCT) {
2498 /* struct/union */
2499 s = type->ref;
2500 *a = s->r;
2501 return s->c;
2502 } else if (bt == VT_PTR) {
2503 if (type->t & VT_ARRAY) {
2504 int ts;
2506 s = type->ref;
2507 ts = type_size(&s->type, a);
2509 if (ts < 0 && s->c < 0)
2510 ts = -ts;
2512 return ts * s->c;
2513 } else {
2514 *a = PTR_SIZE;
2515 return PTR_SIZE;
2517 } else if (bt == VT_LDOUBLE) {
2518 *a = LDOUBLE_ALIGN;
2519 return LDOUBLE_SIZE;
2520 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2521 #ifdef TCC_TARGET_I386
2522 #ifdef TCC_TARGET_PE
2523 *a = 8;
2524 #else
2525 *a = 4;
2526 #endif
2527 #elif defined(TCC_TARGET_ARM)
2528 #ifdef TCC_ARM_EABI
2529 *a = 8;
2530 #else
2531 *a = 4;
2532 #endif
2533 #else
2534 *a = 8;
2535 #endif
2536 return 8;
2537 } else if (bt == VT_INT || bt == VT_FLOAT) {
2538 *a = 4;
2539 return 4;
2540 } else if (bt == VT_SHORT) {
2541 *a = 2;
2542 return 2;
2543 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2544 *a = 8;
2545 return 16;
2546 } else if (bt == VT_ENUM) {
2547 *a = 4;
2548 /* Enums might be incomplete, so don't just return '4' here. */
2549 return type->ref->c;
2550 } else {
2551 /* char, void, function, _Bool */
2552 *a = 1;
2553 return 1;
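/* Example results (i386/ELF, illustrative only -- other targets differ as
   selected by the #ifdefs above):

       type_size(char)        -> size 1, align 1
       type_size(short)       -> size 2, align 2
       type_size(int)         -> size 4, align 4
       type_size(long long)   -> size 8, align 4   (align 8 on PE, ARM EABI, 64-bit)
       type_size(struct ...)  -> size s->c, align s->r as computed by struct_layout()
   */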
2557 /* push the type size as known at run time on top of the value stack. Put
2558 alignment at 'a' */
2559 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2561 if (type->t & VT_VLA) {
2562 type_size(&type->ref->type, a);
2563 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2564 } else {
2565 vpushi(type_size(type, a));
2569 static void vla_sp_restore(void) {
2570 if (vlas_in_scope) {
2571 gen_vla_sp_restore(vla_sp_loc);
2575 static void vla_sp_restore_root(void) {
2576 if (vlas_in_scope) {
2577 gen_vla_sp_restore(vla_sp_root_loc);
2581 /* return the pointed type of t */
2582 static inline CType *pointed_type(CType *type)
2584 return &type->ref->type;
2587 /* modify 'type' so that it becomes a pointer to the original type */
2588 ST_FUNC void mk_pointer(CType *type)
2590 Sym *s;
2591 s = sym_push(SYM_FIELD, type, 0, -1);
2592 type->t = VT_PTR | (type->t & ~VT_TYPE);
2593 type->ref = s;
2596 /* compare function types. OLD functions match any new functions */
2597 static int is_compatible_func(CType *type1, CType *type2)
2599 Sym *s1, *s2;
2601 s1 = type1->ref;
2602 s2 = type2->ref;
2603 if (!is_compatible_types(&s1->type, &s2->type))
2604 return 0;
2605 /* check func_call */
2606 if (s1->a.func_call != s2->a.func_call)
2607 return 0;
2608 /* XXX: not complete */
2609 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2610 return 1;
2611 if (s1->c != s2->c)
2612 return 0;
2613 while (s1 != NULL) {
2614 if (s2 == NULL)
2615 return 0;
2616 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2617 return 0;
2618 s1 = s1->next;
2619 s2 = s2->next;
2621 if (s2)
2622 return 0;
2623 return 1;
2626 /* return true if type1 and type2 are the same. If unqualified is
2627 true, qualifiers on the types are ignored.
2629 - enums are not checked, as with gcc's __builtin_types_compatible_p()
2631 static int compare_types(CType *type1, CType *type2, int unqualified)
2633 int bt1, t1, t2;
2635 t1 = type1->t & VT_TYPE;
2636 t2 = type2->t & VT_TYPE;
2637 if (unqualified) {
2638 /* strip qualifiers before comparing */
2639 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2640 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2642 /* Default vs. explicit signedness only matters for char */
2643 if ((t1 & VT_BTYPE) != VT_BYTE) {
2644 t1 &= ~VT_DEFSIGN;
2645 t2 &= ~VT_DEFSIGN;
2647 /* An enum is compatible with (unsigned) int. Ideally we would
2648 store the enums signedness in type->ref.a.<some_bit> and
2649 only accept unsigned enums with unsigned int and vice versa.
2650 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2651 from pointer target types, so we can't add it here either. */
2652 if ((t1 & VT_BTYPE) == VT_ENUM) {
2653 t1 = VT_INT;
2654 if (type1->ref->a.unsigned_enum)
2655 t1 |= VT_UNSIGNED;
2657 if ((t2 & VT_BTYPE) == VT_ENUM) {
2658 t2 = VT_INT;
2659 if (type2->ref->a.unsigned_enum)
2660 t2 |= VT_UNSIGNED;
2662 /* XXX: bitfields ? */
2663 if (t1 != t2)
2664 return 0;
2665 /* test more complicated cases */
2666 bt1 = t1 & VT_BTYPE;
2667 if (bt1 == VT_PTR) {
2668 type1 = pointed_type(type1);
2669 type2 = pointed_type(type2);
2670 return is_compatible_types(type1, type2);
2671 } else if (bt1 == VT_STRUCT) {
2672 return (type1->ref == type2->ref);
2673 } else if (bt1 == VT_FUNC) {
2674 return is_compatible_func(type1, type2);
2675 } else {
2676 return 1;
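/* Some illustrative cases for compare_types() with unqualified == 1:

       const int          vs. int              -> same (qualifiers stripped)
       int *              vs. int *            -> same
       int *              vs. unsigned int *   -> different (pointed base types differ)
       enum with negative
       enumerators        vs. int              -> same (enum decays to int here)
       struct A           vs. struct A         -> same only if both refer to the
                                                  same definition (same ->ref)
   */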
2680 /* return true if type1 and type2 are exactly the same (including
2681 qualifiers).
2683 static int is_compatible_types(CType *type1, CType *type2)
2685 return compare_types(type1,type2,0);
2688 /* return true if type1 and type2 are the same (ignoring qualifiers).
2690 static int is_compatible_parameter_types(CType *type1, CType *type2)
2692 return compare_types(type1,type2,1);
2695 /* print a type. If 'varstr' is not NULL, then the variable is also
2696 printed in the type */
2697 /* XXX: union */
2698 /* XXX: add array and function pointers */
2699 static void type_to_str(char *buf, int buf_size,
2700 CType *type, const char *varstr)
2702 int bt, v, t;
2703 Sym *s, *sa;
2704 char buf1[256];
2705 const char *tstr;
2707 t = type->t & VT_TYPE;
2708 bt = t & VT_BTYPE;
2709 buf[0] = '\0';
2710 if (t & VT_CONSTANT)
2711 pstrcat(buf, buf_size, "const ");
2712 if (t & VT_VOLATILE)
2713 pstrcat(buf, buf_size, "volatile ");
2714 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2715 pstrcat(buf, buf_size, "unsigned ");
2716 else if (t & VT_DEFSIGN)
2717 pstrcat(buf, buf_size, "signed ");
2718 switch(bt) {
2719 case VT_VOID:
2720 tstr = "void";
2721 goto add_tstr;
2722 case VT_BOOL:
2723 tstr = "_Bool";
2724 goto add_tstr;
2725 case VT_BYTE:
2726 tstr = "char";
2727 goto add_tstr;
2728 case VT_SHORT:
2729 tstr = "short";
2730 goto add_tstr;
2731 case VT_INT:
2732 tstr = "int";
2733 goto add_tstr;
2734 case VT_LONG:
2735 tstr = "long";
2736 goto add_tstr;
2737 case VT_LLONG:
2738 tstr = "long long";
2739 goto add_tstr;
2740 case VT_FLOAT:
2741 tstr = "float";
2742 goto add_tstr;
2743 case VT_DOUBLE:
2744 tstr = "double";
2745 goto add_tstr;
2746 case VT_LDOUBLE:
2747 tstr = "long double";
2748 add_tstr:
2749 pstrcat(buf, buf_size, tstr);
2750 break;
2751 case VT_ENUM:
2752 case VT_STRUCT:
2753 if (bt == VT_STRUCT)
2754 tstr = "struct ";
2755 else
2756 tstr = "enum ";
2757 pstrcat(buf, buf_size, tstr);
2758 v = type->ref->v & ~SYM_STRUCT;
2759 if (v >= SYM_FIRST_ANOM)
2760 pstrcat(buf, buf_size, "<anonymous>");
2761 else
2762 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2763 break;
2764 case VT_FUNC:
2765 s = type->ref;
2766 type_to_str(buf, buf_size, &s->type, varstr);
2767 pstrcat(buf, buf_size, "(");
2768 sa = s->next;
2769 while (sa != NULL) {
2770 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2771 pstrcat(buf, buf_size, buf1);
2772 sa = sa->next;
2773 if (sa)
2774 pstrcat(buf, buf_size, ", ");
2776 pstrcat(buf, buf_size, ")");
2777 goto no_var;
2778 case VT_PTR:
2779 s = type->ref;
2780 if (t & VT_ARRAY) {
2781 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2782 type_to_str(buf, buf_size, &s->type, buf1);
2783 goto no_var;
2785 pstrcpy(buf1, sizeof(buf1), "*");
2786 if (t & VT_CONSTANT)
2787 pstrcat(buf1, buf_size, "const ");
2788 if (t & VT_VOLATILE)
2789 pstrcat(buf1, buf_size, "volatile ");
2790 if (varstr)
2791 pstrcat(buf1, sizeof(buf1), varstr);
2792 type_to_str(buf, buf_size, &s->type, buf1);
2793 goto no_var;
2795 if (varstr) {
2796 pstrcat(buf, buf_size, " ");
2797 pstrcat(buf, buf_size, varstr);
2799 no_var: ;
2802 /* verify type compatibility to store vtop in 'dt' type, and generate
2803 casts if needed. */
2804 static void gen_assign_cast(CType *dt)
2806 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2807 char buf1[256], buf2[256];
2808 int dbt, sbt;
2810 st = &vtop->type; /* source type */
2811 dbt = dt->t & VT_BTYPE;
2812 sbt = st->t & VT_BTYPE;
2813 if (sbt == VT_VOID || dbt == VT_VOID) {
2814 if (sbt == VT_VOID && dbt == VT_VOID)
2815 ; /*
2816 It is Ok if both are void
2817 A test program:
2818 void func1() {}
2819 void func2() {
2820 return func1();
2822 gcc accepts this program
2824 else
2825 tcc_error("cannot cast from/to void");
2827 if (dt->t & VT_CONSTANT)
2828 tcc_warning("assignment of read-only location");
2829 switch(dbt) {
2830 case VT_PTR:
2831 /* special cases for pointers */
2832 /* '0' can also be a pointer */
2833 if (is_null_pointer(vtop))
2834 goto type_ok;
2835 /* accept implicit pointer to integer cast with warning */
2836 if (is_integer_btype(sbt)) {
2837 tcc_warning("assignment makes pointer from integer without a cast");
2838 goto type_ok;
2840 type1 = pointed_type(dt);
2841 /* a function is implicitly a function pointer */
2842 if (sbt == VT_FUNC) {
2843 if ((type1->t & VT_BTYPE) != VT_VOID &&
2844 !is_compatible_types(pointed_type(dt), st))
2845 tcc_warning("assignment from incompatible pointer type");
2846 goto type_ok;
2848 if (sbt != VT_PTR)
2849 goto error;
2850 type2 = pointed_type(st);
2851 if ((type1->t & VT_BTYPE) == VT_VOID ||
2852 (type2->t & VT_BTYPE) == VT_VOID) {
2853 /* void * can match anything */
2854 } else {
2855 /* exact type match, except for qualifiers */
2856 tmp_type1 = *type1;
2857 tmp_type2 = *type2;
2858 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2859 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2860 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2861 /* Like GCC, don't warn by default for mere changes
2862 in pointer target signedness. Do warn for different
2863 base types, though, in particular for unsigned enums
2864 and signed int targets. */
2865 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2866 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2867 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2869 else
2870 tcc_warning("assignment from incompatible pointer type");
2873 /* check const and volatile */
2874 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2875 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2876 tcc_warning("assignment discards qualifiers from pointer target type");
2877 break;
2878 case VT_BYTE:
2879 case VT_SHORT:
2880 case VT_INT:
2881 case VT_LLONG:
2882 if (sbt == VT_PTR || sbt == VT_FUNC) {
2883 tcc_warning("assignment makes integer from pointer without a cast");
2884 } else if (sbt == VT_STRUCT) {
2885 goto case_VT_STRUCT;
2887 /* XXX: more tests */
2888 break;
2889 case VT_STRUCT:
2890 case_VT_STRUCT:
2891 tmp_type1 = *dt;
2892 tmp_type2 = *st;
2893 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2894 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2895 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2896 error:
2897 type_to_str(buf1, sizeof(buf1), st, NULL);
2898 type_to_str(buf2, sizeof(buf2), dt, NULL);
2899 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2901 break;
2903 type_ok:
2904 gen_cast(dt);
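/* A test program for the checks above (diagnostics as issued by tcc):

       int *p, n; char *q;
       p = n;          -> warning: assignment makes pointer from integer without a cast
       n = p;          -> warning: assignment makes integer from pointer without a cast
       p = q;          -> warning: assignment from incompatible pointer type
       p = (void *)q;  -> accepted, void * matches any pointer type
       p = 0;          -> accepted, null pointer constant
   */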
2907 /* store vtop in lvalue pushed on stack */
2908 ST_FUNC void vstore(void)
2910 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2912 ft = vtop[-1].type.t;
2913 sbt = vtop->type.t & VT_BTYPE;
2914 dbt = ft & VT_BTYPE;
2915 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2916 (sbt == VT_INT && dbt == VT_SHORT))
2917 && !(vtop->type.t & VT_BITFIELD)) {
2918 /* optimize char/short casts */
2919 delayed_cast = VT_MUSTCAST;
2920 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2921 ((1 << VT_STRUCT_SHIFT) - 1));
2922 /* XXX: factorize */
2923 if (ft & VT_CONSTANT)
2924 tcc_warning("assignment of read-only location");
2925 } else {
2926 delayed_cast = 0;
2927 if (!(ft & VT_BITFIELD))
2928 gen_assign_cast(&vtop[-1].type);
2931 if (sbt == VT_STRUCT) {
2932 /* if structure, only generate pointer */
2933 /* structure assignment : generate memcpy */
2934 /* XXX: optimize if small size */
2935 size = type_size(&vtop->type, &align);
2937 /* destination */
2938 vswap();
2939 vtop->type.t = VT_PTR;
2940 gaddrof();
2942 /* address of memcpy() */
2943 #ifdef TCC_ARM_EABI
2944 if(!(align & 7))
2945 vpush_global_sym(&func_old_type, TOK_memcpy8);
2946 else if(!(align & 3))
2947 vpush_global_sym(&func_old_type, TOK_memcpy4);
2948 else
2949 #endif
2950 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2951 vpush_global_sym(&func_old_type, TOK_memmove);
2953 vswap();
2954 /* source */
2955 vpushv(vtop - 2);
2956 vtop->type.t = VT_PTR;
2957 gaddrof();
2958 /* type size */
2959 vpushi(size);
2960 gfunc_call(3);
2962 /* leave source on stack */
2963 } else if (ft & VT_BITFIELD) {
2964 /* bitfield store handling */
2966 /* save lvalue as expression result (example: s.b = s.a = n;) */
2967 vdup(), vtop[-1] = vtop[-2];
2969 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2970 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2971 /* remove bit field info to avoid loops */
2972 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2974 if((ft & VT_BTYPE) == VT_BOOL) {
2975 gen_cast(&vtop[-1].type);
2976 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2979 /* duplicate destination */
2980 vdup();
2981 vtop[-1] = vtop[-2];
2983 /* mask and shift source */
2984 if((ft & VT_BTYPE) != VT_BOOL) {
2985 if((ft & VT_BTYPE) == VT_LLONG) {
2986 vpushll((1ULL << bit_size) - 1ULL);
2987 } else {
2988 vpushi((1 << bit_size) - 1);
2990 gen_op('&');
2992 vpushi(bit_pos);
2993 gen_op(TOK_SHL);
2994 /* load destination, mask and or with source */
2995 vswap();
2996 if((ft & VT_BTYPE) == VT_LLONG) {
2997 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2998 } else {
2999 vpushi(~(((1 << bit_size) - 1) << bit_pos));
3001 gen_op('&');
3002 gen_op('|');
3003 /* store result */
3004 vstore();
3005 /* ... and discard */
3006 vpop();
3008 } else {
3009 #ifdef CONFIG_TCC_BCHECK
3010 /* bound check case */
3011 if (vtop[-1].r & VT_MUSTBOUND) {
3012 vswap();
3013 gbound();
3014 vswap();
3016 #endif
3017 rc = RC_INT;
3018 if (is_float(ft)) {
3019 rc = RC_FLOAT;
3020 #ifdef TCC_TARGET_X86_64
3021 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3022 rc = RC_ST0;
3023 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3024 rc = RC_FRET;
3026 #endif
3028 r = gv(rc); /* generate value */
3029 /* if lvalue was saved on stack, must read it */
3030 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3031 SValue sv;
3032 t = get_reg(RC_INT);
3033 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3034 sv.type.t = VT_PTR;
3035 #else
3036 sv.type.t = VT_INT;
3037 #endif
3038 sv.r = VT_LOCAL | VT_LVAL;
3039 sv.c.i = vtop[-1].c.i;
3040 load(t, &sv);
3041 vtop[-1].r = t | VT_LVAL;
3043 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3044 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3045 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3046 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3047 #else
3048 if ((ft & VT_BTYPE) == VT_LLONG) {
3049 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3050 #endif
3051 vtop[-1].type.t = load_type;
3052 store(r, vtop - 1);
3053 vswap();
3054 /* convert to int to increment easily */
3055 vtop->type.t = addr_type;
3056 gaddrof();
3057 vpushi(load_size);
3058 gen_op('+');
3059 vtop->r |= VT_LVAL;
3060 vswap();
3061 vtop[-1].type.t = load_type;
3062 /* XXX: it works because r2 is spilled last ! */
3063 store(vtop->r2, vtop - 1);
3064 } else {
3065 store(r, vtop - 1);
3068 vswap();
3069 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3070 vtop->r |= delayed_cast;
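/* For illustration (not part of tccgen.c): the bit-field branch above stores

       dst = (dst & ~(((1 << bit_size) - 1) << bit_pos))
           |  ((src &  ((1 << bit_size) - 1)) << bit_pos);

   i.e. the source is masked to bit_size bits and shifted into place, the
   destination run is cleared, and the two are OR-ed together (with 1ULL
   based masks in the long long case). */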
3074 /* 'post' selects post- vs. pre-increment; 'c' is the token ++ or -- */
3075 ST_FUNC void inc(int post, int c)
3077 test_lvalue();
3078 vdup(); /* save lvalue */
3079 if (post) {
3080 gv_dup(); /* duplicate value */
3081 vrotb(3);
3082 vrotb(3);
3084 /* add constant */
3085 vpushi(c - TOK_MID);
3086 gen_op('+');
3087 vstore(); /* store value */
3088 if (post)
3089 vpop(); /* if post op, return saved value */
3092 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3094 /* read the string */
3095 if (tok != TOK_STR)
3096 expect(msg);
3097 cstr_new(astr);
3098 while (tok == TOK_STR) {
3099 /* XXX: add \0 handling too ? */
3100 cstr_cat(astr, tokc.str.data, -1);
3101 next();
3103 cstr_ccat(astr, '\0');
3106 /* If I is >= 1 and a power of two, returns log2(i)+1.
3107 If I is 0 returns 0. */
3108 static int exact_log2p1(int i)
3110 int ret;
3111 if (!i)
3112 return 0;
3113 for (ret = 1; i >= 1 << 8; ret += 8)
3114 i >>= 8;
3115 if (i >= 1 << 4)
3116 ret += 4, i >>= 4;
3117 if (i >= 1 << 2)
3118 ret += 2, i >>= 2;
3119 if (i >= 1 << 1)
3120 ret++;
3121 return ret;
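/* e.g. exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(4096) == 13;
   the aligned attribute below stores this value so that the requested
   alignment can be recovered as 1 << (a.aligned - 1). */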
3124 /* Parse GNUC __attribute__ extension. Currently, the following
3125 extensions are recognized:
3126 - aligned(n) : set data/function alignment.
3127 - packed : force data alignment to 1
3128 - section(x) : generate data/code in this section.
3129 - unused : currently ignored, but may be used someday.
3130 - regparm(n) : pass function parameters in registers (i386 only)
3132 static void parse_attribute(AttributeDef *ad)
3134 int t, n;
3135 CString astr;
3137 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3138 next();
3139 skip('(');
3140 skip('(');
3141 while (tok != ')') {
3142 if (tok < TOK_IDENT)
3143 expect("attribute name");
3144 t = tok;
3145 next();
3146 switch(t) {
3147 case TOK_SECTION1:
3148 case TOK_SECTION2:
3149 skip('(');
3150 parse_mult_str(&astr, "section name");
3151 ad->section = find_section(tcc_state, (char *)astr.data);
3152 skip(')');
3153 cstr_free(&astr);
3154 break;
3155 case TOK_ALIAS1:
3156 case TOK_ALIAS2:
3157 skip('(');
3158 parse_mult_str(&astr, "alias(\"target\")");
3159 ad->alias_target = /* save string as token, for later */
3160 tok_alloc((char*)astr.data, astr.size-1)->tok;
3161 skip(')');
3162 cstr_free(&astr);
3163 break;
3164 case TOK_VISIBILITY1:
3165 case TOK_VISIBILITY2:
3166 skip('(');
3167 parse_mult_str(&astr,
3168 "visibility(\"default|hidden|internal|protected\")");
3169 if (!strcmp (astr.data, "default"))
3170 ad->a.visibility = STV_DEFAULT;
3171 else if (!strcmp (astr.data, "hidden"))
3172 ad->a.visibility = STV_HIDDEN;
3173 else if (!strcmp (astr.data, "internal"))
3174 ad->a.visibility = STV_INTERNAL;
3175 else if (!strcmp (astr.data, "protected"))
3176 ad->a.visibility = STV_PROTECTED;
3177 else
3178 expect("visibility(\"default|hidden|internal|protected\")");
3179 skip(')');
3180 cstr_free(&astr);
3181 break;
3182 case TOK_ALIGNED1:
3183 case TOK_ALIGNED2:
3184 if (tok == '(') {
3185 next();
3186 n = expr_const();
3187 if (n <= 0 || (n & (n - 1)) != 0)
3188 tcc_error("alignment must be a positive power of two");
3189 skip(')');
3190 } else {
3191 n = MAX_ALIGN;
3193 ad->a.aligned = exact_log2p1(n);
3194 if (n != 1 << (ad->a.aligned - 1))
3195 tcc_error("alignment of %d is larger than implemented", n);
3196 break;
3197 case TOK_PACKED1:
3198 case TOK_PACKED2:
3199 ad->a.packed = 1;
3200 break;
3201 case TOK_WEAK1:
3202 case TOK_WEAK2:
3203 ad->a.weak = 1;
3204 break;
3205 case TOK_UNUSED1:
3206 case TOK_UNUSED2:
3207 /* currently, no need to handle it because tcc does not
3208 track unused objects */
3209 break;
3210 case TOK_NORETURN1:
3211 case TOK_NORETURN2:
3212 /* currently ignored: tcc makes no use of the noreturn
3213 information */
3214 break;
3215 case TOK_CDECL1:
3216 case TOK_CDECL2:
3217 case TOK_CDECL3:
3218 ad->a.func_call = FUNC_CDECL;
3219 break;
3220 case TOK_STDCALL1:
3221 case TOK_STDCALL2:
3222 case TOK_STDCALL3:
3223 ad->a.func_call = FUNC_STDCALL;
3224 break;
3225 #ifdef TCC_TARGET_I386
3226 case TOK_REGPARM1:
3227 case TOK_REGPARM2:
3228 skip('(');
3229 n = expr_const();
3230 if (n > 3)
3231 n = 3;
3232 else if (n < 0)
3233 n = 0;
3234 if (n > 0)
3235 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3236 skip(')');
3237 break;
3238 case TOK_FASTCALL1:
3239 case TOK_FASTCALL2:
3240 case TOK_FASTCALL3:
3241 ad->a.func_call = FUNC_FASTCALLW;
3242 break;
3243 #endif
3244 case TOK_MODE:
3245 skip('(');
3246 switch(tok) {
3247 case TOK_MODE_DI:
3248 ad->a.mode = VT_LLONG + 1;
3249 break;
3250 case TOK_MODE_QI:
3251 ad->a.mode = VT_BYTE + 1;
3252 break;
3253 case TOK_MODE_HI:
3254 ad->a.mode = VT_SHORT + 1;
3255 break;
3256 case TOK_MODE_SI:
3257 case TOK_MODE_word:
3258 ad->a.mode = VT_INT + 1;
3259 break;
3260 default:
3261 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3262 break;
3264 next();
3265 skip(')');
3266 break;
3267 case TOK_DLLEXPORT:
3268 ad->a.func_export = 1;
3269 break;
3270 case TOK_DLLIMPORT:
3271 ad->a.func_import = 1;
3272 break;
3273 default:
3274 if (tcc_state->warn_unsupported)
3275 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3276 /* skip parameters */
3277 if (tok == '(') {
3278 int parenthesis = 0;
3279 do {
3280 if (tok == '(')
3281 parenthesis++;
3282 else if (tok == ')')
3283 parenthesis--;
3284 next();
3285 } while (parenthesis && tok != -1);
3287 break;
3289 if (tok != ',')
3290 break;
3291 next();
3293 skip(')');
3294 skip(')');
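/* Declarations exercising several of the attributes recognized above
   (illustrative only):

       static char buf[256] __attribute__((aligned(16), section(".mybuf")));
       void fatal(const char *msg) __attribute__((noreturn, unused));
       int sum2(int a, int b) __attribute__((regparm(2)));    // i386 only

   Unknown attributes are skipped over, with a warning when
   tcc_state->warn_unsupported is set. */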
3298 static Sym * find_field (CType *type, int v)
3300 Sym *s = type->ref;
3301 v |= SYM_FIELD;
3302 while ((s = s->next) != NULL) {
3303 if ((s->v & SYM_FIELD) &&
3304 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3305 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3306 Sym *ret = find_field (&s->type, v);
3307 if (ret)
3308 return ret;
3310 if (s->v == v)
3311 break;
3313 return s;
3316 static void struct_add_offset (Sym *s, int offset)
3318 while ((s = s->next) != NULL) {
3319 if ((s->v & SYM_FIELD) &&
3320 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3321 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3322 struct_add_offset(s->type.ref, offset);
3323 } else
3324 s->c += offset;
3328 static void struct_layout(CType *type, AttributeDef *ad)
3330 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3331 int pcc = !tcc_state->ms_bitfields;
3332 Sym *f;
3333 if (ad->a.aligned)
3334 maxalign = 1 << (ad->a.aligned - 1);
3335 else
3336 maxalign = 1;
3337 offset = 0;
3338 c = 0;
3339 bit_pos = 0;
3340 prevbt = VT_STRUCT; /* make it never match */
3341 prev_bit_size = 0;
3342 for (f = type->ref->next; f; f = f->next) {
3343 int typealign, bit_size;
3344 int size = type_size(&f->type, &typealign);
3345 if (f->type.t & VT_BITFIELD)
3346 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3347 else
3348 bit_size = -1;
3349 if (bit_size == 0 && pcc) {
3350 /* Zero-width bit-fields in PCC mode aren't affected
3351 by any packing (attribute or pragma). */
3352 align = typealign;
3353 } else if (f->r > 1) {
3354 align = f->r;
3355 } else if (ad->a.packed || f->r == 1) {
3356 align = 1;
3357 /* Packed fields or packed records don't let the base type
3358 influence the record's type alignment. */
3359 typealign = 1;
3360 } else {
3361 align = typealign;
3363 if (type->ref->type.t != TOK_STRUCT) {
3364 if (pcc && bit_size >= 0)
3365 size = (bit_size + 7) >> 3;
3366 /* Bit position is already zero from our caller. */
3367 offset = 0;
3368 if (size > c)
3369 c = size;
3370 } else if (bit_size < 0) {
3371 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3372 prevbt = VT_STRUCT;
3373 prev_bit_size = 0;
3374 c = (c + addbytes + align - 1) & -align;
3375 offset = c;
3376 if (size > 0)
3377 c += size;
3378 bit_pos = 0;
3379 } else {
3380 /* A bit-field. Layout is more complicated. There are two
3381 options TCC implements: PCC compatible and MS compatible
3382 (PCC compatible is what GCC uses for almost all targets).
3383 In PCC layout the overall size of the struct (in c) is
3384 _excluding_ the current run of bit-fields (that is,
3385 there are at least bit_pos additional bits after c). In
3386 MS layout c does include the current run of bit-fields.
3388 This matters for calculating the natural alignment buckets
3389 in PCC mode. */
3391 /* 'align' will be used to influence the record's alignment,
3392 so it's the max of specified and type alignment, except
3393 in certain cases that depend on the mode. */
3394 if (align < typealign)
3395 align = typealign;
3396 if (pcc) {
3397 /* In PCC layout a non-packed bit-field is placed adjacent
3398 to the preceding bit-fields, except if it would overflow
3399 its container (depending on base type) or it's a zero-width
3400 bit-field. Packed non-zero-width bit-fields always are
3401 placed adjacent. */
3402 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3403 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3404 if (bit_size == 0 ||
3405 (typealign != 1 &&
3406 (ofs2 / (typealign * 8)) > (size/typealign))) {
3407 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3408 bit_pos = 0;
3410 offset = c;
3411 /* In PCC layout named bit-fields influence the alignment
3412 of the containing struct using the base type's alignment,
3413 except for packed fields (which here have correct
3414 align/typealign). */
3415 if ((f->v & SYM_FIRST_ANOM))
3416 align = 1;
3417 } else {
3418 bt = f->type.t & VT_BTYPE;
3419 if ((bit_pos + bit_size > size * 8) ||
3420 (bit_size > 0) == (bt != prevbt)) {
3421 c = (c + typealign - 1) & -typealign;
3422 offset = c;
3423 bit_pos = 0;
3424 /* In MS bitfield mode a bit-field run always uses
3425 at least as many bits as the underlying type.
3426 To start a new run it's also required that this
3427 or the last bit-field had non-zero width. */
3428 if (bit_size || prev_bit_size)
3429 c += size;
3431 /* In MS layout the record's alignment is normally
3432 influenced by the field, except for a zero-width
3433 field at the start of a run (but by further zero-width
3434 fields it is again). */
3435 if (bit_size == 0 && prevbt != bt)
3436 align = 1;
3437 prevbt = bt;
3438 prev_bit_size = bit_size;
3440 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3441 | (bit_pos << VT_STRUCT_SHIFT);
3442 bit_pos += bit_size;
3443 if (pcc && bit_pos >= size * 8) {
3444 c += size;
3445 bit_pos -= size * 8;
3448 if (align > maxalign)
3449 maxalign = align;
3450 #if 0
3451 printf("set field %s offset=%d c=%d",
3452 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3453 if (f->type.t & VT_BITFIELD) {
3454 printf(" pos=%d size=%d",
3455 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3456 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3458 printf("\n");
3459 #endif
3461 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3462 Sym *ass;
3463 /* An anonymous struct/union. Adjust member offsets
3464 to reflect the real offset of our containing struct.
3465 Also set the offset of this anon member inside
3466 the outer struct to zero. This way it
3467 works both when accessing the field offset directly
3468 (from the base object) and when recursing into
3469 members in initializer handling. */
3470 int v2 = f->type.ref->v;
3471 if (!(v2 & SYM_FIELD) &&
3472 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3473 Sym **pps;
3474 /* This happens only with MS extensions. The
3475 anon member has a named struct type, so it
3476 potentially is shared with other references.
3477 We need to unshare members so we can modify
3478 them. */
3479 ass = f->type.ref;
3480 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3481 &f->type.ref->type, 0,
3482 f->type.ref->c);
3483 pps = &f->type.ref->next;
3484 while ((ass = ass->next) != NULL) {
3485 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3486 pps = &((*pps)->next);
3488 *pps = NULL;
3490 struct_add_offset(f->type.ref, offset);
3491 f->c = 0;
3492 } else {
3493 f->c = offset;
3496 f->r = 0;
3498 /* store size and alignment */
3499 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3500 + maxalign - 1) & -maxalign;
3501 type->ref->r = maxalign;
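/* A struct showing the PCC/MS difference handled above (illustrative):

       struct s { char a : 4; int b : 28; };

   In PCC (gcc-compatible) layout b is packed adjacent to a because the 28
   bits still fit the int container, so sizeof(struct s) == 4.  With MS
   bit-fields a new unit is started since char and int are different base
   types, giving sizeof(struct s) == 8. */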
3504 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3505 static void struct_decl(CType *type, AttributeDef *ad, int u)
3507 int a, v, size, align, flexible, alignoverride;
3508 long c;
3509 int bit_size, bsize, bt;
3510 Sym *s, *ss, **ps;
3511 AttributeDef ad1;
3512 CType type1, btype;
3514 a = tok; /* save decl type */
3515 next();
3516 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3517 parse_attribute(ad);
3518 if (tok != '{') {
3519 v = tok;
3520 next();
3521 /* struct already defined ? return it */
3522 if (v < TOK_IDENT)
3523 expect("struct/union/enum name");
3524 s = struct_find(v);
3525 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3526 if (s->type.t != a)
3527 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3528 goto do_decl;
3530 } else {
3531 v = anon_sym++;
3533 /* Record the original enum/struct/union token. */
3534 type1.t = a;
3535 type1.ref = NULL;
3536 /* we put an undefined size for struct/union */
3537 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3538 s->r = 0; /* default alignment is zero as gcc */
3539 /* put struct/union/enum name in type */
3540 do_decl:
3541 type->t = u;
3542 type->ref = s;
3544 if (tok == '{') {
3545 next();
3546 if (s->c != -1)
3547 tcc_error("struct/union/enum already defined");
3548 /* cannot be empty */
3549 c = 0;
3550 /* empty enums are not allowed */
3551 if (a == TOK_ENUM) {
3552 int seen_neg = 0;
3553 int seen_wide = 0;
3554 for(;;) {
3555 CType *t = &int_type;
3556 v = tok;
3557 if (v < TOK_UIDENT)
3558 expect("identifier");
3559 ss = sym_find(v);
3560 if (ss && !local_stack)
3561 tcc_error("redefinition of enumerator '%s'",
3562 get_tok_str(v, NULL));
3563 next();
3564 if (tok == '=') {
3565 next();
3566 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3567 c = expr_const64();
3568 #else
3569 /* We really want to support long long enums
3570 on i386 as well, but the Sym structure only
3571 holds a 'long' for associated constants,
3572 and enlarging it would bump its size (no
3573 available padding). So punt for now. */
3574 c = expr_const();
3575 #endif
3577 if (c < 0)
3578 seen_neg = 1;
3579 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3580 seen_wide = 1, t = &size_type;
3581 /* enum symbols have static storage */
3582 ss = sym_push(v, t, VT_CONST, c);
3583 ss->type.t |= VT_STATIC;
3584 if (tok != ',')
3585 break;
3586 next();
3587 c++;
3588 /* NOTE: we accept a trailing comma */
3589 if (tok == '}')
3590 break;
3592 if (!seen_neg)
3593 s->a.unsigned_enum = 1;
3594 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3595 skip('}');
3596 } else {
3597 ps = &s->next;
3598 flexible = 0;
3599 while (tok != '}') {
3600 if (!parse_btype(&btype, &ad1)) {
3601 skip(';');
3602 continue;
3604 while (1) {
3605 if (flexible)
3606 tcc_error("flexible array member '%s' not at the end of struct",
3607 get_tok_str(v, NULL));
3608 bit_size = -1;
3609 v = 0;
3610 type1 = btype;
3611 if (tok != ':') {
3612 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3613 if (v == 0) {
3614 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3615 expect("identifier");
3616 else {
3617 int v = btype.ref->v;
3618 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3619 if (tcc_state->ms_extensions == 0)
3620 expect("identifier");
3624 if (type_size(&type1, &align) < 0) {
3625 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3626 flexible = 1;
3627 else
3628 tcc_error("field '%s' has incomplete type",
3629 get_tok_str(v, NULL));
3631 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3632 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3633 tcc_error("invalid type for '%s'",
3634 get_tok_str(v, NULL));
3636 if (tok == ':') {
3637 next();
3638 bit_size = expr_const();
3639 /* XXX: handle v = 0 case for messages */
3640 if (bit_size < 0)
3641 tcc_error("negative width in bit-field '%s'",
3642 get_tok_str(v, NULL));
3643 if (v && bit_size == 0)
3644 tcc_error("zero width for bit-field '%s'",
3645 get_tok_str(v, NULL));
3646 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3647 parse_attribute(&ad1);
3649 size = type_size(&type1, &align);
3650 /* Only remember non-default alignment. */
3651 alignoverride = 0;
3652 if (ad1.a.aligned) {
3653 int speca = 1 << (ad1.a.aligned - 1);
3654 alignoverride = speca;
3655 } else if (ad1.a.packed || ad->a.packed) {
3656 alignoverride = 1;
3657 } else if (*tcc_state->pack_stack_ptr) {
3658 if (align > *tcc_state->pack_stack_ptr)
3659 alignoverride = *tcc_state->pack_stack_ptr;
3661 if (bit_size >= 0) {
3662 bt = type1.t & VT_BTYPE;
3663 if (bt != VT_INT &&
3664 bt != VT_BYTE &&
3665 bt != VT_SHORT &&
3666 bt != VT_BOOL &&
3667 bt != VT_ENUM &&
3668 bt != VT_LLONG)
3669 tcc_error("bitfields must have scalar type");
3670 bsize = size * 8;
3671 if (bit_size > bsize) {
3672 tcc_error("width of '%s' exceeds its type",
3673 get_tok_str(v, NULL));
3674 } else if (bit_size == bsize) {
3675 /* no need for bit fields */
3677 } else {
3678 type1.t |= VT_BITFIELD |
3679 (0 << VT_STRUCT_SHIFT) |
3680 (bit_size << (VT_STRUCT_SHIFT + 6));
3683 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3684 /* Remember we've seen a real field to check
3685 for placement of flexible array member. */
3686 c = 1;
3688 /* If the member is an unnamed struct or bit-field, still force
3689 it into the struct (as an anonymous member). */
3690 if (v == 0 &&
3691 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3692 bit_size >= 0)) {
3693 v = anon_sym++;
3695 if (v) {
3696 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3697 *ps = ss;
3698 ps = &ss->next;
3700 if (tok == ';' || tok == TOK_EOF)
3701 break;
3702 skip(',');
3704 skip(';');
3706 skip('}');
3707 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3708 parse_attribute(ad);
3709 struct_layout(type, ad);
3714 /* return 1 if basic type is a type size (short, long, long long) */
3715 ST_FUNC int is_btype_size(int bt)
3717 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3720 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3721 are added to the element type, copied because it could be a typedef. */
3722 static void parse_btype_qualify(CType *type, int qualifiers)
3724 while (type->t & VT_ARRAY) {
3725 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3726 type = &type->ref->type;
3728 type->t |= qualifiers;
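/* e.g. given (illustrative)

       typedef int A[4];
       const A x;

   the const is pushed down to the element type, so x ends up as an array of
   const int; the element Sym is re-pushed here so that the typedef's own
   type is left unmodified. */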
3731 /* return 0 if no type declaration. otherwise, return the basic type
3732 and skip it.
3734 static int parse_btype(CType *type, AttributeDef *ad)
3736 int t, u, bt_size, complete, type_found, typespec_found;
3737 Sym *s;
3738 CType type1;
3740 memset(ad, 0, sizeof(AttributeDef));
3741 complete = 0;
3742 type_found = 0;
3743 typespec_found = 0;
3744 t = 0;
3745 while(1) {
3746 switch(tok) {
3747 case TOK_EXTENSION:
3748 /* currently, __extension__ is simply ignored */
3749 next();
3750 continue;
3752 /* basic types */
3753 case TOK_CHAR:
3754 u = VT_BYTE;
3755 basic_type:
3756 next();
3757 basic_type1:
3758 if (complete)
3759 tcc_error("too many basic types");
3760 t |= u;
3761 bt_size = is_btype_size (u & VT_BTYPE);
3762 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3763 complete = 1;
3764 typespec_found = 1;
3765 break;
3766 case TOK_VOID:
3767 u = VT_VOID;
3768 goto basic_type;
3769 case TOK_SHORT:
3770 u = VT_SHORT;
3771 goto basic_type;
3772 case TOK_INT:
3773 u = VT_INT;
3774 goto basic_type;
3775 case TOK_LONG:
3776 next();
3777 if ((t & VT_BTYPE) == VT_DOUBLE) {
3778 #ifndef TCC_TARGET_PE
3779 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3780 #endif
3781 } else if ((t & VT_BTYPE) == VT_LONG) {
3782 t = (t & ~VT_BTYPE) | VT_LLONG;
3783 } else {
3784 u = VT_LONG;
3785 goto basic_type1;
3787 break;
3788 #ifdef TCC_TARGET_ARM64
3789 case TOK_UINT128:
3790 /* GCC's __uint128_t appears in some Linux header files. Make it a
3791 synonym for long double to get the size and alignment right. */
3792 u = VT_LDOUBLE;
3793 goto basic_type;
3794 #endif
3795 case TOK_BOOL:
3796 u = VT_BOOL;
3797 goto basic_type;
3798 case TOK_FLOAT:
3799 u = VT_FLOAT;
3800 goto basic_type;
3801 case TOK_DOUBLE:
3802 next();
3803 if ((t & VT_BTYPE) == VT_LONG) {
3804 #ifdef TCC_TARGET_PE
3805 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3806 #else
3807 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3808 #endif
3809 } else {
3810 u = VT_DOUBLE;
3811 goto basic_type1;
3813 break;
3814 case TOK_ENUM:
3815 struct_decl(&type1, ad, VT_ENUM);
3816 basic_type2:
3817 u = type1.t;
3818 type->ref = type1.ref;
3819 goto basic_type1;
3820 case TOK_STRUCT:
3821 case TOK_UNION:
3822 struct_decl(&type1, ad, VT_STRUCT);
3823 goto basic_type2;
3825 /* type modifiers */
3826 case TOK_CONST1:
3827 case TOK_CONST2:
3828 case TOK_CONST3:
3829 type->t = t;
3830 parse_btype_qualify(type, VT_CONSTANT);
3831 t = type->t;
3832 next();
3833 break;
3834 case TOK_VOLATILE1:
3835 case TOK_VOLATILE2:
3836 case TOK_VOLATILE3:
3837 type->t = t;
3838 parse_btype_qualify(type, VT_VOLATILE);
3839 t = type->t;
3840 next();
3841 break;
3842 case TOK_SIGNED1:
3843 case TOK_SIGNED2:
3844 case TOK_SIGNED3:
3845 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3846 tcc_error("signed and unsigned modifier");
3847 typespec_found = 1;
3848 t |= VT_DEFSIGN;
3849 next();
3850 break;
3851 case TOK_REGISTER:
3852 case TOK_AUTO:
3853 case TOK_RESTRICT1:
3854 case TOK_RESTRICT2:
3855 case TOK_RESTRICT3:
3856 next();
3857 break;
3858 case TOK_UNSIGNED:
3859 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3860 tcc_error("signed and unsigned modifier");
3861 t |= VT_DEFSIGN | VT_UNSIGNED;
3862 next();
3863 typespec_found = 1;
3864 break;
3866 /* storage */
3867 case TOK_EXTERN:
3868 t |= VT_EXTERN;
3869 next();
3870 break;
3871 case TOK_STATIC:
3872 t |= VT_STATIC;
3873 next();
3874 break;
3875 case TOK_TYPEDEF:
3876 t |= VT_TYPEDEF;
3877 next();
3878 break;
3879 case TOK_INLINE1:
3880 case TOK_INLINE2:
3881 case TOK_INLINE3:
3882 t |= VT_INLINE;
3883 next();
3884 break;
3886 /* GNUC attribute */
3887 case TOK_ATTRIBUTE1:
3888 case TOK_ATTRIBUTE2:
3889 parse_attribute(ad);
3890 if (ad->a.mode) {
3891 u = ad->a.mode -1;
3892 t = (t & ~VT_BTYPE) | u;
3894 break;
3895 /* GNUC typeof */
3896 case TOK_TYPEOF1:
3897 case TOK_TYPEOF2:
3898 case TOK_TYPEOF3:
3899 next();
3900 parse_expr_type(&type1);
3901 /* remove all storage modifiers except typedef */
3902 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3903 goto basic_type2;
3904 default:
3905 if (typespec_found)
3906 goto the_end;
3907 s = sym_find(tok);
3908 if (!s || !(s->type.t & VT_TYPEDEF))
3909 goto the_end;
3911 type->t = ((s->type.t & ~VT_TYPEDEF) |
3912 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3913 type->ref = s->type.ref;
3914 if (t & (VT_CONSTANT | VT_VOLATILE))
3915 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3916 t = type->t;
3918 if (s->r) {
3919 /* get attributes from typedef */
3920 if (0 == ad->a.aligned)
3921 ad->a.aligned = s->a.aligned;
3922 if (0 == ad->a.func_call)
3923 ad->a.func_call = s->a.func_call;
3924 ad->a.packed |= s->a.packed;
3926 next();
3927 typespec_found = 1;
3928 break;
3930 type_found = 1;
3932 the_end:
3933 if (tcc_state->char_is_unsigned) {
3934 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3935 t |= VT_UNSIGNED;
3938 /* plain 'long' is never kept as a basic type: map it to int or long long */
3939 if ((t & VT_BTYPE) == VT_LONG)
3940 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3941 defined TCC_TARGET_PE
3942 t = (t & ~VT_BTYPE) | VT_INT;
3943 #else
3944 t = (t & ~VT_BTYPE) | VT_LLONG;
3945 #endif
3946 type->t = t;
3947 return type_found;
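/* Some specifier combinations as resolved above (illustrative):

       "unsigned short"      -> VT_SHORT | VT_DEFSIGN | VT_UNSIGNED
       "long"                -> VT_INT on 32-bit and PE targets, VT_LLONG on
                                x86_64/arm64 (except PE), per the mapping above
       "long double"         -> VT_LDOUBLE (plain VT_DOUBLE on PE)
       "static const char"   -> VT_BYTE | VT_CONSTANT | VT_STATIC; any '*' or
                                array part is handled later by type_decl()
   */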
3950 /* convert a function parameter type (array to pointer and function to
3951 function pointer) */
3952 static inline void convert_parameter_type(CType *pt)
3954 /* remove const and volatile qualifiers (XXX: const could be used
3955 to indicate a const function parameter) */
3956 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3957 /* array must be transformed to pointer according to ANSI C */
3958 pt->t &= ~VT_ARRAY;
3959 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3960 mk_pointer(pt);
3964 ST_FUNC void parse_asm_str(CString *astr)
3966 skip('(');
3967 parse_mult_str(astr, "string constant");
3970 /* Parse an asm label and return the token */
3971 static int asm_label_instr(void)
3973 int v;
3974 CString astr;
3976 next();
3977 parse_asm_str(&astr);
3978 skip(')');
3979 #ifdef ASM_DEBUG
3980 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3981 #endif
3982 v = tok_alloc(astr.data, astr.size - 1)->tok;
3983 cstr_free(&astr);
3984 return v;
3987 static void post_type(CType *type, AttributeDef *ad, int storage)
3989 int n, l, t1, arg_size, align;
3990 Sym **plast, *s, *first;
3991 AttributeDef ad1;
3992 CType pt;
3994 if (tok == '(') {
3995 /* function declaration */
3996 next();
3997 l = 0;
3998 first = NULL;
3999 plast = &first;
4000 arg_size = 0;
4001 if (tok != ')') {
4002 for(;;) {
4003 /* read param name and compute offset */
4004 if (l != FUNC_OLD) {
4005 if (!parse_btype(&pt, &ad1)) {
4006 if (l) {
4007 tcc_error("invalid type");
4008 } else {
4009 l = FUNC_OLD;
4010 goto old_proto;
4013 l = FUNC_NEW;
4014 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4015 break;
4016 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4017 if ((pt.t & VT_BTYPE) == VT_VOID)
4018 tcc_error("parameter declared as void");
4019 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4020 } else {
4021 old_proto:
4022 n = tok;
4023 if (n < TOK_UIDENT)
4024 expect("identifier");
4025 pt.t = VT_INT;
4026 next();
4028 convert_parameter_type(&pt);
4029 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4030 *plast = s;
4031 plast = &s->next;
4032 if (tok == ')')
4033 break;
4034 skip(',');
4035 if (l == FUNC_NEW && tok == TOK_DOTS) {
4036 l = FUNC_ELLIPSIS;
4037 next();
4038 break;
4042 /* if there are no parameters, it is an old-style prototype */
4043 if (l == 0)
4044 l = FUNC_OLD;
4045 skip(')');
4046 /* NOTE: const is ignored in returned type as it has a special
4047 meaning in gcc / C++ */
4048 type->t &= ~VT_CONSTANT;
4049 /* some ancient pre-K&R C allows a function to return an array
4050 and the array brackets to be put after the arguments, such
4051 that "int c()[]" means something like "int[] c()" */
4052 if (tok == '[') {
4053 next();
4054 skip(']'); /* only handle simple "[]" */
4055 type->t |= VT_PTR;
4057 /* we push an anonymous symbol which will contain the function prototype */
4058 ad->a.func_args = arg_size;
4059 s = sym_push(SYM_FIELD, type, 0, l);
4060 s->a = ad->a;
4061 s->next = first;
4062 type->t = VT_FUNC;
4063 type->ref = s;
4064 } else if (tok == '[') {
4065 int saved_nocode_wanted = nocode_wanted;
4066 /* array definition */
4067 next();
4068 if (tok == TOK_RESTRICT1)
4069 next();
4070 n = -1;
4071 t1 = 0;
4072 if (tok != ']') {
4073 if (!local_stack || (storage & VT_STATIC))
4074 vpushi(expr_const());
4075 else {
4076 /* The length of a VLA (which can only occur with local_stack &&
4077 !VT_STATIC) must always be evaluated, even under nocode_wanted,
4078 so that its size slot is initialized (e.g. under sizeof
4079 or typeof). */
4080 nocode_wanted = 0;
4081 gexpr();
4083 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4084 n = vtop->c.i;
4085 if (n < 0)
4086 tcc_error("invalid array size");
4087 } else {
4088 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4089 tcc_error("size of variable length array should be an integer");
4090 t1 = VT_VLA;
4093 skip(']');
4094 /* parse next post type */
4095 post_type(type, ad, storage);
4096 if (type->t == VT_FUNC)
4097 tcc_error("declaration of an array of functions");
4098 t1 |= type->t & VT_VLA;
4100 if (t1 & VT_VLA) {
4101 loc -= type_size(&int_type, &align);
4102 loc &= -align;
4103 n = loc;
4105 vla_runtime_type_size(type, &align);
4106 gen_op('*');
4107 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4108 vswap();
4109 vstore();
4111 if (n != -1)
4112 vpop();
4113 nocode_wanted = saved_nocode_wanted;
4115 /* we push an anonymous symbol which will contain the array
4116 element type */
4117 s = sym_push(SYM_FIELD, type, 0, n);
4118 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4119 type->ref = s;
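/* Examples of what post_type() produces (illustrative):

       int f(int, char *);   -> VT_FUNC, with ref->next listing the two parameters
       int a[10];            -> VT_ARRAY | VT_PTR, ref->c == 10
       int v[n];             -> (inside a function, non-constant n) VT_VLA | VT_PTR,
                                with the runtime size stored in a stack slot at ref->c
   */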
4123 /* Parse a type declaration (except basic type), and return the type
4124 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4125 expected. 'type' should contain the basic type. 'ad' is the
4126 attribute definition of the basic type. It can be modified by
4127 type_decl().
4129 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4131 Sym *s;
4132 CType type1, *type2;
4133 int qualifiers, storage;
4135 while (tok == '*') {
4136 qualifiers = 0;
4137 redo:
4138 next();
4139 switch(tok) {
4140 case TOK_CONST1:
4141 case TOK_CONST2:
4142 case TOK_CONST3:
4143 qualifiers |= VT_CONSTANT;
4144 goto redo;
4145 case TOK_VOLATILE1:
4146 case TOK_VOLATILE2:
4147 case TOK_VOLATILE3:
4148 qualifiers |= VT_VOLATILE;
4149 goto redo;
4150 case TOK_RESTRICT1:
4151 case TOK_RESTRICT2:
4152 case TOK_RESTRICT3:
4153 goto redo;
4154 /* XXX: clarify attribute handling */
4155 case TOK_ATTRIBUTE1:
4156 case TOK_ATTRIBUTE2:
4157 parse_attribute(ad);
4158 break;
4160 mk_pointer(type);
4161 type->t |= qualifiers;
4164 /* recursive type */
4165 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4166 type1.t = 0; /* XXX: same as int */
4167 if (tok == '(') {
4168 next();
4169 /* XXX: it is not correct to modify 'ad' at this point, but
4170 the syntax is not clear */
4171 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4172 parse_attribute(ad);
4173 type_decl(&type1, ad, v, td);
4174 skip(')');
4175 } else {
4176 /* type identifier */
4177 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4178 *v = tok;
4179 next();
4180 } else {
4181 if (!(td & TYPE_ABSTRACT))
4182 expect("identifier");
4183 *v = 0;
4186 storage = type->t & VT_STORAGE;
4187 type->t &= ~VT_STORAGE;
4188 post_type(type, ad, storage);
4189 type->t |= storage;
4190 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4191 parse_attribute(ad);
4193 if (!type1.t)
4194 return;
4195 /* append type at the end of type1 */
4196 type2 = &type1;
4197 for(;;) {
4198 s = type2->ref;
4199 type2 = &s->type;
4200 if (!type2->t) {
4201 *type2 = *type;
4202 break;
4205 *type = type1;
4208 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4209 ST_FUNC int lvalue_type(int t)
4211 int bt, r;
4212 r = VT_LVAL;
4213 bt = t & VT_BTYPE;
4214 if (bt == VT_BYTE || bt == VT_BOOL)
4215 r |= VT_LVAL_BYTE;
4216 else if (bt == VT_SHORT)
4217 r |= VT_LVAL_SHORT;
4218 else
4219 return r;
4220 if (t & VT_UNSIGNED)
4221 r |= VT_LVAL_UNSIGNED;
4222 return r;
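/* e.g. (illustrative)

       lvalue_type(VT_BYTE | VT_UNSIGNED) == VT_LVAL | VT_LVAL_BYTE | VT_LVAL_UNSIGNED
       lvalue_type(VT_SHORT)              == VT_LVAL | VT_LVAL_SHORT
       lvalue_type(VT_INT)                == VT_LVAL

   the load/store code uses these bits to pick the right access width and
   sign extension. */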
4225 /* indirection with full error checking and bound check */
4226 ST_FUNC void indir(void)
4228 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4229 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4230 return;
4231 expect("pointer");
4233 if (vtop->r & VT_LVAL)
4234 gv(RC_INT);
4235 vtop->type = *pointed_type(&vtop->type);
4236 /* Arrays and functions are never lvalues */
4237 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4238 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4239 vtop->r |= lvalue_type(vtop->type.t);
4240 /* if bound checking, the referenced pointer must be checked */
4241 #ifdef CONFIG_TCC_BCHECK
4242 if (tcc_state->do_bounds_check)
4243 vtop->r |= VT_MUSTBOUND;
4244 #endif
4248 /* pass a parameter to a function and do type checking and casting */
4249 static void gfunc_param_typed(Sym *func, Sym *arg)
4251 int func_type;
4252 CType type;
4254 func_type = func->c;
4255 if (func_type == FUNC_OLD ||
4256 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4257 /* default casting : only need to convert float to double */
4258 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4259 type.t = VT_DOUBLE;
4260 gen_cast(&type);
4261 } else if (vtop->type.t & VT_BITFIELD) {
4262 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4263 type.ref = vtop->type.ref;
4264 gen_cast(&type);
4266 } else if (arg == NULL) {
4267 tcc_error("too many arguments to function");
4268 } else {
4269 type = arg->type;
4270 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4271 gen_assign_cast(&type);
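/* For an old-style or variadic callee the default promotions apply, e.g.
   (illustrative)

       int printf(const char *, ...);
       float f = 1.5f;
       printf("%f", f);        // f is promoted to double for the '...' part

   whereas for prototyped parameters gen_assign_cast() enforces the declared
   parameter type. */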
4275 /* parse an expression of the form '(type)' or '(expr)' and return its
4276 type */
4277 static void parse_expr_type(CType *type)
4279 int n;
4280 AttributeDef ad;
4282 skip('(');
4283 if (parse_btype(type, &ad)) {
4284 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4285 } else {
4286 expr_type(type);
4288 skip(')');
4291 static void parse_type(CType *type)
4293 AttributeDef ad;
4294 int n;
4296 if (!parse_btype(type, &ad)) {
4297 expect("type");
4299 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4302 static void vpush_tokc(int t)
4304 CType type;
4305 type.t = t;
4306 type.ref = 0;
4307 vsetc(&type, VT_CONST, &tokc);
4310 ST_FUNC void unary(void)
4312 int n, t, align, size, r, sizeof_caller;
4313 CType type;
4314 Sym *s;
4315 AttributeDef ad;
4317 sizeof_caller = in_sizeof;
4318 in_sizeof = 0;
4319 /* XXX: GCC 2.95.3 does not generate a jump table here although it
4320 would be better */
4321 tok_next:
4322 switch(tok) {
4323 case TOK_EXTENSION:
4324 next();
4325 goto tok_next;
4326 case TOK_CINT:
4327 case TOK_CCHAR:
4328 case TOK_LCHAR:
4329 vpushi(tokc.i);
4330 next();
4331 break;
4332 case TOK_CUINT:
4333 vpush_tokc(VT_INT | VT_UNSIGNED);
4334 next();
4335 break;
4336 case TOK_CLLONG:
4337 vpush_tokc(VT_LLONG);
4338 next();
4339 break;
4340 case TOK_CULLONG:
4341 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4342 next();
4343 break;
4344 case TOK_CFLOAT:
4345 vpush_tokc(VT_FLOAT);
4346 next();
4347 break;
4348 case TOK_CDOUBLE:
4349 vpush_tokc(VT_DOUBLE);
4350 next();
4351 break;
4352 case TOK_CLDOUBLE:
4353 vpush_tokc(VT_LDOUBLE);
4354 next();
4355 break;
4356 case TOK___FUNCTION__:
4357 if (!gnu_ext)
4358 goto tok_identifier;
4359 /* fall thru */
4360 case TOK___FUNC__:
4361 {
4362 void *ptr;
4363 int len;
4364 /* special function name identifier */
4365 len = strlen(funcname) + 1;
4366 /* generate char[len] type */
4367 type.t = VT_BYTE;
4368 mk_pointer(&type);
4369 type.t |= VT_ARRAY;
4370 type.ref->c = len;
4371 vpush_ref(&type, data_section, data_section->data_offset, len);
4372 ptr = section_ptr_add(data_section, len);
4373 memcpy(ptr, funcname, len);
4374 next();
4375 }
4376 break;
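/* Example: inside "void foo(void)", __func__ (and the GNU
   __FUNCTION__) evaluates to "foo": the case above copies the
   4 bytes of "foo" plus its terminator into the data section and
   pushes a char[4] reference to them. */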
4377 case TOK_LSTR:
4378 #ifdef TCC_TARGET_PE
4379 t = VT_SHORT | VT_UNSIGNED;
4380 #else
4381 t = VT_INT;
4382 #endif
4383 goto str_init;
4384 case TOK_STR:
4385 /* string parsing */
4386 t = VT_BYTE;
4387 str_init:
4388 if (tcc_state->warn_write_strings)
4389 t |= VT_CONSTANT;
4390 type.t = t;
4391 mk_pointer(&type);
4392 type.t |= VT_ARRAY;
4393 memset(&ad, 0, sizeof(AttributeDef));
4394 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4395 break;
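/* For wide strings the element type depends on the target's wchar_t:
   a 16-bit unsigned short on PE (Windows) targets, int elsewhere.
   So L"hi" becomes an array of those elements while "hi" stays a
   char array; both are laid out by decl_initializer_alloc above. */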
4396 case '(':
4397 next();
4398 /* cast ? */
4399 if (parse_btype(&type, &ad)) {
4400 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4401 skip(')');
4402 /* check ISOC99 compound literal */
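/* Example: "(int[]){1, 2, 3}" creates an unnamed initialized object.
   It gets static storage when this appears inside a global
   initializer (global_expr), otherwise it lives on the stack. */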
4403 if (tok == '{') {
4404 /* data is allocated locally by default */
4405 if (global_expr)
4406 r = VT_CONST;
4407 else
4408 r = VT_LOCAL;
4409 /* all except arrays are lvalues */
4410 if (!(type.t & VT_ARRAY))
4411 r |= lvalue_type(type.t);
4412 memset(&ad, 0, sizeof(AttributeDef));
4413 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4414 } else {
4415 if (sizeof_caller) {
4416 vpush(&type);
4417 return;
4418 }
4419 unary();
4420 gen_cast(&type);
4421 }
4422 } else if (tok == '{') {
4423 int saved_nocode_wanted = nocode_wanted;
4424 if (const_wanted)
4425 tcc_error("expected constant");
4426 /* save all registers */
4427 save_regs(0);
4428 /* statement expression : we do not accept break/continue
4429 inside as GCC does. We do retain the nocode_wanted state,
4430 as statement expressions can't ever be entered from the
4431 outside, so any reactivation of code emission (from labels
4432 or loop heads) can be disabled again after the end of it. */
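/* Example: "int y = ({ int i = f(); i * 2; });" compiles the block
   and uses the value of its last expression statement as the value
   of the whole parenthesized construct (GNU extension). */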
4433 block(NULL, NULL, 1);
4434 nocode_wanted = saved_nocode_wanted;
4435 skip(')');
4436 } else {
4437 gexpr();
4438 skip(')');
4439 }
4440 break;
4441 case '*':
4442 next();
4443 unary();
4444 indir();
4445 break;
4446 case '&':
4447 next();
4448 unary();
4449 /* functions names must be treated as function pointers,
4450 except for unary '&' and sizeof. Since we consider that
4451 functions are not lvalues, we only have to handle it
4452 there and in function calls. */
4453 /* arrays can also be used although they are not lvalues */
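/* Example: given "int a[10];" and a function f, both "&a" and "&f"
   are accepted here without the lvalue test; the results have type
   "pointer to int[10]" and "pointer to function" respectively. */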
4454 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4455 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4456 test_lvalue();
4457 mk_pointer(&vtop->type);
4458 gaddrof();
4459 break;
4460 case '!':
4461 next();
4462 unary();
4463 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4464 CType boolean;
4465 boolean.t = VT_BOOL;
4466 gen_cast(&boolean);
4467 vtop->c.i = !vtop->c.i;
4468 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4469 vtop->c.i ^= 1;
4470 else {
4471 save_regs(1);
4472 vseti(VT_JMP, gvtst(1, 0));
4473 }
4474 break;
4475 case '~':
4476 next();
4477 unary();
4478 vpushi(-1);
4479 gen_op('^');
4480 break;
4481 case '+':
4482 next();
4483 unary();
4484 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4485 tcc_error("pointer not accepted for unary plus");
4486 /* In order to force cast, we add zero, except for floating point
4487 where we really need an noop (otherwise -0.0 will be transformed
4488 into +0.0). */
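/* Example: "+n" on an int goes through "n + 0", which forces the
   usual arithmetic conversion, but "+(-0.0)" must stay -0.0 and
   "-0.0 + 0.0" would yield +0.0, so no add is emitted for floats. */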
4489 if (!is_float(vtop->type.t)) {
4490 vpushi(0);
4491 gen_op('+');
4492 }
4493 break;
4494 case TOK_SIZEOF:
4495 case TOK_ALIGNOF1:
4496 case TOK_ALIGNOF2:
4497 t = tok;
4498 next();
4499 in_sizeof++;
4500 unary_type(&type); // also resets in_sizeof to 0 (done at the top of unary())
4501 size = type_size(&type, &align);
4502 if (t == TOK_SIZEOF) {
4503 if (!(type.t & VT_VLA)) {
4504 if (size < 0)
4505 tcc_error("sizeof applied to an incomplete type");
4506 vpushs(size);
4507 } else {
4508 vla_runtime_type_size(&type, &align);
4509 }
4510 } else {
4511 vpushs(align);
4512 }
4513 vtop->type.t |= VT_UNSIGNED;
4514 break;
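/* In the sizeof/alignof case above, "sizeof(int)" is folded to a
   constant at compile time, while the size of a VLA such as
   "char buf[n]" is computed at run time; either way the pushed
   result is given unsigned type. */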
4516 case TOK_builtin_expect:
4517 {
4518 /* __builtin_expect is a no-op for now */
4519 next();
4520 skip('(');
4521 expr_eq();
4522 skip(',');
4523 nocode_wanted++;
4524 expr_lor_const();
4525 vpop();
4526 nocode_wanted--;
4527 skip(')');
4528 }
4529 break;
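/* Example: "if (__builtin_expect(n == 0, 1)) ..." evaluates n == 0
   normally and yields it as the result; the expected-value argument
   is parsed with code generation disabled and discarded, so no
   branch hint is actually emitted. */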
4530 case TOK_builtin_types_compatible_p:
4531 {
4532 CType type1, type2;
4533 next();
4534 skip('(');
4535 parse_type(&type1);
4536 skip(',');
4537 parse_type(&type2);
4538 skip(')');
4539 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4540 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4541 vpushi(is_compatible_types(&type1, &type2));
4542 }
4543 break;
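/* Top-level qualifiers are stripped above, so
   __builtin_types_compatible_p(int, const int) yields 1 while
   __builtin_types_compatible_p(int, unsigned int) yields 0. */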
4544 case TOK_builtin_choose_expr:
4545 {
4546 int64_t c;
4547 next();
4548 skip('(');
4549 c = expr_const64();
4550 skip(',');
4551 if (!c) {
4552 nocode_wanted++;
4553 }
4554 expr_eq();
4555 if (!c) {
4556 vpop();
4557 nocode_wanted--;
4558 }
4559 skip(',');
4560 if (c) {
4561 nocode_wanted++;
4562 }
4563 expr_eq();
4564 if (c) {
4565 vpop();
4566 nocode_wanted--;
4567 }
4568 skip(')');
4569 }
4570 break;
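/* The constant first argument selects which branch is compiled:
   __builtin_choose_expr(1, f(), g()) generates code only for f();
   the rejected branch is still parsed, but with nocode_wanted set,
   and its value is popped. */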
4571 case TOK_builtin_constant_p:
4572 {
4573 int res;
4574 next();
4575 skip('(');
4576 nocode_wanted++;
4577 gexpr();
4578 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4579 vpop();
4580 nocode_wanted--;
4581 skip(')');
4582 vpushi(res);
4583 }
4584 break;
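/* Example: __builtin_constant_p(2 * 21) yields 1 and
   __builtin_constant_p(argc) yields 0; the argument is parsed with
   code generation disabled, so it is never evaluated at run time. */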
4585 case TOK_builtin_frame_address:
4586 case TOK_builtin_return_address:
4587 {
4588 int tok1 = tok;
4589 int level;
4590 CType type;
4591 next();
4592 skip('(');
4593 if (tok != TOK_CINT) {
4594 tcc_error("%s only takes positive integers",
4595 tok1 == TOK_builtin_return_address ?
4596 "__builtin_return_address" :
4597 "__builtin_frame_address");
4598 }
4599 level = (uint32_t)tokc.i;
4600 next();
4601 skip(')');
4602 type.t = VT_VOID;
4603 mk_pointer(&type);
4604 vset(&type, VT_LOCAL, 0); /* local frame */
4605 while (level--) {
4606 mk_pointer(&vtop->type);
4607 indir(); /* -> parent frame */
4608 }
4609 if (tok1 == TOK_builtin_return_address) {
4610 // assume return address is just above frame pointer on stack
4611 vpushi(PTR_SIZE);
4612 gen_op('+');
4613 mk_pointer(&vtop->type);
4614 indir();
4615 }
4616 }
4617 break;
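/* The walk above assumes each frame begins with the saved frame
   pointer and that the return address sits one word above it, so
   __builtin_frame_address(1) dereferences the current frame pointer
   once and __builtin_return_address(0) reads the word at frame
   pointer + PTR_SIZE. */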
4618 #ifdef TCC_TARGET_X86_64
4619 #ifdef TCC_TARGET_PE
4620 case TOK_builtin_va_start:
4621 {
4622 next();
4623 skip('(');
4624 expr_eq();
4625 skip(',');
4626 expr_eq();
4627 skip(')');
4628 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4629 tcc_error("__builtin_va_start expects a local variable");
4630 vtop->r &= ~(VT_LVAL | VT_REF);
4631 vtop->type = char_pointer_type;
4632 vtop->c.i += 8;
4633 vstore();
4634 }
4635 break;
4636 #else
4637 case TOK_builtin_va_arg_types:
4638 {
4639 CType type;
4640 next();
4641 skip('(');
4642 parse_type(&type);
4643 skip(')');
4644 vpushi(classify_x86_64_va_arg(&type));
4645 }
4646 break;
4647 #endif
4648 #endif
4650 #ifdef TCC_TARGET_ARM64
4651 case TOK___va_start: {
4652 next();
4653 skip('(');
4654 expr_eq();
4655 skip(',');
4656 expr_eq();
4657 skip(')');
4658 //xx check types
4659 gen_va_start();
4660 vpushi(0);
4661 vtop->type.t = VT_VOID;
4662 break;
4663 }
4664 case TOK___va_arg: {
4665 CType type;
4666 next();
4667 skip('(');
4668 expr_eq();
4669 skip(',');
4670 parse_type(&type);
4671 skip(')');
4672 //xx check types
4673 gen_va_arg(&type);
4674 vtop->type = type;
4675 break;
4676 }
4677 case TOK___arm64_clear_cache: {
4678 next();
4679 skip('(');
4680 expr_eq();
4681 skip(',');
4682 expr_eq();
4683 skip(')');
4684 gen_clear_cache();
4685 vpushi(0);
4686 vtop->type.t = VT_VOID;
4687 break;
4688 }
4689 #endif
4690 /* pre operations */
4691 case TOK_INC:
4692 case TOK_DEC:
4693 t = tok;
4694 next();
4695 unary();
4696 inc(0, t);
4697 break;
4698 case '-':
4699 next();
4700 unary();
4701 t = vtop->type.t & VT_BTYPE;
4702 if (is_float(t)) {
4703 /* In IEEE negate(x) isn't subtract(0,x), but rather
4704 subtract(-0, x). */
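/* e.g. the result of "-x" for x == +0.0 must be -0.0; since
   0.0 - 0.0 is +0.0 in IEEE arithmetic, a negative zero of the
   operand's type is pushed and x is then subtracted from it. */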
4705 vpush(&vtop->type);
4706 if (t == VT_FLOAT)
4707 vtop->c.f = -1.0 * 0.0;
4708 else if (t == VT_DOUBLE)
4709 vtop->c.d = -1.0 * 0.0;