SECTION_ALIGNMENT -> RUN_SECTION_ALIGNMENT, and tweaks
[tinycc.git] / tccgen.c
blob70e6fb656665fbb63d750552d4fd2f04a93cc150
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */

#if 0
/* dump 'b' value-stack entries starting 'a' slots below vtop, each
   prefixed with label 'lbl' (disabled debugging helper) */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
142 /* ------------------------------------------------------------------------- */
143 ST_FUNC void tccgen_start(TCCState *s1)
145 cur_text_section = NULL;
146 funcname = "";
147 anon_sym = SYM_FIRST_ANOM;
148 section_sym = 0;
149 const_wanted = 0;
150 nocode_wanted = 1;
152 /* define some often used types */
153 int_type.t = VT_INT;
154 char_pointer_type.t = VT_BYTE;
155 mk_pointer(&char_pointer_type);
156 #if PTR_SIZE == 4
157 size_type.t = VT_INT;
158 #else
159 size_type.t = VT_LLONG;
160 #endif
161 func_old_type.t = VT_FUNC;
162 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
164 if (s1->do_debug) {
165 char buf[512];
167 /* file info: full path + filename */
168 section_sym = put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
170 text_section->sh_num, NULL);
171 getcwd(buf, sizeof(buf));
172 #ifdef _WIN32
173 normalize_slashes(buf);
174 #endif
175 pstrcat(buf, sizeof(buf), "/");
176 put_stabs_r(buf, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
178 put_stabs_r(file->filename, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
181 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
182 symbols can be safely used */
183 put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
185 SHN_ABS, file->filename);
187 #ifdef TCC_TARGET_ARM
188 arm_init(s1);
189 #endif
192 ST_FUNC void tccgen_end(TCCState *s1)
194 gen_inline_functions(s1);
195 check_vstack();
196 /* end of translation unit info */
197 if (s1->do_debug) {
198 put_stabs_r(NULL, N_SO, 0, 0,
199 text_section->data_offset, text_section, section_sym);
/* ------------------------------------------------------------------------- */
/* update sym->c so that it points to an external symbol in section
   'section' with value 'value' */

ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
                             addr_t value, unsigned long size,
                             int can_add_underscore)
{
    int sym_type, sym_bind, sh_num, info, other;
    ElfW(Sym) *esym;
    const char *name;
    char buf1[256];
#ifdef CONFIG_TCC_BCHECK
    char buf[32];
#endif

    /* map the section argument to an ELF section index */
    if (section == NULL)
        sh_num = SHN_UNDEF;
    else if (section == SECTION_ABS)
        sh_num = SHN_ABS;
    else
        sh_num = section->sh_num;

    /* derive the ELF symbol type from the C basic type */
    if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        sym_type = STT_FUNC;
    } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
        sym_type = STT_NOTYPE;
    } else {
        sym_type = STT_OBJECT;
    }

    /* derive the ELF binding from the storage class */
    if (sym->type.t & VT_STATIC)
        sym_bind = STB_LOCAL;
    else {
        if (sym->type.t & VT_WEAK)
            sym_bind = STB_WEAK;
        else
            sym_bind = STB_GLOBAL;
    }

    if (!sym->c) {
        /* first reference: create the ELF symbol */
        name = get_tok_str(sym->v, NULL);
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check) {
            /* XXX: avoid doing that for statics ? */
            /* if bound checking is activated, we change some function
               names by adding the "__bound" prefix */
            switch(sym->v) {
#ifdef TCC_TARGET_PE
            /* XXX: we rely only on malloc hooks */
            case TOK_malloc:
            case TOK_free:
            case TOK_realloc:
            case TOK_memalign:
            case TOK_calloc:
#endif
            case TOK_memcpy:
            case TOK_memmove:
            case TOK_memset:
            case TOK_strlen:
            case TOK_strcpy:
            case TOK_alloca:
                /* NOTE(review): buf is 32 bytes; this is only safe while
                   all redirected names stay short — confirm when adding
                   new cases */
                strcpy(buf, "__bound_");
                strcat(buf, name);
                name = buf;
                break;
            }
        }
#endif
        other = 0;
#ifdef TCC_TARGET_PE
        if (sym->type.t & VT_EXPORT)
            other |= ST_PE_EXPORT;
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->a.func_export)
                other |= ST_PE_EXPORT;
            /* stdcall decoration: _name@<stack bytes> */
            if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
                sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        } else {
            if (find_elf_sym(tcc_state->dynsymtab_section, name))
                other |= ST_PE_IMPORT;
            if (sym->type.t & VT_IMPORT)
                other |= ST_PE_IMPORT;
        }
#else
        if (! (sym->type.t & VT_STATIC))
            other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
#endif
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        /* an explicit asm label overrides the computed name */
        if (sym->asm_label) {
            name = get_tok_str(sym->asm_label, NULL);
        }
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
    } else {
        /* symbol already exists: update value/size/section in place */
        esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
}
316 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
317 addr_t value, unsigned long size)
319 put_extern_sym2(sym, section, value, size, 1);
322 /* add a new relocation entry to symbol 'sym' in section 's' */
323 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
324 addr_t addend)
326 int c = 0;
328 if (nocode_wanted && s == cur_text_section)
329 return;
331 if (sym) {
332 if (0 == sym->c)
333 put_extern_sym(sym, NULL, 0, 0);
334 c = sym->c;
337 /* now we can add ELF relocation info */
338 put_elf_reloca(symtab_section, s, offset, type, c, addend);
341 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
343 greloca(s, sym, offset, type, 0);
346 /* ------------------------------------------------------------------------- */
347 /* symbol allocator */
348 static Sym *__sym_malloc(void)
350 Sym *sym_pool, *sym, *last_sym;
351 int i;
353 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
354 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
356 last_sym = sym_free_first;
357 sym = sym_pool;
358 for(i = 0; i < SYM_POOL_NB; i++) {
359 sym->next = last_sym;
360 last_sym = sym;
361 sym++;
363 sym_free_first = last_sym;
364 return last_sym;
367 static inline Sym *sym_malloc(void)
369 Sym *sym;
370 #ifndef SYM_DEBUG
371 sym = sym_free_first;
372 if (!sym)
373 sym = __sym_malloc();
374 sym_free_first = sym->next;
375 return sym;
376 #else
377 sym = tcc_malloc(sizeof(Sym));
378 return sym;
379 #endif
382 ST_INLN void sym_free(Sym *sym)
384 #ifndef SYM_DEBUG
385 sym->next = sym_free_first;
386 sym_free_first = sym;
387 #else
388 tcc_free(sym);
389 #endif
392 /* push, without hashing */
393 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
395 Sym *s;
397 s = sym_malloc();
398 s->scope = 0;
399 s->v = v;
400 s->type.t = t;
401 s->type.ref = NULL;
402 #ifdef _WIN64
403 s->d = NULL;
404 #endif
405 s->c = c;
406 s->next = NULL;
407 /* add in stack */
408 s->prev = *ps;
409 *ps = s;
410 return s;
413 /* find a symbol and return its associated structure. 's' is the top
414 of the symbol stack */
415 ST_FUNC Sym *sym_find2(Sym *s, int v)
417 while (s) {
418 if (s->v == v)
419 return s;
420 else if (s->v == -1)
421 return NULL;
422 s = s->prev;
424 return NULL;
427 /* structure lookup */
428 ST_INLN Sym *struct_find(int v)
430 v -= TOK_IDENT;
431 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
432 return NULL;
433 return table_ident[v]->sym_struct;
436 /* find an identifier */
437 ST_INLN Sym *sym_find(int v)
439 v -= TOK_IDENT;
440 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
441 return NULL;
442 return table_ident[v]->sym_identifier;
445 /* push a given symbol on the symbol stack */
446 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
448 Sym *s, **ps;
449 TokenSym *ts;
451 if (local_stack)
452 ps = &local_stack;
453 else
454 ps = &global_stack;
455 s = sym_push2(ps, v, type->t, c);
456 s->type.ref = type->ref;
457 s->r = r;
458 /* don't record fields or anonymous symbols */
459 /* XXX: simplify */
460 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
461 /* record symbol in token array */
462 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
463 if (v & SYM_STRUCT)
464 ps = &ts->sym_struct;
465 else
466 ps = &ts->sym_identifier;
467 s->prev_tok = *ps;
468 *ps = s;
469 s->scope = local_scope;
470 if (s->prev_tok && s->prev_tok->scope == s->scope)
471 tcc_error("redeclaration of '%s'",
472 get_tok_str(v & ~SYM_STRUCT, NULL));
474 return s;
477 /* push a global identifier */
478 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
480 Sym *s, **ps;
481 s = sym_push2(&global_stack, v, t, c);
482 /* don't record anonymous symbol */
483 if (v < SYM_FIRST_ANOM) {
484 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
485 /* modify the top most local identifier, so that
486 sym_identifier will point to 's' when popped */
487 while (*ps != NULL)
488 ps = &(*ps)->prev_tok;
489 s->prev_tok = NULL;
490 *ps = s;
492 return s;
495 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
496 pop them yet from the list, but do remove them from the token array. */
497 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
499 Sym *s, *ss, **ps;
500 TokenSym *ts;
501 int v;
503 s = *ptop;
504 while(s != b) {
505 ss = s->prev;
506 v = s->v;
507 /* remove symbol in token array */
508 /* XXX: simplify */
509 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
510 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
511 if (v & SYM_STRUCT)
512 ps = &ts->sym_struct;
513 else
514 ps = &ts->sym_identifier;
515 *ps = s->prev_tok;
517 if (!keep)
518 sym_free(s);
519 s = ss;
521 if (!keep)
522 *ptop = b;
525 static void weaken_symbol(Sym *sym)
527 sym->type.t |= VT_WEAK;
528 if (sym->c > 0) {
529 int esym_type;
530 ElfW(Sym) *esym;
532 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
533 esym_type = ELFW(ST_TYPE)(esym->st_info);
534 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
/* merge the ELF visibility of 'type' into 'sym' (when both are
   non-default, the numerically smaller value wins) and propagate the
   result to the ELF symbol if it already exists */
static void apply_visibility(Sym *sym, CType *type)
{
    int vis = sym->type.t & VT_VIS_MASK;
    int vis2 = type->t & VT_VIS_MASK;
    if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
        vis = vis2;
    else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
        ; /* keep existing visibility */
    else
        vis = (vis < vis2) ? vis : vis2;
    sym->type.t &= ~VT_VIS_MASK;
    sym->type.t |= vis;

    if (sym->c > 0) {
        ElfW(Sym) *esym;

        esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
        vis >>= VT_VIS_SHIFT;
        /* replace the visibility bits in st_other, keep the rest */
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
    }
}
560 /* ------------------------------------------------------------------------- */
562 ST_FUNC void swap(int *p, int *q)
564 int t;
565 t = *p;
566 *p = *q;
567 *q = t;
/* push a new entry described by (type, r, vc) on the value stack */
static void vsetc(CType *type, int r, CValue *vc)
{
    int v;

    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted. vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real. All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again. */
    if (vtop >= vstack && !nocode_wanted) {
        v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    vtop++;
    vtop->type = *type;
    vtop->r = r;
    vtop->r2 = VT_CONST;  /* no second register by default */
    vtop->c = *vc;
    vtop->sym = NULL;
}
600 /* push constant of type "type" with useless value */
601 ST_FUNC void vpush(CType *type)
603 CValue cval;
604 vsetc(type, VT_CONST, &cval);
607 /* push integer constant */
608 ST_FUNC void vpushi(int v)
610 CValue cval;
611 cval.i = v;
612 vsetc(&int_type, VT_CONST, &cval);
615 /* push a pointer sized constant */
616 static void vpushs(addr_t v)
618 CValue cval;
619 cval.i = v;
620 vsetc(&size_type, VT_CONST, &cval);
623 /* push arbitrary 64bit constant */
624 ST_FUNC void vpush64(int ty, unsigned long long v)
626 CValue cval;
627 CType ctype;
628 ctype.t = ty;
629 ctype.ref = NULL;
630 cval.i = v;
631 vsetc(&ctype, VT_CONST, &cval);
634 /* push long long constant */
635 static inline void vpushll(long long v)
637 vpush64(VT_LLONG, v);
640 /* push a symbol value of TYPE */
641 static inline void vpushsym(CType *type, Sym *sym)
643 CValue cval;
644 cval.i = 0;
645 vsetc(type, VT_CONST | VT_SYM, &cval);
646 vtop->sym = sym;
649 /* Return a static symbol pointing to a section */
650 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
652 int v;
653 Sym *sym;
655 v = anon_sym++;
656 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
657 sym->type.ref = type->ref;
658 sym->r = VT_CONST | VT_SYM;
659 put_extern_sym(sym, sec, offset, size);
660 return sym;
663 /* push a reference to a section offset by adding a dummy symbol */
664 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
666 vpushsym(type, get_sym_ref(type, sec, offset, size));
669 /* define a new external reference to a symbol 'v' of type 'u' */
670 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
672 Sym *s;
674 s = sym_find(v);
675 if (!s) {
676 /* push forward reference */
677 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
678 s->type.ref = type->ref;
679 s->r = r | VT_CONST | VT_SYM;
681 return s;
684 /* define a new external reference to a symbol 'v' */
685 static Sym *external_sym(int v, CType *type, int r)
687 Sym *s;
689 s = sym_find(v);
690 if (!s) {
691 /* push forward reference */
692 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
693 s->type.t |= VT_EXTERN;
694 } else if (s->type.ref == func_old_type.ref) {
695 s->type.ref = type->ref;
696 s->r = r | VT_CONST | VT_SYM;
697 s->type.t |= VT_EXTERN;
698 } else if (!is_compatible_types(&s->type, type)) {
699 tcc_error("incompatible types for redefinition of '%s'",
700 get_tok_str(v, NULL));
702 /* Merge some storage attributes. */
703 if (type->t & VT_WEAK)
704 weaken_symbol(s);
706 if (type->t & VT_VIS_MASK)
707 apply_visibility(s, type);
709 return s;
712 /* push a reference to global symbol v */
713 ST_FUNC void vpush_global_sym(CType *type, int v)
715 vpushsym(type, external_global_sym(v, type, 0));
/* push a value of type 'type', storage 'r' and constant 'v' */
ST_FUNC void vset(CType *type, int r, long v)
{
    CValue cval;

    cval.i = v;
    vsetc(type, r, &cval);
}
726 static void vseti(int r, int v)
728 CType type;
729 type.t = VT_INT;
730 type.ref = 0;
731 vset(&type, r, v);
/* swap the top two entries of the value stack */
ST_FUNC void vswap(void)
{
    SValue tmp;

    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator. */
    if (vtop >= vstack) {
        int v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;

    /* XXX: +2% overall speed possible with optimized memswap
     *
     *  memswap(&vtop[0], &vtop[1], sizeof *vtop);
     */
}
/* push a copy of value 'v' on the value stack */
ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}
/* duplicate the top entry of the value stack */
static void vdup(void)
{
    vpushv(vtop);
}
768 /* save registers up to (vtop - n) stack entry */
769 ST_FUNC void save_regs(int n)
771 SValue *p, *p1;
772 for(p = vstack, p1 = vtop - n; p <= p1; p++)
773 save_reg(p->r);
/* save r to the memory stack, and mark it as being free
   (considers every entry of the value stack) */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, saved, size, align;
    SValue *p, *p1, sv;
    CType *type;

    /* nothing to do for pseudo "registers" (constants, locals, ...) */
    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                    type = &char_pointer_type;
#else
                    type = &int_type;
#endif
                size = type_size(type, &align);
                /* allocate an aligned slot below the current local area */
                loc = (loc - size) & -align;
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = loc;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
                /* special long long case */
                if ((type->t & VT_BTYPE) == VT_LLONG) {
                    sv.c.i += 4;
                    store(p->r2, &sv);
                }
#endif
                l = loc;
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r, refs;
    SValue *p;

    for (r = 0; r < NB_REGS; r++) {
        if (!(reg_classes[r] & rc2))
            continue;
        /* count value-stack entries referencing this register */
        refs = 0;
        for (p = vstack; p <= vtop; p++) {
            if ((p->r & VT_VALMASK) == r ||
                (p->r2 & VT_VALMASK) == r)
                refs++;
        }
        if (refs <= 1)
            return r;
    }
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when code generation is suppressed, any register of the
               class will do: nothing is live */
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
916 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
917 if needed */
918 static void move_reg(int r, int s, int t)
920 SValue sv;
922 if (r != s) {
923 save_reg(r);
924 sv.type.t = t;
925 sv.type.ref = NULL;
926 sv.r = s;
927 sv.c.i = 0;
928 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    /* a VT_REF value must be materialized in a register first */
    if (vtop->r & VT_REF)
        gv(RC_INT);
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            /* add 0 through the bound-checked adder to tag the pointer */
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
ST_FUNC int gv(int rc)
{
    int r, bit_pos, bit_size, size, align, i;
    int rc2;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;
        int bits = 32;
        bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
        /* cast to int to propagate signedness in following ops */
        if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
            type.t = VT_LLONG;
            bits = 64;
        } else
            type.t = VT_INT;
        if((vtop->type.t & VT_UNSIGNED) ||
           (vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;
        gen_cast(&type);
        /* generate shifts: left-align the field, then shift it back
           down so sign (or zero) extension fills the upper bits */
        vpushi(bits - (bit_pos + bit_size));
        gen_op(TOK_SHL);
        vpushi(bits - bit_size);
        /* NOTE: transformed to SHR if unsigned */
        gen_op(TOK_SAR);
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            Sym *sym;
            int *ptr;
            unsigned long offset;
#if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
            CValue check;
#endif

            /* XXX: unify with initializers handling ? */
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            offset = (data_section->data_offset + align - 1) & -align;
            data_section->data_offset = offset;
            /* XXX: not portable yet */
#if defined(__i386__) || defined(__x86_64__)
            /* Zero pad x87 tenbyte long doubles */
            if (size == LDOUBLE_SIZE) {
                vtop->c.tab[2] &= 0xffff;
#if LDOUBLE_SIZE == 16
                vtop->c.tab[3] = 0;
#endif
            }
#endif
            ptr = section_ptr_add(data_section, size);
            size = size >> 2;
#if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
            /* runtime endianness probe: store words reversed on
               big-endian hosts */
            check.d = 1;
            if(check.tab[0])
                for(i=0;i<size;i++)
                    ptr[i] = vtop->c.tab[size-1-i];
            else
#endif
            for(i=0;i<size;i++)
                ptr[i] = vtop->c.tab[i];
            sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
            vtop->r |= VT_LVAL | VT_SYM;
            vtop->sym = sym;
            vtop->c.i = 0;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        r = vtop->r & VT_VALMASK;
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
         || (vtop->r & VT_LVAL)
         || !(reg_classes[r] & rc)
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
         || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
         || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
         || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            )
        {
            r = get_reg(rc);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
#if 0
                    save_regs(1);
#else
                    /* lvalue_save: save only if used further down the stack */
                    save_reg_upstack(vtop->r, 1);
#endif
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_REF)
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                    t = VT_PTR;
#else
                    t = VT_INT;
#endif
                else if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    int v;

    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    v = vtop[0].r & VT_VALMASK;
    if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
1210 #ifndef TCC_TARGET_ARM64
1211 /* wrapper around RC_FRET to return a register by type */
/* Returns the register CLASS used to return a float value of type 't';
   on x86-64 a long double is returned on the x87 stack (RC_ST0). */
1212 static int rc_fret(int t)
1214 #ifdef TCC_TARGET_X86_64
1215 if (t == VT_LDOUBLE) {
1216 return RC_ST0;
1218 #endif
1219 return RC_FRET;
1221 #endif
1223 /* wrapper around REG_FRET to return a register by type */
/* Same idea as rc_fret(), but returns the concrete REGISTER number
   rather than a register class. */
1224 static int reg_fret(int t)
1226 #ifdef TCC_TARGET_X86_64
1227 if (t == VT_LDOUBLE) {
1228 return TREG_ST0;
1230 #endif
1231 return REG_FRET;
1234 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1235 /* expand 64bit on stack in two ints */
/* After the call the top two value-stack entries are two VT_INT halves
   of the original 64-bit value (vtop[-1] = low word, vtop[0] = high word);
   the original signedness bits are kept in 'u'. */
1236 static void lexpand(void)
1238 int u, v;
1239 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1240 v = vtop->r & (VT_VALMASK | VT_LVAL);
1241 if (v == VT_CONST) {
/* constant: high half is simply the constant shifted right 32 */
1242 vdup();
1243 vtop[0].c.i >>= 32;
1244 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
/* lvalue in memory: high half lives 4 bytes after the low half */
1245 vdup();
1246 vtop[0].c.i += 4;
1247 } else {
/* otherwise force the value into a register pair and split r/r2 */
1248 gv(RC_INT);
1249 vdup();
1250 vtop[0].r = vtop[-1].r2;
1251 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1253 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1255 #endif
1257 #ifdef TCC_TARGET_ARM
1258 /* expand long long on stack */
/* ARM-only variant of lexpand() that avoids forcing a register load
   when possible (nr = "no register"). */
1259 ST_FUNC void lexpand_nr(void)
1261 int u,v;
1263 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1264 vdup();
1265 vtop->r2 = VT_CONST;
1266 vtop->type.t = VT_INT | u;
1267 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1268 if (v == VT_CONST) {
1269 vtop[-1].c.i = vtop->c.i;
1270 vtop->c.i = vtop->c.i >> 32;
1271 vtop->r = VT_CONST;
1272 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1273 vtop->c.i += 4;
1274 vtop->r = vtop[-1].r;
1275 } else if (v > VT_CONST) {
/* value already in registers: fall back to the generic expansion */
1276 vtop--;
1277 lexpand();
1278 } else
1279 vtop->r = vtop[-1].r2;
1280 vtop[-1].r2 = VT_CONST;
1281 vtop[-1].type.t = VT_INT | u;
1283 #endif
1285 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1286 /* build a long long from two ints */
/* Inverse of lexpand(): combines vtop[-1] (low) and vtop[0] (high)
   into a single value of type 't' using a register pair (r/r2). */
1287 static void lbuild(int t)
1289 gv2(RC_INT, RC_INT);
1290 vtop[-1].r2 = vtop[0].r;
1291 vtop[-1].type.t = t;
1292 vpop();
1294 #endif
1296 /* rotate n first stack elements to the bottom
1297 I1 ... In -> I2 ... In I1 [top is right]
1299 ST_FUNC void vrotb(int n)
1301 int i;
1302 SValue tmp;
1304 tmp = vtop[-n + 1];
1305 for(i=-n+1;i!=0;i++)
1306 vtop[i] = vtop[i+1];
1307 vtop[0] = tmp;
1310 /* rotate the n elements before entry e towards the top
1311 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1313 ST_FUNC void vrote(SValue *e, int n)
1315 int i;
1316 SValue tmp;
1318 tmp = *e;
1319 for(i = 0;i < n - 1; i++)
1320 e[-i] = e[-i - 1];
1321 e[-n + 1] = tmp;
1324 /* rotate n first stack elements to the top
1325 I1 ... In -> In I1 ... I(n-1) [top is right]
1327 ST_FUNC void vrott(int n)
1329 vrote(vtop, n);
1332 /* pop stack value */
/* Discards the top value-stack entry, emitting any code needed to keep
   the machine state consistent (x87 stack pop, pending jump targets). */
1333 ST_FUNC void vpop(void)
1335 int v;
1336 v = vtop->r & VT_VALMASK;
1337 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1338 /* for x86, we need to pop the FP stack */
1339 if (v == TREG_ST0) {
1340 o(0xd8dd); /* fstp %st(0) */
1341 } else
1342 #endif
1343 if (v == VT_JMP || v == VT_JMPI) {
1344 /* need to put correct jump if && or || without test */
1345 gsym(vtop->c.i);
1347 vtop--;
1350 /* convert stack entry to register and duplicate its value in another
1351 register */
1352 static void gv_dup(void)
1354 int rc, t, r, r1;
1355 SValue sv;
1357 t = vtop->type.t;
1358 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1359 if ((t & VT_BTYPE) == VT_LLONG) {
/* 32-bit targets: split into halves, duplicate each half, rebuild
   both long long values */
1360 lexpand();
1361 gv_dup();
1362 vswap();
1363 vrotb(3);
1364 gv_dup();
1365 vrotb(4);
1366 /* stack: H L L1 H1 */
1367 lbuild(t);
1368 vrotb(3);
1369 vrotb(3);
1370 vswap();
1371 lbuild(t);
1372 vswap();
1373 } else
1374 #endif
1376 /* duplicate value */
1377 rc = RC_INT;
1378 sv.type.t = VT_INT;
1379 if (is_float(t)) {
1380 rc = RC_FLOAT;
1381 #ifdef TCC_TARGET_X86_64
1382 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1383 rc = RC_ST0;
1385 #endif
1386 sv.type.t = t;
1388 r = gv(rc);
1389 r1 = get_reg(rc);
1390 sv.r = r;
1391 sv.c.i = 0;
1392 load(r1, &sv); /* move r to r1 */
1393 vdup();
1394 /* duplicates value */
1395 if (r != r1)
1396 vtop->r = r1;
1400 /* Generate value test
1402 * Generate a test for any value (jump, comparison and integers) */
/* 'inv' inverts the test, 't' is the chained jump target list; returns the
   updated jump list.  A constant operand is folded into a direct jump. */
1403 ST_FUNC int gvtst(int inv, int t)
1405 int v = vtop->r & VT_VALMASK;
1406 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
/* plain value: compare against zero first to get a VT_CMP */
1407 vpushi(0);
1408 gen_op(TOK_NE);
1410 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1411 /* constant jmp optimization */
1412 if ((vtop->c.i != 0) != inv)
1413 t = gjmp(t);
1414 vtop--;
1415 return t;
1417 return gtst(inv, t);
1420 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1421 /* generate CPU independent (unsigned) long long operations */
/* 32-bit targets only: long long arithmetic is done on (low,high) word
   pairs on the value stack, or via libgcc-style helper calls
   (__divdi3, __ashldi3, ...) for div/mod/variable shifts. */
1422 static void gen_opl(int op)
1424 int t, a, b, op1, c, i;
1425 int func;
1426 unsigned short reg_iret = REG_IRET;
1427 unsigned short reg_lret = REG_LRET;
1428 SValue tmp;
1430 switch(op) {
1431 case '/':
1432 case TOK_PDIV:
1433 func = TOK___divdi3;
1434 goto gen_func;
1435 case TOK_UDIV:
1436 func = TOK___udivdi3;
1437 goto gen_func;
1438 case '%':
1439 func = TOK___moddi3;
1440 goto gen_mod_func;
1441 case TOK_UMOD:
1442 func = TOK___umoddi3;
1443 gen_mod_func:
1444 #ifdef TCC_ARM_EABI
/* EABI: 64-bit modulo result comes back in r2/r3, not r0/r1 */
1445 reg_iret = TREG_R2;
1446 reg_lret = TREG_R3;
1447 #endif
1448 gen_func:
1449 /* call generic long long function */
1450 vpush_global_sym(&func_old_type, func);
1451 vrott(3);
1452 gfunc_call(2);
1453 vpushi(0);
1454 vtop->r = reg_iret;
1455 vtop->r2 = reg_lret;
1456 break;
1457 case '^':
1458 case '&':
1459 case '|':
1460 case '*':
1461 case '+':
1462 case '-':
1463 //pv("gen_opl A",0,2);
1464 t = vtop->type.t;
1465 vswap();
1466 lexpand();
1467 vrotb(3);
1468 lexpand();
1469 /* stack: L1 H1 L2 H2 */
1470 tmp = vtop[0];
1471 vtop[0] = vtop[-3];
1472 vtop[-3] = tmp;
1473 tmp = vtop[-2];
1474 vtop[-2] = vtop[-3];
1475 vtop[-3] = tmp;
1476 vswap();
1477 /* stack: H1 H2 L1 L2 */
1478 //pv("gen_opl B",0,4);
1479 if (op == '*') {
/* 64x64 multiply: unsigned 32x32->64 of the low words, plus the two
   cross products L1*H2 and H1*L2 added into the high word */
1480 vpushv(vtop - 1);
1481 vpushv(vtop - 1);
1482 gen_op(TOK_UMULL);
1483 lexpand();
1484 /* stack: H1 H2 L1 L2 ML MH */
1485 for(i=0;i<4;i++)
1486 vrotb(6);
1487 /* stack: ML MH H1 H2 L1 L2 */
1488 tmp = vtop[0];
1489 vtop[0] = vtop[-2];
1490 vtop[-2] = tmp;
1491 /* stack: ML MH H1 L2 H2 L1 */
1492 gen_op('*');
1493 vrotb(3);
1494 vrotb(3);
1495 gen_op('*');
1496 /* stack: ML MH M1 M2 */
1497 gen_op('+');
1498 gen_op('+');
1499 } else if (op == '+' || op == '-') {
1500 /* XXX: add non carry method too (for MIPS or alpha) */
1501 if (op == '+')
1502 op1 = TOK_ADDC1;
1503 else
1504 op1 = TOK_SUBC1;
1505 gen_op(op1);
1506 /* stack: H1 H2 (L1 op L2) */
1507 vrotb(3);
1508 vrotb(3);
1509 gen_op(op1 + 1); /* TOK_xxxC2 */
1510 } else {
/* bitwise ops: apply independently to low and high words */
1511 gen_op(op);
1512 /* stack: H1 H2 (L1 op L2) */
1513 vrotb(3);
1514 vrotb(3);
1515 /* stack: (L1 op L2) H1 H2 */
1516 gen_op(op);
1517 /* stack: (L1 op L2) (H1 op H2) */
1519 /* stack: L H */
1520 lbuild(t);
1521 break;
1522 case TOK_SAR:
1523 case TOK_SHR:
1524 case TOK_SHL:
1525 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1526 t = vtop[-1].type.t;
1527 vswap();
1528 lexpand();
1529 vrotb(3);
1530 /* stack: L H shift */
1531 c = (int)vtop->c.i;
1532 /* constant: simpler */
1533 /* NOTE: all comments are for SHL. the other cases are
1534 done by swaping words */
1535 vpop();
1536 if (op != TOK_SHL)
1537 vswap();
1538 if (c >= 32) {
1539 /* stack: L H */
1540 vpop();
1541 if (c > 32) {
1542 vpushi(c - 32);
1543 gen_op(op);
1545 if (op != TOK_SAR) {
1546 vpushi(0);
1547 } else {
/* arithmetic shift >= 32: high word becomes the sign extension */
1548 gv_dup();
1549 vpushi(31);
1550 gen_op(TOK_SAR);
1552 vswap();
1553 } else {
1554 vswap();
1555 gv_dup();
1556 /* stack: H L L */
1557 vpushi(c);
1558 gen_op(op);
1559 vswap();
1560 vpushi(32 - c);
1561 if (op == TOK_SHL)
1562 gen_op(TOK_SHR);
1563 else
1564 gen_op(TOK_SHL);
1565 vrotb(3);
1566 /* stack: L L H */
1567 vpushi(c);
1568 if (op == TOK_SHL)
1569 gen_op(TOK_SHL);
1570 else
1571 gen_op(TOK_SHR);
1572 gen_op('|');
1574 if (op != TOK_SHL)
1575 vswap();
1576 lbuild(t);
1577 } else {
1578 /* XXX: should provide a faster fallback on x86 ? */
1579 switch(op) {
1580 case TOK_SAR:
1581 func = TOK___ashrdi3;
1582 goto gen_func;
1583 case TOK_SHR:
1584 func = TOK___lshrdi3;
1585 goto gen_func;
1586 case TOK_SHL:
1587 func = TOK___ashldi3;
1588 goto gen_func;
1591 break;
1592 default:
1593 /* compare operations */
1594 t = vtop->type.t;
1595 vswap();
1596 lexpand();
1597 vrotb(3);
1598 lexpand();
1599 /* stack: L1 H1 L2 H2 */
1600 tmp = vtop[-1];
1601 vtop[-1] = vtop[-2];
1602 vtop[-2] = tmp;
1603 /* stack: L1 L2 H1 H2 */
1604 /* compare high */
1605 op1 = op;
1606 /* when values are equal, we need to compare low words. since
1607 the jump is inverted, we invert the test too. */
1608 if (op1 == TOK_LT)
1609 op1 = TOK_LE;
1610 else if (op1 == TOK_GT)
1611 op1 = TOK_GE;
1612 else if (op1 == TOK_ULT)
1613 op1 = TOK_ULE;
1614 else if (op1 == TOK_UGT)
1615 op1 = TOK_UGE;
1616 a = 0;
1617 b = 0;
1618 gen_op(op1);
1619 if (op1 != TOK_NE) {
1620 a = gvtst(1, 0);
1622 if (op != TOK_EQ) {
1623 /* generate non equal test */
1624 /* XXX: NOT PORTABLE yet */
1625 if (a == 0) {
1626 b = gvtst(0, 0);
1627 } else {
1628 #if defined(TCC_TARGET_I386)
1629 b = gjmp2(0x850f, 0);
1630 #elif defined(TCC_TARGET_ARM)
1631 b = ind;
1632 o(0x1A000000 | encbranch(ind, 0, 1));
1633 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1634 tcc_error("not implemented");
1635 #else
1636 #error not supported
1637 #endif
1640 /* compare low. Always unsigned */
1641 op1 = op;
1642 if (op1 == TOK_LT)
1643 op1 = TOK_ULT;
1644 else if (op1 == TOK_LE)
1645 op1 = TOK_ULE;
1646 else if (op1 == TOK_GT)
1647 op1 = TOK_UGT;
1648 else if (op1 == TOK_GE)
1649 op1 = TOK_UGE;
1650 gen_op(op1);
1651 a = gvtst(1, a);
1652 gsym(b);
1653 vseti(VT_JMPI, a);
1654 break;
1657 #endif
/* Signed 64-bit division used for constant folding.  The operands are
   two's-complement values carried in uint64_t so that the arithmetic is
   done with well-defined unsigned operations (no signed-overflow UB). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? -b : b;   /* |b| */
    uint64_t q = ua / ub;
    /* quotient is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed 64-bit "less than" on two's-complement values stored in
   uint64_t: XOR-ing the sign bit maps signed order onto unsigned order. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1670 /* handle integer constant optimizations and various machine
1671 independent opt */
/* Folds op(vtop[-1], vtop[0]) when both are constants, applies algebraic
   simplifications (x*1, x|0, strength-reduce mul/div by powers of two,
   symbol+constant), and otherwise falls through to gen_opi()/gen_opl(). */
1672 static void gen_opic(int op)
1674 SValue *v1 = vtop - 1;
1675 SValue *v2 = vtop;
1676 int t1 = v1->type.t & VT_BTYPE;
1677 int t2 = v2->type.t & VT_BTYPE;
1678 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1679 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1680 uint64_t l1 = c1 ? v1->c.i : 0;
1681 uint64_t l2 = c2 ? v2->c.i : 0;
1682 int shm = (t1 == VT_LLONG) ? 63 : 31;
/* normalize 32-bit operands: keep low 32 bits and sign-extend if signed */
1684 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1685 l1 = ((uint32_t)l1 |
1686 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1687 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1688 l2 = ((uint32_t)l2 |
1689 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1691 if (c1 && c2) {
1692 switch(op) {
1693 case '+': l1 += l2; break;
1694 case '-': l1 -= l2; break;
1695 case '&': l1 &= l2; break;
1696 case '^': l1 ^= l2; break;
1697 case '|': l1 |= l2; break;
1698 case '*': l1 *= l2; break;
1700 case TOK_PDIV:
1701 case '/':
1702 case '%':
1703 case TOK_UDIV:
1704 case TOK_UMOD:
1705 /* if division by zero, generate explicit division */
1706 if (l2 == 0) {
1707 if (const_wanted)
1708 tcc_error("division by zero in constant");
1709 goto general_case;
1711 switch(op) {
1712 default: l1 = gen_opic_sdiv(l1, l2); break;
1713 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1714 case TOK_UDIV: l1 = l1 / l2; break;
1715 case TOK_UMOD: l1 = l1 % l2; break;
1717 break;
1718 case TOK_SHL: l1 <<= (l2 & shm); break;
1719 case TOK_SHR: l1 >>= (l2 & shm); break;
1720 case TOK_SAR:
1721 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1722 break;
1723 /* tests */
1724 case TOK_ULT: l1 = l1 < l2; break;
1725 case TOK_UGE: l1 = l1 >= l2; break;
1726 case TOK_EQ: l1 = l1 == l2; break;
1727 case TOK_NE: l1 = l1 != l2; break;
1728 case TOK_ULE: l1 = l1 <= l2; break;
1729 case TOK_UGT: l1 = l1 > l2; break;
1730 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1731 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1732 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1733 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1734 /* logical */
1735 case TOK_LAND: l1 = l1 && l2; break;
1736 case TOK_LOR: l1 = l1 || l2; break;
1737 default:
1738 goto general_case;
1740 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1741 l1 = ((uint32_t)l1 |
1742 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1743 v1->c.i = l1;
1744 vtop--;
1745 } else {
1746 /* if commutative ops, put c2 as constant */
1747 if (c1 && (op == '+' || op == '&' || op == '^' ||
1748 op == '|' || op == '*')) {
1749 vswap();
1750 c2 = c1; //c = c1, c1 = c2, c2 = c;
1751 l2 = l1; //l = l1, l1 = l2, l2 = l;
1753 if (!const_wanted &&
1754 c1 && ((l1 == 0 &&
1755 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1756 (l1 == -1 && op == TOK_SAR))) {
1757 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1758 vtop--;
1759 } else if (!const_wanted &&
1760 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1761 (l2 == -1 && op == '|') ||
1762 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1763 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1764 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1765 if (l2 == 1)
1766 vtop->c.i = 0;
1767 vswap();
1768 vtop--;
1769 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1770 op == TOK_PDIV) &&
1771 l2 == 1) ||
1772 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1773 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1774 l2 == 0) ||
1775 (op == '&' &&
1776 l2 == -1))) {
1777 /* filter out NOP operations like x*1, x-0, x&-1... */
1778 vtop--;
1779 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1780 /* try to use shifts instead of muls or divs */
1781 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1782 int n = -1;
/* compute log2(l2) */
1783 while (l2) {
1784 l2 >>= 1;
1785 n++;
1787 vtop->c.i = n;
1788 if (op == '*')
1789 op = TOK_SHL;
1790 else if (op == TOK_PDIV)
1791 op = TOK_SAR;
1792 else
1793 op = TOK_SHR;
1795 goto general_case;
1796 } else if (c2 && (op == '+' || op == '-') &&
1797 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1798 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1799 /* symbol + constant case */
1800 if (op == '-')
1801 l2 = -l2;
1802 l2 += vtop[-1].c.i;
1803 /* The backends can't always deal with addends to symbols
1804 larger than +-1<<31. Don't construct such. */
1805 if ((int)l2 != l2)
1806 goto general_case;
1807 vtop--;
1808 vtop->c.i = l2;
1809 } else {
1810 general_case:
1811 /* call low level op generator */
1812 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1813 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1814 gen_opl(op);
1815 else
1816 gen_opi(op);
1821 /* generate a floating point operation with constant propagation */
1822 static void gen_opif(int op)
1824 int c1, c2;
1825 SValue *v1, *v2;
1826 long double f1, f2;
1828 v1 = vtop - 1;
1829 v2 = vtop;
1830 /* currently, we cannot do computations with forward symbols */
1831 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1832 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1833 if (c1 && c2) {
/* widen both constants to long double for the folding computation */
1834 if (v1->type.t == VT_FLOAT) {
1835 f1 = v1->c.f;
1836 f2 = v2->c.f;
1837 } else if (v1->type.t == VT_DOUBLE) {
1838 f1 = v1->c.d;
1839 f2 = v2->c.d;
1840 } else {
1841 f1 = v1->c.ld;
1842 f2 = v2->c.ld;
1845 /* NOTE: we only do constant propagation if finite number (not
1846 NaN or infinity) (ANSI spec) */
1847 if (!ieee_finite(f1) || !ieee_finite(f2))
1848 goto general_case;
1850 switch(op) {
1851 case '+': f1 += f2; break;
1852 case '-': f1 -= f2; break;
1853 case '*': f1 *= f2; break;
1854 case '/':
1855 if (f2 == 0.0) {
1856 if (const_wanted)
1857 tcc_error("division by zero in constant");
1858 goto general_case;
1860 f1 /= f2;
1861 break;
1862 /* XXX: also handles tests ? */
1863 default:
1864 goto general_case;
1866 /* XXX: overflow test ? */
/* store the folded result back in the operand's own precision */
1867 if (v1->type.t == VT_FLOAT) {
1868 v1->c.f = f1;
1869 } else if (v1->type.t == VT_DOUBLE) {
1870 v1->c.d = f1;
1871 } else {
1872 v1->c.ld = f1;
1874 vtop--;
1875 } else {
1876 general_case:
1877 gen_opf(op);
/* return the compile-time size in bytes of the type pointed to by 'type'
   (the alignment result of type_size() is discarded) */
1881 static int pointed_size(CType *type)
1883 int align;
1884 return type_size(pointed_type(type), &align);
/* push the (possibly runtime-computed, for VLAs) size of the type pointed
   to by 'type' onto the value stack */
1887 static void vla_runtime_pointed_size(CType *type)
1889 int align;
1890 vla_runtime_type_size(pointed_type(type), &align);
/* return non-zero if 'p' is a null pointer constant: a constant integer
   or pointer expression whose value is 0 */
1893 static inline int is_null_pointer(SValue *p)
1895 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1896 return 0;
1897 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1898 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1899 ((p->type.t & VT_BTYPE) == VT_PTR &&
1900 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1903 static inline int is_integer_btype(int bt)
1905 return (bt == VT_BYTE || bt == VT_SHORT ||
1906 bt == VT_INT || bt == VT_LLONG);
1909 /* check types for comparison or subtraction of pointers */
/* 'op' is the operator being applied to (p1, p2); emits warnings for
   suspicious mixes (pointer vs integer, distinct pointer types) and a
   hard error for invalid operand combinations. */
1910 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1912 CType *type1, *type2, tmp_type1, tmp_type2;
1913 int bt1, bt2;
1915 /* null pointers are accepted for all comparisons as gcc */
1916 if (is_null_pointer(p1) || is_null_pointer(p2))
1917 return;
1918 type1 = &p1->type;
1919 type2 = &p2->type;
1920 bt1 = type1->t & VT_BTYPE;
1921 bt2 = type2->t & VT_BTYPE;
1922 /* accept comparison between pointer and integer with a warning */
1923 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1924 if (op != TOK_LOR && op != TOK_LAND )
1925 tcc_warning("comparison between pointer and integer");
1926 return;
1929 /* both must be pointers or implicit function pointers */
1930 if (bt1 == VT_PTR) {
1931 type1 = pointed_type(type1);
1932 } else if (bt1 != VT_FUNC)
1933 goto invalid_operands;
1935 if (bt2 == VT_PTR) {
1936 type2 = pointed_type(type2);
1937 } else if (bt2 != VT_FUNC) {
1938 invalid_operands:
1939 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1941 if ((type1->t & VT_BTYPE) == VT_VOID ||
1942 (type2->t & VT_BTYPE) == VT_VOID)
1943 return;
1944 tmp_type1 = *type1;
1945 tmp_type2 = *type2;
/* ignore sign and qualifiers when checking pointer target compatibility */
1946 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1947 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1948 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1949 /* gcc-like error if '-' is used */
1950 if (op == '-')
1951 goto invalid_operands;
1952 else
1953 tcc_warning("comparison of distinct pointer types lacks a cast");
1957 /* generic gen_op: handles types problems */
/* Top-level binary operator dispatcher: resolves operand types (struct,
   function, pointer, float, integer), performs the usual arithmetic
   conversions, scales pointer arithmetic by the element size, then hands
   off to gen_opif()/gen_opic(). */
1958 ST_FUNC void gen_op(int op)
1960 int u, t1, t2, bt1, bt2, t;
1961 CType type1;
1963 redo:
1964 t1 = vtop[-1].type.t;
1965 t2 = vtop[0].type.t;
1966 bt1 = t1 & VT_BTYPE;
1967 bt2 = t2 & VT_BTYPE;
1969 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1970 tcc_error("operation on a struct");
1971 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
/* function designators decay to pointers, then retry */
1972 if (bt2 == VT_FUNC) {
1973 mk_pointer(&vtop->type);
1974 gaddrof();
1976 if (bt1 == VT_FUNC) {
1977 vswap();
1978 mk_pointer(&vtop->type);
1979 gaddrof();
1980 vswap();
1982 goto redo;
1983 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1984 /* at least one operand is a pointer */
1985 /* relationnal op: must be both pointers */
1986 if (op >= TOK_ULT && op <= TOK_LOR) {
1987 check_comparison_pointer_types(vtop - 1, vtop, op);
1988 /* pointers are handled are unsigned */
1989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1990 t = VT_LLONG | VT_UNSIGNED;
1991 #else
1992 t = VT_INT | VT_UNSIGNED;
1993 #endif
1994 goto std_op;
1996 /* if both pointers, then it must be the '-' op */
1997 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1998 if (op != '-')
1999 tcc_error("cannot use pointers here");
2000 check_comparison_pointer_types(vtop - 1, vtop, op);
2001 /* XXX: check that types are compatible */
/* pointer difference: byte difference divided by the element size */
2002 if (vtop[-1].type.t & VT_VLA) {
2003 vla_runtime_pointed_size(&vtop[-1].type);
2004 } else {
2005 vpushi(pointed_size(&vtop[-1].type));
2007 vrott(3);
2008 gen_opic(op);
2009 /* set to integer type */
2010 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2011 vtop->type.t = VT_LLONG;
2012 #else
2013 vtop->type.t = VT_INT;
2014 #endif
2015 vswap();
2016 gen_op(TOK_PDIV);
2017 } else {
2018 /* exactly one pointer : must be '+' or '-'. */
2019 if (op != '-' && op != '+')
2020 tcc_error("cannot use pointers here");
2021 /* Put pointer as first operand */
2022 if (bt2 == VT_PTR) {
2023 vswap();
2024 swap(&t1, &t2);
2026 #if PTR_SIZE == 4
2027 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2028 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2029 gen_cast(&int_type);
2030 #endif
2031 type1 = vtop[-1].type;
2032 type1.t &= ~VT_ARRAY;
/* scale the integer operand by the pointed-to element size */
2033 if (vtop[-1].type.t & VT_VLA)
2034 vla_runtime_pointed_size(&vtop[-1].type);
2035 else {
2036 u = pointed_size(&vtop[-1].type);
2037 if (u < 0)
2038 tcc_error("unknown array element size");
2039 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2040 vpushll(u);
2041 #else
2042 /* XXX: cast to int ? (long long case) */
2043 vpushi(u);
2044 #endif
2046 gen_op('*');
2047 #if 0
2048 /* #ifdef CONFIG_TCC_BCHECK
2049 The main reason to removing this code:
2050 #include <stdio.h>
2051 int main ()
2053 int v[10];
2054 int i = 10;
2055 int j = 9;
2056 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2057 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2059 When this code is on. then the output looks like
2060 v+i-j = 0xfffffffe
2061 v+(i-j) = 0xbff84000
2063 /* if evaluating constant expression, no code should be
2064 generated, so no bound check */
2065 if (tcc_state->do_bounds_check && !const_wanted) {
2066 /* if bounded pointers, we generate a special code to
2067 test bounds */
2068 if (op == '-') {
2069 vpushi(0);
2070 vswap();
2071 gen_op('-');
2073 gen_bounded_ptr_add();
2074 } else
2075 #endif
2077 gen_opic(op);
2079 /* put again type if gen_opic() swaped operands */
2080 vtop->type = type1;
2082 } else if (is_float(bt1) || is_float(bt2)) {
2083 /* compute bigger type and do implicit casts */
2084 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2085 t = VT_LDOUBLE;
2086 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2087 t = VT_DOUBLE;
2088 } else {
2089 t = VT_FLOAT;
2091 /* floats can only be used for a few operations */
2092 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2093 (op < TOK_ULT || op > TOK_GT))
2094 tcc_error("invalid operands for binary operation");
2095 goto std_op;
2096 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
/* shifts: result type comes from the left operand only */
2097 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2098 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2099 t |= VT_UNSIGNED;
2100 goto std_op;
2101 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2102 /* cast to biggest op */
2103 t = VT_LLONG;
2104 /* convert to unsigned if it does not fit in a long long */
2105 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2106 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2107 t |= VT_UNSIGNED;
2108 goto std_op;
2109 } else {
2110 /* integer operations */
2111 t = VT_INT;
2112 /* convert to unsigned if it does not fit in an integer */
2113 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2114 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2115 t |= VT_UNSIGNED;
2116 std_op:
2117 /* XXX: currently, some unsigned operations are explicit, so
2118 we modify them here */
2119 if (t & VT_UNSIGNED) {
2120 if (op == TOK_SAR)
2121 op = TOK_SHR;
2122 else if (op == '/')
2123 op = TOK_UDIV;
2124 else if (op == '%')
2125 op = TOK_UMOD;
2126 else if (op == TOK_LT)
2127 op = TOK_ULT;
2128 else if (op == TOK_GT)
2129 op = TOK_UGT;
2130 else if (op == TOK_LE)
2131 op = TOK_ULE;
2132 else if (op == TOK_GE)
2133 op = TOK_UGE;
2135 vswap();
2136 type1.t = t;
2137 gen_cast(&type1);
2138 vswap();
2139 /* special case for shifts and long long: we keep the shift as
2140 an integer */
2141 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2142 type1.t = VT_INT;
2143 gen_cast(&type1);
2144 if (is_float(t))
2145 gen_opif(op);
2146 else
2147 gen_opic(op);
2148 if (op >= TOK_ULT && op <= TOK_GT) {
2149 /* relationnal op: the result is an int */
2150 vtop->type.t = VT_INT;
2151 } else {
2152 vtop->type.t = t;
2155 // Make sure that we have converted to an rvalue:
2156 if (vtop->r & VT_LVAL)
2157 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2160 #ifndef TCC_TARGET_ARM
2161 /* generic itof for unsigned long long case */
/* unsigned long long -> float has no native path on these targets, so a
   runtime helper (__floatundi{sf,df,xf}) is called; everything else goes
   through the backend's gen_cvt_itof(). */
2162 static void gen_cvt_itof1(int t)
2164 #ifdef TCC_TARGET_ARM64
2165 gen_cvt_itof(t);
2166 #else
2167 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2168 (VT_LLONG | VT_UNSIGNED)) {
2170 if (t == VT_FLOAT)
2171 vpush_global_sym(&func_old_type, TOK___floatundisf);
2172 #if LDOUBLE_SIZE != 8
2173 else if (t == VT_LDOUBLE)
2174 vpush_global_sym(&func_old_type, TOK___floatundixf);
2175 #endif
2176 else
2177 vpush_global_sym(&func_old_type, TOK___floatundidf);
2178 vrott(2);
2179 gfunc_call(1);
2180 vpushi(0);
2181 vtop->r = reg_fret(t);
2182 } else {
2183 gen_cvt_itof(t);
2185 #endif
2187 #endif
2189 /* generic ftoi for unsigned long long case */
/* float -> unsigned long long is routed through the runtime helpers
   __fixuns{sf,df,xf}di; other targets/types use the backend's
   gen_cvt_ftoi() directly. */
2190 static void gen_cvt_ftoi1(int t)
2192 #ifdef TCC_TARGET_ARM64
2193 gen_cvt_ftoi(t);
2194 #else
2195 int st;
2197 if (t == (VT_LLONG | VT_UNSIGNED)) {
2198 /* not handled natively */
2199 st = vtop->type.t & VT_BTYPE;
2200 if (st == VT_FLOAT)
2201 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2202 #if LDOUBLE_SIZE != 8
2203 else if (st == VT_LDOUBLE)
2204 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2205 #endif
2206 else
2207 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2208 vrott(2);
2209 gfunc_call(1);
2210 vpushi(0);
2211 vtop->r = REG_IRET;
2212 vtop->r2 = REG_LRET;
2213 } else {
2214 gen_cvt_ftoi(t);
2216 #endif
2219 /* force char or short cast */
/* Truncates the value on top of the stack to char/short semantics of
   type 't': mask for unsigned targets, shift-left/shift-right pair for
   signed targets (to get sign extension). */
2220 static void force_charshort_cast(int t)
2222 int bits, dbt;
2223 dbt = t & VT_BTYPE;
2224 /* XXX: add optimization if lvalue : just change type and offset */
2225 if (dbt == VT_BYTE)
2226 bits = 8;
2227 else
2228 bits = 16;
2229 if (t & VT_UNSIGNED) {
2230 vpushi((1 << bits) - 1);
2231 gen_op('&');
2232 } else {
2233 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2234 bits = 64 - bits;
2235 else
2236 bits = 32 - bits;
2237 vpushi(bits);
2238 gen_op(TOK_SHL);
2239 /* result must be signed or the SAR is converted to an SHL
2240 This was not the case when "t" was a signed short
2241 and the last value on the stack was an unsigned int */
2242 vtop->type.t &= ~VT_UNSIGNED;
2243 vpushi(bits);
2244 gen_op(TOK_SAR);
2248 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
/* Constant operands are converted at compile time; otherwise conversion
   code is emitted (ftof, itof, ftoi, widen/narrow, bool tests). */
2249 static void gen_cast(CType *type)
2251 int sbt, dbt, sf, df, c, p;
2253 /* special delayed cast for char/short */
2254 /* XXX: in some cases (multiple cascaded casts), it may still
2255 be incorrect */
2256 if (vtop->r & VT_MUSTCAST) {
2257 vtop->r &= ~VT_MUSTCAST;
2258 force_charshort_cast(vtop->type.t);
2261 /* bitfields first get cast to ints */
2262 if (vtop->type.t & VT_BITFIELD) {
2263 gv(RC_INT);
2266 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2267 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2269 if (sbt != dbt) {
2270 sf = is_float(sbt);
2271 df = is_float(dbt);
2272 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2273 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2274 if (c) {
2275 /* constant case: we can do it now */
2276 /* XXX: in ISOC, cannot do it if error in convert */
2277 if (sbt == VT_FLOAT)
2278 vtop->c.ld = vtop->c.f;
2279 else if (sbt == VT_DOUBLE)
2280 vtop->c.ld = vtop->c.d;
2282 if (df) {
2283 if ((sbt & VT_BTYPE) == VT_LLONG) {
2284 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2285 vtop->c.ld = vtop->c.i;
2286 else
2287 vtop->c.ld = -(long double)-vtop->c.i;
2288 } else if(!sf) {
2289 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2290 vtop->c.ld = (uint32_t)vtop->c.i;
2291 else
2292 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2295 if (dbt == VT_FLOAT)
2296 vtop->c.f = (float)vtop->c.ld;
2297 else if (dbt == VT_DOUBLE)
2298 vtop->c.d = (double)vtop->c.ld;
2299 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2300 vtop->c.i = vtop->c.ld;
2301 } else if (sf && dbt == VT_BOOL) {
2302 vtop->c.i = (vtop->c.ld != 0);
2303 } else {
2304 if(sf)
2305 vtop->c.i = vtop->c.ld;
2306 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2308 else if (sbt & VT_UNSIGNED)
2309 vtop->c.i = (uint32_t)vtop->c.i;
2310 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2311 else if (sbt == VT_PTR)
2313 #endif
2314 else if (sbt != VT_LLONG)
2315 vtop->c.i = ((uint32_t)vtop->c.i |
2316 -(vtop->c.i & 0x80000000));
2318 if (dbt == (VT_LLONG|VT_UNSIGNED))
2320 else if (dbt == VT_BOOL)
2321 vtop->c.i = (vtop->c.i != 0);
2322 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2323 else if (dbt == VT_PTR)
2325 #endif
2326 else if (dbt != VT_LLONG) {
2327 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2328 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2329 0xffffffff);
2330 vtop->c.i &= m;
2331 if (!(dbt & VT_UNSIGNED))
2332 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2335 } else if (p && dbt == VT_BOOL) {
/* address of a symbol is never NULL, so the bool result is 1 */
2336 vtop->r = VT_CONST;
2337 vtop->c.i = 1;
2338 } else {
2339 /* non constant case: generate code */
2340 if (sf && df) {
2341 /* convert from fp to fp */
2342 gen_cvt_ftof(dbt);
2343 } else if (df) {
2344 /* convert int to fp */
2345 gen_cvt_itof1(dbt);
2346 } else if (sf) {
2347 /* convert fp to int */
2348 if (dbt == VT_BOOL) {
2349 vpushi(0);
2350 gen_op(TOK_NE);
2351 } else {
2352 /* we handle char/short/etc... with generic code */
2353 if (dbt != (VT_INT | VT_UNSIGNED) &&
2354 dbt != (VT_LLONG | VT_UNSIGNED) &&
2355 dbt != VT_LLONG)
2356 dbt = VT_INT;
2357 gen_cvt_ftoi1(dbt);
2358 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2359 /* additional cast for char/short... */
2360 vtop->type.t = dbt;
2361 gen_cast(type);
2364 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2365 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2366 if ((sbt & VT_BTYPE) != VT_LLONG) {
2367 /* scalar to long long */
2368 /* machine independent conversion */
2369 gv(RC_INT);
2370 /* generate high word */
2371 if (sbt == (VT_INT | VT_UNSIGNED)) {
2372 vpushi(0);
2373 gv(RC_INT);
2374 } else {
2375 if (sbt == VT_PTR) {
2376 /* cast from pointer to int before we apply
2377 shift operation, which pointers don't support*/
2378 gen_cast(&int_type);
2380 gv_dup();
2381 vpushi(31);
2382 gen_op(TOK_SAR);
2384 /* patch second register */
2385 vtop[-1].r2 = vtop->r;
2386 vpop();
2388 #else
2389 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2390 (dbt & VT_BTYPE) == VT_PTR ||
2391 (dbt & VT_BTYPE) == VT_FUNC) {
2392 if ((sbt & VT_BTYPE) != VT_LLONG &&
2393 (sbt & VT_BTYPE) != VT_PTR &&
2394 (sbt & VT_BTYPE) != VT_FUNC) {
2395 /* need to convert from 32bit to 64bit */
2396 gv(RC_INT);
2397 if (sbt != (VT_INT | VT_UNSIGNED)) {
2398 #if defined(TCC_TARGET_ARM64)
2399 gen_cvt_sxtw();
2400 #elif defined(TCC_TARGET_X86_64)
2401 int r = gv(RC_INT);
2402 /* x86_64 specific: movslq */
2403 o(0x6348);
2404 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2405 #else
2406 #error
2407 #endif
2410 #endif
2411 } else if (dbt == VT_BOOL) {
2412 /* scalar to bool */
2413 vpushi(0);
2414 gen_op(TOK_NE);
2415 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2416 (dbt & VT_BTYPE) == VT_SHORT) {
2417 if (sbt == VT_PTR) {
2418 vtop->type.t = VT_INT;
2419 tcc_warning("nonportable conversion from pointer to char/short");
2421 force_charshort_cast(dbt);
2422 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2423 } else if ((dbt & VT_BTYPE) == VT_INT) {
2424 /* scalar to int */
2425 if ((sbt & VT_BTYPE) == VT_LLONG) {
2426 /* from long long: just take low order word */
2427 lexpand();
2428 vpop();
2430 /* if lvalue and single word type, nothing to do because
2431 the lvalue already contains the real type size (see
2432 VT_LVAL_xxx constants) */
2433 #endif
2436 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2437 /* if we are casting between pointer types,
2438 we must update the VT_LVAL_xxx size */
2439 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2440 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2442 vtop->type = *type;
2445 /* return type size as known at compile time. Put alignment at 'a' */
2446 ST_FUNC int type_size(CType *type, int *a)
2448 Sym *s;
2449 int bt;
2451 bt = type->t & VT_BTYPE;
2452 if (bt == VT_STRUCT) {
2453 /* struct/union */
2454 s = type->ref;
2455 *a = s->r;
2456 return s->c;
2457 } else if (bt == VT_PTR) {
2458 if (type->t & VT_ARRAY) {
2459 int ts;
2461 s = type->ref;
2462 ts = type_size(&s->type, a);
2464 if (ts < 0 && s->c < 0)
2465 ts = -ts;
2467 return ts * s->c;
2468 } else {
2469 *a = PTR_SIZE;
2470 return PTR_SIZE;
2472 } else if (bt == VT_LDOUBLE) {
2473 *a = LDOUBLE_ALIGN;
2474 return LDOUBLE_SIZE;
2475 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2476 #ifdef TCC_TARGET_I386
2477 #ifdef TCC_TARGET_PE
2478 *a = 8;
2479 #else
2480 *a = 4;
2481 #endif
2482 #elif defined(TCC_TARGET_ARM)
2483 #ifdef TCC_ARM_EABI
2484 *a = 8;
2485 #else
2486 *a = 4;
2487 #endif
2488 #else
2489 *a = 8;
2490 #endif
2491 return 8;
2492 } else if (bt == VT_INT || bt == VT_FLOAT) {
2493 *a = 4;
2494 return 4;
2495 } else if (bt == VT_SHORT) {
2496 *a = 2;
2497 return 2;
2498 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2499 *a = 8;
2500 return 16;
2501 } else if (bt == VT_ENUM) {
2502 *a = 4;
2503 /* Enums might be incomplete, so don't just return '4' here. */
2504 return type->ref->c;
2505 } else {
2506 /* char, void, function, _Bool */
2507 *a = 1;
2508 return 1;
2512 /* push type size as known at runtime time on top of value stack. Put
2513 alignment at 'a' */
2514 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2516 if (type->t & VT_VLA) {
2517 type_size(&type->ref->type, a);
2518 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2519 } else {
2520 vpushi(type_size(type, a));
2524 static void vla_sp_restore(void) {
2525 if (vlas_in_scope) {
2526 gen_vla_sp_restore(vla_sp_loc);
2530 static void vla_sp_restore_root(void) {
2531 if (vlas_in_scope) {
2532 gen_vla_sp_restore(vla_sp_root_loc);
2536 /* return the pointed type of t */
2537 static inline CType *pointed_type(CType *type)
2539 return &type->ref->type;
2542 /* modify type so that its it is a pointer to type. */
2543 ST_FUNC void mk_pointer(CType *type)
2545 Sym *s;
2546 s = sym_push(SYM_FIELD, type, 0, -1);
2547 type->t = VT_PTR | (type->t & ~VT_TYPE);
2548 type->ref = s;
2551 /* compare function types. OLD functions match any new functions */
2552 static int is_compatible_func(CType *type1, CType *type2)
2554 Sym *s1, *s2;
2556 s1 = type1->ref;
2557 s2 = type2->ref;
2558 if (!is_compatible_types(&s1->type, &s2->type))
2559 return 0;
2560 /* check func_call */
2561 if (s1->a.func_call != s2->a.func_call)
2562 return 0;
2563 /* XXX: not complete */
2564 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2565 return 1;
2566 if (s1->c != s2->c)
2567 return 0;
2568 while (s1 != NULL) {
2569 if (s2 == NULL)
2570 return 0;
2571 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2572 return 0;
2573 s1 = s1->next;
2574 s2 = s2->next;
2576 if (s2)
2577 return 0;
2578 return 1;
2581 /* return true if type1 and type2 are the same. If unqualified is
2582 true, qualifiers on the types are ignored.
2584 - enums are not checked as gcc __builtin_types_compatible_p ()
2586 static int compare_types(CType *type1, CType *type2, int unqualified)
2588 int bt1, t1, t2;
2590 t1 = type1->t & VT_TYPE;
2591 t2 = type2->t & VT_TYPE;
2592 if (unqualified) {
2593 /* strip qualifiers before comparing */
2594 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2595 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2597 /* Default Vs explicit signedness only matters for char */
2598 if ((t1 & VT_BTYPE) != VT_BYTE) {
2599 t1 &= ~VT_DEFSIGN;
2600 t2 &= ~VT_DEFSIGN;
2602 /* An enum is compatible with (unsigned) int. Ideally we would
2603 store the enums signedness in type->ref.a.<some_bit> and
2604 only accept unsigned enums with unsigned int and vice versa.
2605 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2606 from pointer target types, so we can't add it here either. */
2607 if ((t1 & VT_BTYPE) == VT_ENUM) {
2608 t1 = VT_INT;
2609 if (type1->ref->a.unsigned_enum)
2610 t1 |= VT_UNSIGNED;
2612 if ((t2 & VT_BTYPE) == VT_ENUM) {
2613 t2 = VT_INT;
2614 if (type2->ref->a.unsigned_enum)
2615 t2 |= VT_UNSIGNED;
2617 /* XXX: bitfields ? */
2618 if (t1 != t2)
2619 return 0;
2620 /* test more complicated cases */
2621 bt1 = t1 & VT_BTYPE;
2622 if (bt1 == VT_PTR) {
2623 type1 = pointed_type(type1);
2624 type2 = pointed_type(type2);
2625 return is_compatible_types(type1, type2);
2626 } else if (bt1 == VT_STRUCT) {
2627 return (type1->ref == type2->ref);
2628 } else if (bt1 == VT_FUNC) {
2629 return is_compatible_func(type1, type2);
2630 } else {
2631 return 1;
2635 /* return true if type1 and type2 are exactly the same (including
2636 qualifiers).
2638 static int is_compatible_types(CType *type1, CType *type2)
2640 return compare_types(type1,type2,0);
2643 /* return true if type1 and type2 are the same (ignoring qualifiers).
2645 static int is_compatible_parameter_types(CType *type1, CType *type2)
2647 return compare_types(type1,type2,1);
2650 /* print a type. If 'varstr' is not NULL, then the variable is also
2651 printed in the type */
2652 /* XXX: union */
2653 /* XXX: add array and function pointers */
2654 static void type_to_str(char *buf, int buf_size,
2655 CType *type, const char *varstr)
2657 int bt, v, t;
2658 Sym *s, *sa;
2659 char buf1[256];
2660 const char *tstr;
2662 t = type->t & VT_TYPE;
2663 bt = t & VT_BTYPE;
2664 buf[0] = '\0';
2665 if (t & VT_CONSTANT)
2666 pstrcat(buf, buf_size, "const ");
2667 if (t & VT_VOLATILE)
2668 pstrcat(buf, buf_size, "volatile ");
2669 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2670 pstrcat(buf, buf_size, "unsigned ");
2671 else if (t & VT_DEFSIGN)
2672 pstrcat(buf, buf_size, "signed ");
2673 switch(bt) {
2674 case VT_VOID:
2675 tstr = "void";
2676 goto add_tstr;
2677 case VT_BOOL:
2678 tstr = "_Bool";
2679 goto add_tstr;
2680 case VT_BYTE:
2681 tstr = "char";
2682 goto add_tstr;
2683 case VT_SHORT:
2684 tstr = "short";
2685 goto add_tstr;
2686 case VT_INT:
2687 tstr = "int";
2688 goto add_tstr;
2689 case VT_LONG:
2690 tstr = "long";
2691 goto add_tstr;
2692 case VT_LLONG:
2693 tstr = "long long";
2694 goto add_tstr;
2695 case VT_FLOAT:
2696 tstr = "float";
2697 goto add_tstr;
2698 case VT_DOUBLE:
2699 tstr = "double";
2700 goto add_tstr;
2701 case VT_LDOUBLE:
2702 tstr = "long double";
2703 add_tstr:
2704 pstrcat(buf, buf_size, tstr);
2705 break;
2706 case VT_ENUM:
2707 case VT_STRUCT:
2708 if (bt == VT_STRUCT)
2709 tstr = "struct ";
2710 else
2711 tstr = "enum ";
2712 pstrcat(buf, buf_size, tstr);
2713 v = type->ref->v & ~SYM_STRUCT;
2714 if (v >= SYM_FIRST_ANOM)
2715 pstrcat(buf, buf_size, "<anonymous>");
2716 else
2717 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2718 break;
2719 case VT_FUNC:
2720 s = type->ref;
2721 type_to_str(buf, buf_size, &s->type, varstr);
2722 pstrcat(buf, buf_size, "(");
2723 sa = s->next;
2724 while (sa != NULL) {
2725 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2726 pstrcat(buf, buf_size, buf1);
2727 sa = sa->next;
2728 if (sa)
2729 pstrcat(buf, buf_size, ", ");
2731 pstrcat(buf, buf_size, ")");
2732 goto no_var;
2733 case VT_PTR:
2734 s = type->ref;
2735 if (t & VT_ARRAY) {
2736 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2737 type_to_str(buf, buf_size, &s->type, buf1);
2738 goto no_var;
2740 pstrcpy(buf1, sizeof(buf1), "*");
2741 if (t & VT_CONSTANT)
2742 pstrcat(buf1, buf_size, "const ");
2743 if (t & VT_VOLATILE)
2744 pstrcat(buf1, buf_size, "volatile ");
2745 if (varstr)
2746 pstrcat(buf1, sizeof(buf1), varstr);
2747 type_to_str(buf, buf_size, &s->type, buf1);
2748 goto no_var;
2750 if (varstr) {
2751 pstrcat(buf, buf_size, " ");
2752 pstrcat(buf, buf_size, varstr);
2754 no_var: ;
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed.
   Emits warnings for the classic implicit-conversion pitfalls
   (int <-> pointer, incompatible pointer targets, discarded
   qualifiers) and a hard error for impossible casts.  On success,
   ends by generating the actual cast of vtop to 'dt'. */
static void gen_assign_cast(CType *dt)
{
    CType *st, *type1, *type2, tmp_type1, tmp_type2;
    char buf1[256], buf2[256];
    int dbt, sbt;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            ; /*
              It is Ok if both are void
              A test program:
                void func1() {}
                void func2() {
                  return func1();
                }
              gcc accepts this program
              */
        else
            tcc_error("cannot cast from/to void");
    }
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            goto type_ok;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            goto type_ok;
        }
        type1 = pointed_type(dt);
        /* a function is implicitely a function pointer */
        if (sbt == VT_FUNC) {
            if ((type1->t & VT_BTYPE) != VT_VOID &&
                !is_compatible_types(pointed_type(dt), st))
                tcc_warning("assignment from incompatible pointer type");
            goto type_ok;
        }
        if (sbt != VT_PTR)
            goto error;
        type2 = pointed_type(st);
        if ((type1->t & VT_BTYPE) == VT_VOID ||
            (type2->t & VT_BTYPE) == VT_VOID) {
            /* void * can match anything */
        } else {
            /* exact type match, except for qualifiers */
            tmp_type1 = *type1;
            tmp_type2 = *type2;
            tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
            tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
            if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
                if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
                    (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
                    (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
                    ; /* same base type, only signedness differs: no warning */
                else
                    tcc_warning("assignment from incompatible pointer type");
            }
        }
        /* check const and volatile */
        if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
            (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
            tcc_warning("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            /* struct -> integer is never valid: share the struct error path */
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        /* struct assignment requires compatible types (qualifiers ignored) */
        tmp_type1 = *dt;
        tmp_type2 = *st;
        tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
        tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
        if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        error:
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
        }
        break;
    }
 type_ok:
    gen_cast(dt);
}
/* store vtop in lvalue pushed on stack.
   On entry: vtop[-1] is the destination lvalue, vtop is the value.
   On exit: the stored value is left on the stack as the expression
   result (possibly with a delayed char/short cast flag set). */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts: don't truncate now, just flag the
           result so a later use forces the cast (VT_MUSTCAST) */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
                        ((1 << VT_STRUCT_SHIFT) - 1));
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    } else {
        delayed_cast = 0;
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);
    }

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);

        /* destination */
        vswap();
        vtop->type.t = VT_PTR;
        gaddrof();

        /* address of memcpy() */
#ifdef TCC_ARM_EABI
        if(!(align & 7))
            vpush_global_sym(&func_old_type, TOK_memcpy8);
        else if(!(align & 3))
            vpush_global_sym(&func_old_type, TOK_memcpy4);
        else
#endif
        /* Use memmove, rather than memcpy, as dest and src may be same: */
        vpush_global_sym(&func_old_type, TOK_memmove);

        vswap();
        /* source */
        vpushv(vtop - 2);
        vtop->type.t = VT_PTR;
        gaddrof();
        /* type size */
        vpushi(size);
        gfunc_call(3);

        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);

        if((ft & VT_BTYPE) == VT_BOOL) {
            /* _Bool bitfield: normalize the value to 0/1 first, then
               treat the container as unsigned char */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }

        /* duplicate destination */
        vdup();
        vtop[-1] = vtop[-2];

        /* mask and shift source */
        if((ft & VT_BTYPE) != VT_BOOL) {
            if((ft & VT_BTYPE) == VT_LLONG) {
                vpushll((1ULL << bit_size) - 1ULL);
            } else {
                vpushi((1 << bit_size) - 1);
            }
            gen_op('&');
        }
        vpushi(bit_pos);
        gen_op(TOK_SHL);
        /* load destination, mask and or with source */
        vswap();
        if((ft & VT_BTYPE) == VT_LLONG) {
            vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
        } else {
            vpushi(~(((1 << bit_size) - 1) << bit_pos));
        }
        gen_op('&');
        gen_op('|');
        /* store result (recursive: destination no longer marked bitfield) */
        vstore();

        /* ... and discard */
        vpop();

    } else {
#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        rc = RC_INT;
        if (is_float(ft)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((ft & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
                rc = RC_FRET;
            }
#endif
        }
        r = gv(rc); /* generate value */
        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            t = get_reg(RC_INT);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            sv.type.t = VT_PTR;
#else
            sv.type.t = VT_INT;
#endif
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(t, &sv);
            vtop[-1].r = t | VT_LVAL;
        }
        /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
        if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
            int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
        if ((ft & VT_BTYPE) == VT_LLONG) {
            int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
#endif
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = addr_type;
            gaddrof();
            vpushi(load_size);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            store(r, vtop - 1);
        }

        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
    }
}
3029 /* post defines POST/PRE add. c is the token ++ or -- */
3030 ST_FUNC void inc(int post, int c)
3032 test_lvalue();
3033 vdup(); /* save lvalue */
3034 if (post) {
3035 gv_dup(); /* duplicate value */
3036 vrotb(3);
3037 vrotb(3);
3039 /* add constant */
3040 vpushi(c - TOK_MID);
3041 gen_op('+');
3042 vstore(); /* store value */
3043 if (post)
3044 vpop(); /* if post op, return saved value */
/* Parse one or more adjacent string-literal tokens and concatenate
   them into 'astr' (NUL-terminated).  'msg' is the expect() message
   used when the current token is not a string at all.
   Caller owns 'astr' and must cstr_free() it. */
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_new(astr);
    /* C-style adjacent literal concatenation: "a" "b" -> "ab" */
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(astr, '\0');
}
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For any positive I this is the 1-based
   index of its highest set bit.) */
static int exact_log2p1(int i)
{
    int ret;

    if (i == 0)
        return 0;
    /* shift out whole bytes first, then binary-search the last byte */
    ret = 1;
    while (i >= 1 << 8) {
        i >>= 8;
        ret += 8;
    }
    if (i >= 1 << 4) {
        i >>= 4;
        ret += 4;
    }
    if (i >= 1 << 2) {
        i >>= 2;
        ret += 2;
    }
    if (i >= 1 << 1)
        ret += 1;
    return ret;
}
/* Parse GNUC __attribute__ extension. Currently, the following
   extensions are recognized:
   - aligned(n) : set data/function alignment.
   - packed : force data alignment to 1
   - section(x) : generate data/code in this section.
   - unused : currently ignored, but may be used someday.
   - regparm(n) : pass function parameters in registers (i386 only)
   Results are accumulated into 'ad'; multiple __attribute__((...))
   groups in a row are all consumed. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    CString astr;

    while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
        next();
        /* the attribute list is wrapped in double parentheses */
        skip('(');
        skip('(');
        while (tok != ')') {
            if (tok < TOK_IDENT)
                expect("attribute name");
            t = tok;
            next();
            switch(t) {
            case TOK_SECTION1:
            case TOK_SECTION2:
                skip('(');
                parse_mult_str(&astr, "section name");
                ad->section = find_section(tcc_state, (char *)astr.data);
                skip(')');
                cstr_free(&astr);
                break;
            case TOK_ALIAS1:
            case TOK_ALIAS2:
                skip('(');
                parse_mult_str(&astr, "alias(\"target\")");
                ad->alias_target = /* save string as token, for later */
                    tok_alloc((char*)astr.data, astr.size-1)->tok;
                skip(')');
                cstr_free(&astr);
                break;
            case TOK_VISIBILITY1:
            case TOK_VISIBILITY2:
                skip('(');
                parse_mult_str(&astr,
                               "visibility(\"default|hidden|internal|protected\")");
                if (!strcmp (astr.data, "default"))
                    ad->a.visibility = STV_DEFAULT;
                else if (!strcmp (astr.data, "hidden"))
                    ad->a.visibility = STV_HIDDEN;
                else if (!strcmp (astr.data, "internal"))
                    ad->a.visibility = STV_INTERNAL;
                else if (!strcmp (astr.data, "protected"))
                    ad->a.visibility = STV_PROTECTED;
                else
                    expect("visibility(\"default|hidden|internal|protected\")");
                skip(')');
                cstr_free(&astr);
                break;
            case TOK_ALIGNED1:
            case TOK_ALIGNED2:
                if (tok == '(') {
                    next();
                    n = expr_const();
                    if (n <= 0 || (n & (n - 1)) != 0)
                        tcc_error("alignment must be a positive power of two");
                    skip(')');
                } else {
                    /* bare 'aligned' means maximum useful alignment */
                    n = MAX_ALIGN;
                }
                /* stored as log2(n)+1 so 0 can mean "not specified" */
                ad->a.aligned = exact_log2p1(n);
                if (n != 1 << (ad->a.aligned - 1))
                    tcc_error("alignment of %d is larger than implemented", n);
                break;
            case TOK_PACKED1:
            case TOK_PACKED2:
                ad->a.packed = 1;
                break;
            case TOK_WEAK1:
            case TOK_WEAK2:
                ad->a.weak = 1;
                break;
            case TOK_UNUSED1:
            case TOK_UNUSED2:
                /* currently, no need to handle it because tcc does not
                   track unused objects */
                break;
            case TOK_NORETURN1:
            case TOK_NORETURN2:
                /* currently, no need to handle it because tcc does not
                   track unused objects */
                break;
            case TOK_CDECL1:
            case TOK_CDECL2:
            case TOK_CDECL3:
                ad->a.func_call = FUNC_CDECL;
                break;
            case TOK_STDCALL1:
            case TOK_STDCALL2:
            case TOK_STDCALL3:
                ad->a.func_call = FUNC_STDCALL;
                break;
#ifdef TCC_TARGET_I386
            case TOK_REGPARM1:
            case TOK_REGPARM2:
                skip('(');
                /* clamp to the 0..3 registers i386 fastcall supports */
                n = expr_const();
                if (n > 3)
                    n = 3;
                else if (n < 0)
                    n = 0;
                if (n > 0)
                    ad->a.func_call = FUNC_FASTCALL1 + n - 1;
                skip(')');
                break;
            case TOK_FASTCALL1:
            case TOK_FASTCALL2:
            case TOK_FASTCALL3:
                ad->a.func_call = FUNC_FASTCALLW;
                break;
#endif
            case TOK_MODE:
                skip('(');
                /* mode stored as basic type + 1 so 0 means "unset" */
                switch(tok) {
                case TOK_MODE_DI:
                    ad->a.mode = VT_LLONG + 1;
                    break;
                case TOK_MODE_QI:
                    ad->a.mode = VT_BYTE + 1;
                    break;
                case TOK_MODE_HI:
                    ad->a.mode = VT_SHORT + 1;
                    break;
                case TOK_MODE_SI:
                case TOK_MODE_word:
                    ad->a.mode = VT_INT + 1;
                    break;
                default:
                    tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                    break;
                }
                next();
                skip(')');
                break;
            case TOK_DLLEXPORT:
                ad->a.func_export = 1;
                break;
            case TOK_DLLIMPORT:
                ad->a.func_import = 1;
                break;
            default:
                if (tcc_state->warn_unsupported)
                    tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
                /* skip parameters */
                if (tok == '(') {
                    /* consume a balanced parenthesized argument list */
                    int parenthesis = 0;
                    do {
                        if (tok == '(')
                            parenthesis++;
                        else if (tok == ')')
                            parenthesis--;
                        next();
                    } while (parenthesis && tok != -1);
                }
                break;
            }
            if (tok != ',')
                break;
            next();
        }
        skip(')');
        skip(')');
    }
}
3253 static Sym * find_field (CType *type, int v)
3255 Sym *s = type->ref;
3256 v |= SYM_FIELD;
3257 while ((s = s->next) != NULL) {
3258 if ((s->v & SYM_FIELD) &&
3259 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3260 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3261 Sym *ret = find_field (&s->type, v);
3262 if (ret)
3263 return ret;
3265 if (s->v == v)
3266 break;
3268 return s;
3271 static void struct_add_offset (Sym *s, int offset)
3273 while ((s = s->next) != NULL) {
3274 if ((s->v & SYM_FIELD) &&
3275 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3276 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3277 struct_add_offset(s->type.ref, offset);
3278 } else
3279 s->c += offset;
/* Compute the layout (field offsets, bit-field positions, total size
   and alignment) of a struct/union whose members were already parsed
   into type->ref->next.  Supports both PCC/GCC and MS bit-field
   layout modes (selected via tcc_state->ms_bitfields). */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    Sym *f;
    if (ad->a.aligned)
        maxalign = 1 << (ad->a.aligned - 1);
    else
        maxalign = 1;
    offset = 0;
    c = 0;
    bit_pos = 0;
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;
    for (f = type->ref->next; f; f = f->next) {
        int typealign, bit_size;
        int size = type_size(&f->type, &typealign);
        if (f->type.t & VT_BITFIELD)
            bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        else
            bit_size = -1;
        if (bit_size == 0 && pcc) {
            /* Zero-width bit-fields in PCC mode aren't affected
               by any packing (attribute or pragma). */
            align = typealign;
        } else if (f->r > 1) {
            /* per-field alignment override (from aligned attribute) */
            align = f->r;
        } else if (ad->a.packed || f->r == 1) {
            align = 1;
            /* Packed fields or packed records don't let the base type
               influence the records type alignment. */
            typealign = 1;
        } else {
            align = typealign;
        }
        if (type->ref->type.t != TOK_STRUCT) {
            /* union: every member starts at offset 0; the union's size
               is the size of its largest member */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            /* Bit position is already zero from our caller. */
            offset = 0;
            if (size > c)
                c = size;
        } else if (bit_size < 0) {
            /* ordinary (non-bit-field) struct member */
            int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;
            c = (c + addbytes + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
        } else {
            /* A bit-field.  Layout is more complicated.  There are two
               options TCC implements: PCC compatible and MS compatible
               (PCC compatible is what GCC uses for almost all targets).
               In PCC layout the overall size of the struct (in c) is
               _excluding_ the current run of bit-fields (that is,
               there's at least additional bit_pos bits after c).  In
               MS layout c does include the current run of bit-fields.

               This matters for calculating the natural alignment buckets
               in PCC mode. */

            /* 'align' will be used to influence records alignment,
               so it's the max of specified and type alignment, except
               in certain cases that depend on the mode. */
            if (align < typealign)
                align = typealign;
            if (pcc) {
                /* In PCC layout a non-packed bit-field is placed adjacent
                   to the preceding bit-fields, except if it would overflow
                   its container (depending on base type) or it's a zero-width
                   bit-field.  Packed non-zero-width bit-fields always are
                   placed adjacent. */
                int ofs = (c * 8 + bit_pos) % (typealign * 8);
                int ofs2 = ofs + bit_size + (typealign * 8) - 1;
                if (bit_size == 0 ||
                    (typealign != 1 &&
                     (ofs2 / (typealign * 8)) > (size/typealign))) {
                    c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
                    bit_pos = 0;
                }
                offset = c;
                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct
                   align/typealign). */
                if ((f->v & SYM_FIRST_ANOM))
                    align = 1;
            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8) ||
                    (bit_size > 0) == (bt != prevbt)) {
                    c = (c + typealign - 1) & -typealign;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }
            /* patch the stored bit position into the field type */
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
            if (pcc && bit_pos >= size * 8) {
                /* container full: close the run */
                c += size;
                bit_pos -= size * 8;
            }
        }
        if (align > maxalign)
            maxalign = align;
#if 0
        printf("set field %s offset=%d c=%d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos=%d size=%d",
                   (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
                   (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
        }
        printf("\n");
#endif

        if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
            Sym *ass;
            /* An anonymous struct/union.  Adjust member offsets
               to reflect the real offset of our containing struct.
               Also set the offset of this anon member inside
               the outer struct to be zero.  Via this it
               works when accessing the field offset directly
               (from base object), as well as when recursing
               members in initializer handling. */
            int v2 = f->type.ref->v;
            if (!(v2 & SYM_FIELD) &&
                (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                Sym **pps;
                /* This happens only with MS extensions.  The
                   anon member has a named struct type, so it
                   potentially is shared with other references.
                   We need to unshare members so we can modify
                   them. */
                ass = f->type.ref;
                f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
                                       &f->type.ref->type, 0,
                                       f->type.ref->c);
                pps = &f->type.ref->next;
                while ((ass = ass->next) != NULL) {
                    *pps = sym_push(ass->v, &ass->type, 0, ass->c);
                    pps = &((*pps)->next);
                }
                *pps = NULL;
            }
            struct_add_offset(f->type.ref, offset);
            f->c = 0;
        } else {
            f->c = offset;
        }

        f->r = 0;
    }
    /* store size and alignment */
    type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
                    + maxalign - 1) & -maxalign;
    type->ref->r = maxalign;
}
/* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT.
   Parses an optional tag, an optional member/enumerator list in
   braces, and fills 'type' with the resulting (possibly still
   incomplete) type.  Attributes before the tag or after the closing
   brace are merged into 'ad'. */
static void struct_decl(CType *type, AttributeDef *ad, int u)
{
    int a, v, size, align, flexible, alignoverride;
    long c;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad1;
    CType type1, btype;

    a = tok; /* save decl type */
    next();
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
            if (s->type.t != a)
                tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
            goto do_decl;
        }
    } else {
        /* untagged type: give it an anonymous symbol */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = a;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
    /* put struct/union/enum name in type */
 do_decl:
    type->t = u;
    type->ref = s;

    if (tok == '{') {
        next();
        /* s->c == -1 marks a still-incomplete type */
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        c = 0;
        /* non empty enums are not allowed */
        if (a == TOK_ENUM) {
            int seen_neg = 0;
            int seen_wide = 0;
            for(;;) {
                CType *t = &int_type;
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                    c = expr_const64();
#else
                    /* We really want to support long long enums
                       on i386 as well, but the Sym structure only
                       holds a 'long' for associated constants,
                       and enlarging it would bump its size (no
                       available padding).  So punt for now.  */
                    c = expr_const();
#endif
                }
                if (c < 0)
                    seen_neg = 1;
                /* value does not fit (unsigned) int: widen enumerator type */
                if (c != (int)c && (unsigned long)c != (unsigned int)c)
                    seen_wide = 1, t = &size_type;
                /* enum symbols have static storage */
                ss = sym_push(v, t, VT_CONST, c);
                ss->type.t |= VT_STATIC;
                if (tok != ',')
                    break;
                next();
                c++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            if (!seen_neg)
                s->a.unsigned_enum = 1;
            s->c = type_size(seen_wide ? &size_type : &int_type, &align);
            skip('}');
        } else {
            ps = &s->next;
            flexible = 0;
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                /* unnamed struct member: allowed only as an
                                   anonymous struct/union, or with the MS
                                   extensions for a named struct type */
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            /* incomplete member type: legal only as a
                               trailing flexible array member */
                            if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
                            parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    /* Only remember non-default alignment. */
                    alignoverride = 0;
                    if (ad1.a.aligned) {
                        int speca = 1 << (ad1.a.aligned - 1);
                        alignoverride = speca;
                    } else if (ad1.a.packed || ad->a.packed) {
                        alignoverride = 1;
                    } else if (*tcc_state->pack_stack_ptr) {
                        /* #pragma pack in effect */
                        if (align > *tcc_state->pack_stack_ptr)
                            alignoverride = *tcc_state->pack_stack_ptr;
                    }
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_ENUM &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize) {
                            /* no need for bit fields */
                            ;
                        } else {
                            type1.t |= VT_BITFIELD |
                                (0 << VT_STRUCT_SHIFT) |
                                (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
                parse_attribute(ad);
            struct_layout(type, ad);
        }
    }
}
3669 /* return 1 if basic type is a type size (short, long, long long) */
3670 ST_FUNC int is_btype_size(int bt)
3672 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3675 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3676 are added to the element type, copied because it could be a typedef. */
3677 static void parse_btype_qualify(CType *type, int qualifiers)
3679 while (type->t & VT_ARRAY) {
3680 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3681 type = &type->ref->type;
3683 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
   All specifier/qualifier/storage bits seen are accumulated in 't'
   and stored into type->t on exit; type->ref is set for struct/union,
   enum, typeof and typedef types. */
static int parse_btype(CType *type, AttributeDef *ad)
{
    int t, u, bt_size, complete, type_found, typespec_found;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    complete = 0;       /* set once a full basic type has been seen */
    type_found = 0;     /* return value: did we see any type at all? */
    typespec_found = 0; /* a type specifier keyword was seen */
    t = 0;
    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

            /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (complete)
                tcc_error("too many basic types");
            t |= u;
            /* size modifiers (short/long/long long) may still be combined
               with a following base type, so they don't complete the type */
            bt_size = is_btype_size (u & VT_BTYPE);
            if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
                complete = 1;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_LONG:
            next();
            if ((t & VT_BTYPE) == VT_DOUBLE) {
#ifndef TCC_TARGET_PE
                /* "long double" */
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
#endif
            } else if ((t & VT_BTYPE) == VT_LONG) {
                /* "long long" */
                t = (t & ~VT_BTYPE) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type1;
            }
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            next();
            if ((t & VT_BTYPE) == VT_LONG) {
#ifdef TCC_TARGET_PE
                /* on PE targets "long double" is plain double */
                t = (t & ~VT_BTYPE) | VT_DOUBLE;
#else
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
#endif
            } else {
                u = VT_DOUBLE;
                goto basic_type1;
            }
            break;
        case TOK_ENUM:
            struct_decl(&type1, ad, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
        case TOK_UNION:
            struct_decl(&type1, ad, VT_STRUCT);
            goto basic_type2;

            /* type modifiers */
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            /* qualify through arrays: go via parse_btype_qualify */
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            typespec_found = 1;
            t |= VT_DEFSIGN;
            next();
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* these storage/qualifier keywords are accepted and ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

            /* storage */
        case TOK_EXTERN:
            t |= VT_EXTERN;
            next();
            break;
        case TOK_STATIC:
            t |= VT_STATIC;
            next();
            break;
        case TOK_TYPEDEF:
            t |= VT_TYPEDEF;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;

            /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->a.mode) {
                /* __attribute__((mode(...))) overrides the basic type */
                u = ad->a.mode -1;
                t = (t & ~VT_BTYPE) | u;
            }
            break;
            /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            goto basic_type2;
        default:
            /* not a keyword: either a typedef name or the end of the
               specifier list */
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            type->t = ((s->type.t & ~VT_TYPEDEF) |
                       (t & ~(VT_CONSTANT | VT_VOLATILE)));
            type->ref = s->type.ref;
            if (t & (VT_CONSTANT | VT_VOLATILE))
                parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
            t = type->t;

            if (s->r) {
                /* get attributes from typedef */
                if (0 == ad->a.aligned)
                    ad->a.aligned = s->a.aligned;
                if (0 == ad->a.func_call)
                    ad->a.func_call = s->a.func_call;
                ad->a.packed |= s->a.packed;
            }
            next();
            typespec_found = 1;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }

    /* long is never used as type */
    if ((t & VT_BTYPE) == VT_LONG)
#if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
    defined TCC_TARGET_PE
        t = (t & ~VT_BTYPE) | VT_INT;
#else
        t = (t & ~VT_BTYPE) | VT_LLONG;
#endif
    type->t = t;
    return type_found;
}
3905 /* convert a function parameter type (array to pointer and function to
3906 function pointer) */
3907 static inline void convert_parameter_type(CType *pt)
3909 /* remove const and volatile qualifiers (XXX: const could be used
3910 to indicate a const function parameter */
3911 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3912 /* array must be transformed to pointer according to ANSI C */
3913 pt->t &= ~VT_ARRAY;
3914 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3915 mk_pointer(pt);
/* Parse the parenthesized string of an asm construct into 'astr':
   consumes the '(' and one or more concatenated string constants,
   leaving the closing ')' for the caller to skip. */
ST_FUNC void parse_asm_str(CString *astr)
{
    skip('(');
    /* accept several adjacent string literals, concatenated */
    parse_mult_str(astr, "string constant");
}
3925 /* Parse an asm label and return the token */
3926 static int asm_label_instr(void)
3928 int v;
3929 CString astr;
3931 next();
3932 parse_asm_str(&astr);
3933 skip(')');
3934 #ifdef ASM_DEBUG
3935 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3936 #endif
3937 v = tok_alloc(astr.data, astr.size - 1)->tok;
3938 cstr_free(&astr);
3939 return v;
/* Parse the part of a declarator that follows the identifier: a
   function parameter list '(...)' or array dimensions '[...]'
   (recursing for multi-dimensional arrays), and update 'type'
   accordingly.  'storage' carries the declaration's storage bits
   (used to decide whether an array size must be constant). */
static void post_type(CType *type, AttributeDef *ad, int storage)
{
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;

    if (tok == '(') {
        /* function declaration */
        next();
        l = 0;              /* prototype kind: FUNC_OLD/FUNC_NEW/FUNC_ELLIPSIS */
        first = NULL;       /* head of the parameter symbol list */
        plast = &first;
        arg_size = 0;       /* total parameter size in PTR_SIZE units */
        if (tok != ')') {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if (!parse_btype(&pt, &ad1)) {
                        if (l) {
                            tcc_error("invalid type");
                        } else {
                            /* no type: K&R style parameter name list */
                            l = FUNC_OLD;
                            goto old_proto;
                        }
                    }
                    l = FUNC_NEW;
                    /* "(void)" means no parameters */
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break;
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                } else {
                old_proto:
                    n = tok;
                    if (n < TOK_UIDENT)
                        expect("identifier");
                    /* K&R parameters default to int */
                    pt.t = VT_INT;
                    next();
                }
                convert_parameter_type(&pt);
                /* append the parameter to the prototype's symbol list */
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
            }
        }
        /* if no parameters, then old type prototype */
        if (l == 0)
            l = FUNC_OLD;
        skip(')');
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            type->t |= VT_PTR;
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->a.func_args = arg_size;
        s = sym_push(SYM_FIELD, type, 0, l);
        s->a = ad->a;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        next();
        if (tok == TOK_RESTRICT1)
            next();
        n = -1;             /* -1: no (constant) size known */
        t1 = 0;             /* VT_VLA when a runtime size is used */
        if (tok != ']') {
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
            else {
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
                nocode_wanted = 0;
                gexpr();
            }
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad, storage);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        /* an inner VLA dimension makes the whole array a VLA */
        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            /* reserve a stack slot and store the runtime size there */
            loc -= type_size(&int_type, &align);
            loc &= -align;
            n = loc;

            vla_runtime_type_size(type, &align);
            gen_op('*');
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
            vswap();
            vstore();
        }
        if (n != -1)
            vpop();
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;
    }
}
/* Parse a type declaration (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl().  The declared identifier, if any, is returned in *v
   (0 for an abstract declarator). */
static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    Sym *s;
    CType type1, *type2;
    int qualifiers, storage;

    /* parse leading '*' declarators with their qualifiers */
    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* restrict is accepted and ignored */
            goto redo;
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            break;
        }
        mk_pointer(type);
        type->t |= qualifiers;
    }

    /* recursive type */
    /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
    type1.t = 0; /* XXX: same as int */
    if (tok == '(') {
        next();
        /* XXX: this is not correct to modify 'ad' at this point, but
           the syntax is not clear */
        if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
            parse_attribute(ad);
        /* the inner declarator is parsed first; its "hole" (type1.t == 0)
           is filled in with the outer type below */
        type_decl(&type1, ad, v, td);
        skip(')');
    } else {
        /* type identifier */
        if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
            *v = tok;
            next();
        } else {
            if (!(td & TYPE_ABSTRACT))
                expect("identifier");
            *v = 0;
        }
    }
    /* storage bits must not be propagated into the function/array
       parsing done by post_type */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post_type(type, ad, storage);
    type->t |= storage;
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);

    if (!type1.t)
        return;
    /* append type at the end of type1 */
    type2 = &type1;
    for(;;) {
        s = type2->ref;
        type2 = &s->type;
        if (!type2->t) {
            *type2 = *type;
            break;
        }
    }
    *type = type1;
}
4163 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4164 ST_FUNC int lvalue_type(int t)
4166 int bt, r;
4167 r = VT_LVAL;
4168 bt = t & VT_BTYPE;
4169 if (bt == VT_BYTE || bt == VT_BOOL)
4170 r |= VT_LVAL_BYTE;
4171 else if (bt == VT_SHORT)
4172 r |= VT_LVAL_SHORT;
4173 else
4174 return r;
4175 if (t & VT_UNSIGNED)
4176 r |= VT_LVAL_UNSIGNED;
4177 return r;
4180 /* indirection with full error checking and bound check */
4181 ST_FUNC void indir(void)
4183 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4184 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4185 return;
4186 expect("pointer");
4188 if (vtop->r & VT_LVAL)
4189 gv(RC_INT);
4190 vtop->type = *pointed_type(&vtop->type);
4191 /* Arrays and functions are never lvalues */
4192 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4193 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4194 vtop->r |= lvalue_type(vtop->type.t);
4195 /* if bound checking, the referenced pointer must be checked */
4196 #ifdef CONFIG_TCC_BCHECK
4197 if (tcc_state->do_bounds_check)
4198 vtop->r |= VT_MUSTBOUND;
4199 #endif
4203 /* pass a parameter to a function and do type checking and casting */
4204 static void gfunc_param_typed(Sym *func, Sym *arg)
4206 int func_type;
4207 CType type;
4209 func_type = func->c;
4210 if (func_type == FUNC_OLD ||
4211 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4212 /* default casting : only need to convert float to double */
4213 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4214 type.t = VT_DOUBLE;
4215 gen_cast(&type);
4216 } else if (vtop->type.t & VT_BITFIELD) {
4217 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4218 type.ref = vtop->type.ref;
4219 gen_cast(&type);
4221 } else if (arg == NULL) {
4222 tcc_error("too many arguments to function");
4223 } else {
4224 type = arg->type;
4225 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4226 gen_assign_cast(&type);
4230 /* parse an expression of the form '(type)' or '(expr)' and return its
4231 type */
4232 static void parse_expr_type(CType *type)
4234 int n;
4235 AttributeDef ad;
4237 skip('(');
4238 if (parse_btype(type, &ad)) {
4239 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4240 } else {
4241 expr_type(type);
4243 skip(')');
4246 static void parse_type(CType *type)
4248 AttributeDef ad;
4249 int n;
4251 if (!parse_btype(type, &ad)) {
4252 expect("type");
4254 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4257 static void vpush_tokc(int t)
4259 CType type;
4260 type.t = t;
4261 type.ref = 0;
4262 vsetc(&type, VT_CONST, &tokc);
4265 ST_FUNC void unary(void)
4267 int n, t, align, size, r, sizeof_caller;
4268 CType type;
4269 Sym *s;
4270 AttributeDef ad;
4272 sizeof_caller = in_sizeof;
4273 in_sizeof = 0;
4274 /* XXX: GCC 2.95.3 does not generate a table although it should be
4275 better here */
4276 tok_next:
4277 switch(tok) {
4278 case TOK_EXTENSION:
4279 next();
4280 goto tok_next;
4281 case TOK_CINT:
4282 case TOK_CCHAR:
4283 case TOK_LCHAR:
4284 vpushi(tokc.i);
4285 next();
4286 break;
4287 case TOK_CUINT:
4288 vpush_tokc(VT_INT | VT_UNSIGNED);
4289 next();
4290 break;
4291 case TOK_CLLONG:
4292 vpush_tokc(VT_LLONG);
4293 next();
4294 break;
4295 case TOK_CULLONG:
4296 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4297 next();
4298 break;
4299 case TOK_CFLOAT:
4300 vpush_tokc(VT_FLOAT);
4301 next();
4302 break;
4303 case TOK_CDOUBLE:
4304 vpush_tokc(VT_DOUBLE);
4305 next();
4306 break;
4307 case TOK_CLDOUBLE:
4308 vpush_tokc(VT_LDOUBLE);
4309 next();
4310 break;
4311 case TOK___FUNCTION__:
4312 if (!gnu_ext)
4313 goto tok_identifier;
4314 /* fall thru */
4315 case TOK___FUNC__:
4317 void *ptr;
4318 int len;
4319 /* special function name identifier */
4320 len = strlen(funcname) + 1;
4321 /* generate char[len] type */
4322 type.t = VT_BYTE;
4323 mk_pointer(&type);
4324 type.t |= VT_ARRAY;
4325 type.ref->c = len;
4326 vpush_ref(&type, data_section, data_section->data_offset, len);
4327 ptr = section_ptr_add(data_section, len);
4328 memcpy(ptr, funcname, len);
4329 next();
4331 break;
4332 case TOK_LSTR:
4333 #ifdef TCC_TARGET_PE
4334 t = VT_SHORT | VT_UNSIGNED;
4335 #else
4336 t = VT_INT;
4337 #endif
4338 goto str_init;
4339 case TOK_STR:
4340 /* string parsing */
4341 t = VT_BYTE;
4342 str_init:
4343 if (tcc_state->warn_write_strings)
4344 t |= VT_CONSTANT;
4345 type.t = t;
4346 mk_pointer(&type);
4347 type.t |= VT_ARRAY;
4348 memset(&ad, 0, sizeof(AttributeDef));
4349 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4350 break;
4351 case '(':
4352 next();
4353 /* cast ? */
4354 if (parse_btype(&type, &ad)) {
4355 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4356 skip(')');
4357 /* check ISOC99 compound literal */
4358 if (tok == '{') {
4359 /* data is allocated locally by default */
4360 if (global_expr)
4361 r = VT_CONST;
4362 else
4363 r = VT_LOCAL;
4364 /* all except arrays are lvalues */
4365 if (!(type.t & VT_ARRAY))
4366 r |= lvalue_type(type.t);
4367 memset(&ad, 0, sizeof(AttributeDef));
4368 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4369 } else {
4370 if (sizeof_caller) {
4371 vpush(&type);
4372 return;
4374 unary();
4375 gen_cast(&type);
4377 } else if (tok == '{') {
4378 int saved_nocode_wanted = nocode_wanted;
4379 if (const_wanted)
4380 tcc_error("expected constant");
4381 /* save all registers */
4382 save_regs(0);
4383 /* statement expression : we do not accept break/continue
4384 inside as GCC does. We do retain the nocode_wanted state,
4385 as statement expressions can't ever be entered from the
4386 outside, so any reactivation of code emission (from labels
4387 or loop heads) can be disabled again after the end of it. */
4388 block(NULL, NULL, 1);
4389 nocode_wanted = saved_nocode_wanted;
4390 skip(')');
4391 } else {
4392 gexpr();
4393 skip(')');
4395 break;
4396 case '*':
4397 next();
4398 unary();
4399 indir();
4400 break;
4401 case '&':
4402 next();
4403 unary();
4404 /* functions names must be treated as function pointers,
4405 except for unary '&' and sizeof. Since we consider that
4406 functions are not lvalues, we only have to handle it
4407 there and in function calls. */
4408 /* arrays can also be used although they are not lvalues */
4409 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4410 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4411 test_lvalue();
4412 mk_pointer(&vtop->type);
4413 gaddrof();
4414 break;
4415 case '!':
4416 next();
4417 unary();
4418 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4419 CType boolean;
4420 boolean.t = VT_BOOL;
4421 gen_cast(&boolean);
4422 vtop->c.i = !vtop->c.i;
4423 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4424 vtop->c.i ^= 1;
4425 else {
4426 save_regs(1);
4427 vseti(VT_JMP, gvtst(1, 0));
4429 break;
4430 case '~':
4431 next();
4432 unary();
4433 vpushi(-1);
4434 gen_op('^');
4435 break;
4436 case '+':
4437 next();
4438 unary();
4439 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4440 tcc_error("pointer not accepted for unary plus");
4441 /* In order to force cast, we add zero, except for floating point
4442 where we really need an noop (otherwise -0.0 will be transformed
4443 into +0.0). */
4444 if (!is_float(vtop->type.t)) {
4445 vpushi(0);
4446 gen_op('+');
4448 break;
4449 case TOK_SIZEOF:
4450 case TOK_ALIGNOF1:
4451 case TOK_ALIGNOF2:
4452 t = tok;
4453 next();
4454 in_sizeof++;
4455 unary_type(&type); // Perform a in_sizeof = 0;
4456 size = type_size(&type, &align);
4457 if (t == TOK_SIZEOF) {
4458 if (!(type.t & VT_VLA)) {
4459 if (size < 0)
4460 tcc_error("sizeof applied to an incomplete type");
4461 vpushs(size);
4462 } else {
4463 vla_runtime_type_size(&type, &align);
4465 } else {
4466 vpushs(align);
4468 vtop->type.t |= VT_UNSIGNED;
4469 break;
4471 case TOK_builtin_expect:
4473 /* __builtin_expect is a no-op for now */
4474 next();
4475 skip('(');
4476 expr_eq();
4477 skip(',');
4478 nocode_wanted++;
4479 expr_lor_const();
4480 vpop();
4481 nocode_wanted--;
4482 skip(')');
4484 break;
4485 case TOK_builtin_types_compatible_p:
4487 CType type1, type2;
4488 next();
4489 skip('(');
4490 parse_type(&type1);
4491 skip(',');
4492 parse_type(&type2);
4493 skip(')');
4494 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4495 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4496 vpushi(is_compatible_types(&type1, &type2));
4498 break;
4499 case TOK_builtin_choose_expr:
4501 int64_t c;
4502 next();
4503 skip('(');
4504 c = expr_const64();
4505 skip(',');
4506 if (!c) {
4507 nocode_wanted++;
4509 expr_eq();
4510 if (!c) {
4511 vpop();
4512 nocode_wanted--;
4514 skip(',');
4515 if (c) {
4516 nocode_wanted++;
4518 expr_eq();
4519 if (c) {
4520 vpop();
4521 nocode_wanted--;
4523 skip(')');
4525 break;
4526 case TOK_builtin_constant_p:
4528 int res;
4529 next();
4530 skip('(');
4531 nocode_wanted++;
4532 gexpr();
4533 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4534 vpop();
4535 nocode_wanted--;
4536 skip(')');
4537 vpushi(res);
4539 break;
4540 case TOK_builtin_frame_address:
4541 case TOK_builtin_return_address:
4543 int tok1 = tok;
4544 int level;
4545 CType type;
4546 next();
4547 skip('(');
4548 if (tok != TOK_CINT) {
4549 tcc_error("%s only takes positive integers",
4550 tok1 == TOK_builtin_return_address ?
4551 "__builtin_return_address" :
4552 "__builtin_frame_address");
4554 level = (uint32_t)tokc.i;
4555 next();
4556 skip(')');
4557 type.t = VT_VOID;
4558 mk_pointer(&type);
4559 vset(&type, VT_LOCAL, 0); /* local frame */
4560 while (level--) {
4561 mk_pointer(&vtop->type);
4562 indir(); /* -> parent frame */
4564 if (tok1 == TOK_builtin_return_address) {
4565 // assume return address is just above frame pointer on stack
4566 vpushi(PTR_SIZE);
4567 gen_op('+');
4568 mk_pointer(&vtop->type);
4569 indir();
4572 break;
4573 #ifdef TCC_TARGET_X86_64
4574 #ifdef TCC_TARGET_PE
4575 case TOK_builtin_va_start:
4577 next();
4578 skip('(');
4579 expr_eq();
4580 skip(',');
4581 expr_eq();
4582 skip(')');
4583 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4584 tcc_error("__builtin_va_start expects a local variable");
4585 vtop->r &= ~(VT_LVAL | VT_REF);
4586 vtop->type = char_pointer_type;
4587 vtop->c.i += 8;
4588 vstore();
4590 break;
4591 #else
4592 case TOK_builtin_va_arg_types:
4594 CType type;
4595 next();
4596 skip('(');
4597 parse_type(&type);
4598 skip(')');
4599 vpushi(classify_x86_64_va_arg(&type));
4601 break;
4602 #endif
4603 #endif
4605 #ifdef TCC_TARGET_ARM64
4606 case TOK___va_start: {
4607 next();
4608 skip('(');
4609 expr_eq();
4610 skip(',');
4611 expr_eq();
4612 skip(')');
4613 //xx check types
4614 gen_va_start();
4615 vpushi(0);
4616 vtop->type.t = VT_VOID;
4617 break;
4619 case TOK___va_arg: {
4620 CType type;
4621 next();
4622 skip('(');
4623 expr_eq();
4624 skip(',');
4625 parse_type(&type);
4626 skip(')');
4627 //xx check types
4628 gen_va_arg(&type);
4629 vtop->type = type;
4630 break;
4632 case TOK___arm64_clear_cache: {
4633 next();
4634 skip('(');
4635 expr_eq();
4636 skip(',');
4637 expr_eq();
4638 skip(')');
4639 gen_clear_cache();
4640 vpushi(0);
4641 vtop->type.t = VT_VOID;
4642 break;
4644 #endif
4645 /* pre operations */
4646 case TOK_INC:
4647 case TOK_DEC:
4648 t = tok;
4649 next();
4650 unary();
4651 inc(0, t);
4652 break;
4653 case '-':
4654 next();
4655 unary();
4656 t = vtop->type.t & VT_BTYPE;
4657 if (is_float(t)) {
4658 /* In IEEE negate(x) isn't subtract(0,x), but rather
4659 subtract(-0, x). */
4660 vpush(&vtop->type);
4661 if (t == VT_FLOAT)
4662 vtop->c.f = -0.0f;
4663 else if (t == VT_DOUBLE)
4664 vtop->c.d = -0.0;
4665 else
4666 vtop->c.ld = -0.0;
4667 } else
4668 vpushi(0);
4669 vswap();
4670 gen_op('-');
4671 break;
4672 case TOK_LAND:
4673 if (!gnu_ext)
4674 goto tok_identifier;
4675 next();
4676 /* allow to take the address of a label */
4677 if (tok < TOK_UIDENT)
4678 expect("label identifier");
4679 s = label_find(tok);
4680 if (!s) {
4681 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4682 } else {
4683 if (s->r == LABEL_DECLARED)
4684 s->r = LABEL_FORWARD;
4686 if (!s->type.t) {
4687 s->type.t = VT_VOID;
4688 mk_pointer(&s->type);
4689 s->type.t |= VT_STATIC;
4691 vpushsym(&s->type, s);
4692 next();
4693 break;
4695 // special qnan , snan and infinity values
4696 case TOK___NAN__:
4697 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4698 next();
4699 break;
4700 case TOK___SNAN__:
4701 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4702 next();
4703 break;
4704 case TOK___INF__:
4705 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4706 next();
4707 break;
4709 default:
4710 tok_identifier:
4711 t = tok;
4712 next();