/* tccgen.c — from tinycc.git (gitweb blob 5265e494f5d6486878300dd191af428474754cbe) */
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
56 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
57 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
58 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
59 ST_DATA int func_vc;
60 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
61 ST_DATA const char *funcname;
62 ST_DATA int g_debug;
64 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
66 ST_DATA struct switch_t {
67 struct case_t {
68 int64_t v1, v2;
69 int sym;
70 } **p; int n; /* list of case ranges */
71 int def_sym; /* default symbol */
72 } *cur_switch; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType *type);
77 static void gen_cast_s(int t);
78 static inline CType *pointed_type(CType *type);
79 static int is_compatible_types(CType *type1, CType *type2);
80 static int parse_btype(CType *type, AttributeDef *ad);
81 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
82 static void parse_expr_type(CType *type);
83 static void init_putv(CType *type, Section *sec, unsigned long c);
84 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
85 static void block(int *bsym, int *csym, int is_expr);
86 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
87 static void decl(int l);
88 static int decl0(int l, int is_for_loop_init, Sym *);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType *type, int *a);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType *type1, CType *type2);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty, unsigned long long v);
96 static void vpush(CType *type);
97 static int gvtst(int inv, int t);
98 static void gen_inline_functions(TCCState *s);
99 static void skip_or_save_block(TokenString **str);
100 static void gv_dup(void);
102 ST_INLN int is_float(int t)
104 int bt;
105 bt = t & VT_BTYPE;
106 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
109 /* we use our own 'finite' function to avoid potential problems with
110 non standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC int ieee_finite(double d)
114 int p[4];
115 memcpy(p, &d, sizeof(double));
116 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
123 #endif
125 ST_FUNC void test_lvalue(void)
127 if (!(vtop->r & VT_LVAL))
128 expect("lvalue");
131 ST_FUNC void check_vstack(void)
133 if (pvtop != vtop)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
/* ------------------------------------------------------------------------- */
/* vstack debugging aid: dump 'b' entries starting at vtop[-a] */

#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC void tcc_debug_start(TCCState *s1)
156 if (s1->do_debug) {
157 char buf[512];
159 /* file info: full path + filename */
160 section_sym = put_elf_sym(symtab_section, 0, 0,
161 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
162 text_section->sh_num, NULL);
163 getcwd(buf, sizeof(buf));
164 #ifdef _WIN32
165 normalize_slashes(buf);
166 #endif
167 pstrcat(buf, sizeof(buf), "/");
168 put_stabs_r(buf, N_SO, 0, 0,
169 text_section->data_offset, text_section, section_sym);
170 put_stabs_r(file->filename, N_SO, 0, 0,
171 text_section->data_offset, text_section, section_sym);
172 last_ind = 0;
173 last_line_num = 0;
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section, 0, 0,
179 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
180 SHN_ABS, file->filename);
183 /* put end of translation unit info */
184 ST_FUNC void tcc_debug_end(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 put_stabs_r(NULL, N_SO, 0, 0,
189 text_section->data_offset, text_section, section_sym);
193 /* generate line number info */
194 ST_FUNC void tcc_debug_line(TCCState *s1)
196 if (!s1->do_debug)
197 return;
198 if ((last_line_num != file->line_num || last_ind != ind)) {
199 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
200 last_ind = ind;
201 last_line_num = file->line_num;
205 /* put function symbol */
206 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
208 char buf[512];
210 if (!s1->do_debug)
211 return;
213 /* stabs info */
214 /* XXX: we put here a dummy type */
215 snprintf(buf, sizeof(buf), "%s:%c1",
216 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
217 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
218 cur_text_section, sym->c);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE, 0, file->line_num, 0);
222 last_ind = 0;
223 last_line_num = 0;
226 /* put function size */
227 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
229 if (!s1->do_debug)
230 return;
231 put_stabn(N_FUN, 0, 0, size);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC int tccgen_compile(TCCState *s1)
237 cur_text_section = NULL;
238 funcname = "";
239 anon_sym = SYM_FIRST_ANOM;
240 section_sym = 0;
241 const_wanted = 0;
242 nocode_wanted = 0x80000000;
244 /* define some often used types */
245 int_type.t = VT_INT;
246 char_pointer_type.t = VT_BYTE;
247 mk_pointer(&char_pointer_type);
248 #if PTR_SIZE == 4
249 size_type.t = VT_INT | VT_UNSIGNED;
250 ptrdiff_type.t = VT_INT;
251 #elif LONG_SIZE == 4
252 size_type.t = VT_LLONG | VT_UNSIGNED;
253 ptrdiff_type.t = VT_LLONG;
254 #else
255 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
256 ptrdiff_type.t = VT_LONG | VT_LLONG;
257 #endif
258 func_old_type.t = VT_FUNC;
259 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
260 func_old_type.ref->f.func_call = FUNC_CDECL;
261 func_old_type.ref->f.func_type = FUNC_OLD;
263 tcc_debug_start(s1);
265 #ifdef TCC_TARGET_ARM
266 arm_init(s1);
267 #endif
269 #ifdef INC_DEBUG
270 printf("%s: **** new file\n", file->filename);
271 #endif
273 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
274 next();
275 decl(VT_CONST);
276 gen_inline_functions(s1);
277 check_vstack();
278 /* end of translation unit info */
279 tcc_debug_end(s1);
280 return 0;
283 /* ------------------------------------------------------------------------- */
284 ST_FUNC ElfSym *elfsym(Sym *s)
286 if (!s || !s->c)
287 return NULL;
288 return &((ElfSym *)symtab_section->data)[s->c];
291 /* apply storage attributes to Elf symbol */
292 ST_FUNC void update_storage(Sym *sym)
294 ElfSym *esym;
295 int sym_bind, old_sym_bind;
297 esym = elfsym(sym);
298 if (!esym)
299 return;
301 if (sym->a.visibility)
302 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
303 | sym->a.visibility;
305 if (sym->type.t & VT_STATIC)
306 sym_bind = STB_LOCAL;
307 else if (sym->a.weak)
308 sym_bind = STB_WEAK;
309 else
310 sym_bind = STB_GLOBAL;
311 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
312 if (sym_bind != old_sym_bind) {
313 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
316 #ifdef TCC_TARGET_PE
317 if (sym->a.dllimport)
318 esym->st_other |= ST_PE_IMPORT;
319 if (sym->a.dllexport)
320 esym->st_other |= ST_PE_EXPORT;
321 #endif
323 #if 0
324 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
325 get_tok_str(sym->v, NULL),
326 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
327 sym->a.visibility,
328 sym->a.dllexport,
329 sym->a.dllimport
331 #endif
334 /* ------------------------------------------------------------------------- */
335 /* update sym->c so that it points to an external symbol in section
336 'section' with value 'value' */
338 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
339 addr_t value, unsigned long size,
340 int can_add_underscore)
342 int sym_type, sym_bind, info, other, t;
343 ElfSym *esym;
344 const char *name;
345 char buf1[256];
346 #ifdef CONFIG_TCC_BCHECK
347 char buf[32];
348 #endif
350 if (!sym->c) {
351 name = get_tok_str(sym->v, NULL);
352 #ifdef CONFIG_TCC_BCHECK
353 if (tcc_state->do_bounds_check) {
354 /* XXX: avoid doing that for statics ? */
355 /* if bound checking is activated, we change some function
356 names by adding the "__bound" prefix */
357 switch(sym->v) {
358 #ifdef TCC_TARGET_PE
359 /* XXX: we rely only on malloc hooks */
360 case TOK_malloc:
361 case TOK_free:
362 case TOK_realloc:
363 case TOK_memalign:
364 case TOK_calloc:
365 #endif
366 case TOK_memcpy:
367 case TOK_memmove:
368 case TOK_memset:
369 case TOK_strlen:
370 case TOK_strcpy:
371 case TOK_alloca:
372 strcpy(buf, "__bound_");
373 strcat(buf, name);
374 name = buf;
375 break;
378 #endif
379 t = sym->type.t;
380 if ((t & VT_BTYPE) == VT_FUNC) {
381 sym_type = STT_FUNC;
382 } else if ((t & VT_BTYPE) == VT_VOID) {
383 sym_type = STT_NOTYPE;
384 } else {
385 sym_type = STT_OBJECT;
387 if (t & VT_STATIC)
388 sym_bind = STB_LOCAL;
389 else
390 sym_bind = STB_GLOBAL;
391 other = 0;
392 #ifdef TCC_TARGET_PE
393 if (sym_type == STT_FUNC && sym->type.ref) {
394 Sym *ref = sym->type.ref;
395 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
396 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
397 name = buf1;
398 other |= ST_PE_STDCALL;
399 can_add_underscore = 0;
402 #endif
403 if (tcc_state->leading_underscore && can_add_underscore) {
404 buf1[0] = '_';
405 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
406 name = buf1;
408 if (sym->asm_label)
409 name = get_tok_str(sym->asm_label, NULL);
410 info = ELFW(ST_INFO)(sym_bind, sym_type);
411 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
412 } else {
413 esym = elfsym(sym);
414 esym->st_value = value;
415 esym->st_size = size;
416 esym->st_shndx = sh_num;
418 update_storage(sym);
421 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
422 addr_t value, unsigned long size)
424 int sh_num = section ? section->sh_num : SHN_UNDEF;
425 put_extern_sym2(sym, sh_num, value, size, 1);
428 /* add a new relocation entry to symbol 'sym' in section 's' */
429 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
430 addr_t addend)
432 int c = 0;
434 if (nocode_wanted && s == cur_text_section)
435 return;
437 if (sym) {
438 if (0 == sym->c)
439 put_extern_sym(sym, NULL, 0, 0);
440 c = sym->c;
443 /* now we can add ELF relocation info */
444 put_elf_reloca(symtab_section, s, offset, type, c, addend);
447 #if PTR_SIZE == 4
448 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
450 greloca(s, sym, offset, type, 0);
452 #endif
454 /* ------------------------------------------------------------------------- */
455 /* symbol allocator */
456 static Sym *__sym_malloc(void)
458 Sym *sym_pool, *sym, *last_sym;
459 int i;
461 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
462 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
464 last_sym = sym_free_first;
465 sym = sym_pool;
466 for(i = 0; i < SYM_POOL_NB; i++) {
467 sym->next = last_sym;
468 last_sym = sym;
469 sym++;
471 sym_free_first = last_sym;
472 return last_sym;
475 static inline Sym *sym_malloc(void)
477 Sym *sym;
478 #ifndef SYM_DEBUG
479 sym = sym_free_first;
480 if (!sym)
481 sym = __sym_malloc();
482 sym_free_first = sym->next;
483 return sym;
484 #else
485 sym = tcc_malloc(sizeof(Sym));
486 return sym;
487 #endif
490 ST_INLN void sym_free(Sym *sym)
492 #ifndef SYM_DEBUG
493 sym->next = sym_free_first;
494 sym_free_first = sym;
495 #else
496 tcc_free(sym);
497 #endif
500 /* push, without hashing */
501 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
503 Sym *s;
505 s = sym_malloc();
506 memset(s, 0, sizeof *s);
507 s->v = v;
508 s->type.t = t;
509 s->c = c;
510 /* add in stack */
511 s->prev = *ps;
512 *ps = s;
513 return s;
516 /* find a symbol and return its associated structure. 's' is the top
517 of the symbol stack */
518 ST_FUNC Sym *sym_find2(Sym *s, int v)
520 while (s) {
521 if (s->v == v)
522 return s;
523 else if (s->v == -1)
524 return NULL;
525 s = s->prev;
527 return NULL;
530 /* structure lookup */
531 ST_INLN Sym *struct_find(int v)
533 v -= TOK_IDENT;
534 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
535 return NULL;
536 return table_ident[v]->sym_struct;
539 /* find an identifier */
540 ST_INLN Sym *sym_find(int v)
542 v -= TOK_IDENT;
543 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
544 return NULL;
545 return table_ident[v]->sym_identifier;
548 /* push a given symbol on the symbol stack */
549 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
551 Sym *s, **ps;
552 TokenSym *ts;
554 if (local_stack)
555 ps = &local_stack;
556 else
557 ps = &global_stack;
558 s = sym_push2(ps, v, type->t, c);
559 s->type.ref = type->ref;
560 s->r = r;
561 /* don't record fields or anonymous symbols */
562 /* XXX: simplify */
563 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
564 /* record symbol in token array */
565 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
566 if (v & SYM_STRUCT)
567 ps = &ts->sym_struct;
568 else
569 ps = &ts->sym_identifier;
570 s->prev_tok = *ps;
571 *ps = s;
572 s->sym_scope = local_scope;
573 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
574 tcc_error("redeclaration of '%s'",
575 get_tok_str(v & ~SYM_STRUCT, NULL));
577 return s;
580 /* push a global identifier */
581 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
583 Sym *s, **ps;
584 s = sym_push2(&global_stack, v, t, c);
585 /* don't record anonymous symbol */
586 if (v < SYM_FIRST_ANOM) {
587 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
588 /* modify the top most local identifier, so that
589 sym_identifier will point to 's' when popped */
590 while (*ps != NULL && (*ps)->sym_scope)
591 ps = &(*ps)->prev_tok;
592 s->prev_tok = *ps;
593 *ps = s;
595 return s;
598 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
599 pop them yet from the list, but do remove them from the token array. */
600 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
602 Sym *s, *ss, **ps;
603 TokenSym *ts;
604 int v;
606 s = *ptop;
607 while(s != b) {
608 ss = s->prev;
609 v = s->v;
610 /* remove symbol in token array */
611 /* XXX: simplify */
612 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
613 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
614 if (v & SYM_STRUCT)
615 ps = &ts->sym_struct;
616 else
617 ps = &ts->sym_identifier;
618 *ps = s->prev_tok;
620 if (!keep)
621 sym_free(s);
622 s = ss;
624 if (!keep)
625 *ptop = b;
628 /* ------------------------------------------------------------------------- */
630 static void vsetc(CType *type, int r, CValue *vc)
632 int v;
634 if (vtop >= vstack + (VSTACK_SIZE - 1))
635 tcc_error("memory full (vstack)");
636 /* cannot let cpu flags if other instruction are generated. Also
637 avoid leaving VT_JMP anywhere except on the top of the stack
638 because it would complicate the code generator.
640 Don't do this when nocode_wanted. vtop might come from
641 !nocode_wanted regions (see 88_codeopt.c) and transforming
642 it to a register without actually generating code is wrong
643 as their value might still be used for real. All values
644 we push under nocode_wanted will eventually be popped
645 again, so that the VT_CMP/VT_JMP value will be in vtop
646 when code is unsuppressed again.
648 Same logic below in vswap(); */
649 if (vtop >= vstack && !nocode_wanted) {
650 v = vtop->r & VT_VALMASK;
651 if (v == VT_CMP || (v & ~1) == VT_JMP)
652 gv(RC_INT);
655 vtop++;
656 vtop->type = *type;
657 vtop->r = r;
658 vtop->r2 = VT_CONST;
659 vtop->c = *vc;
660 vtop->sym = NULL;
663 ST_FUNC void vswap(void)
665 SValue tmp;
666 /* cannot vswap cpu flags. See comment at vsetc() above */
667 if (vtop >= vstack && !nocode_wanted) {
668 int v = vtop->r & VT_VALMASK;
669 if (v == VT_CMP || (v & ~1) == VT_JMP)
670 gv(RC_INT);
672 tmp = vtop[0];
673 vtop[0] = vtop[-1];
674 vtop[-1] = tmp;
677 /* pop stack value */
678 ST_FUNC void vpop(void)
680 int v;
681 v = vtop->r & VT_VALMASK;
682 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
683 /* for x86, we need to pop the FP stack */
684 if (v == TREG_ST0) {
685 o(0xd8dd); /* fstp %st(0) */
686 } else
687 #endif
688 if (v == VT_JMP || v == VT_JMPI) {
689 /* need to put correct jump if && or || without test */
690 gsym(vtop->c.i);
692 vtop--;
695 /* push constant of type "type" with useless value */
696 ST_FUNC void vpush(CType *type)
698 vset(type, VT_CONST, 0);
701 /* push integer constant */
702 ST_FUNC void vpushi(int v)
704 CValue cval;
705 cval.i = v;
706 vsetc(&int_type, VT_CONST, &cval);
709 /* push a pointer sized constant */
710 static void vpushs(addr_t v)
712 CValue cval;
713 cval.i = v;
714 vsetc(&size_type, VT_CONST, &cval);
717 /* push arbitrary 64bit constant */
718 ST_FUNC void vpush64(int ty, unsigned long long v)
720 CValue cval;
721 CType ctype;
722 ctype.t = ty;
723 ctype.ref = NULL;
724 cval.i = v;
725 vsetc(&ctype, VT_CONST, &cval);
728 /* push long long constant */
729 static inline void vpushll(long long v)
731 vpush64(VT_LLONG, v);
734 ST_FUNC void vset(CType *type, int r, int v)
736 CValue cval;
738 cval.i = v;
739 vsetc(type, r, &cval);
742 static void vseti(int r, int v)
744 CType type;
745 type.t = VT_INT;
746 type.ref = NULL;
747 vset(&type, r, v);
750 ST_FUNC void vpushv(SValue *v)
752 if (vtop >= vstack + (VSTACK_SIZE - 1))
753 tcc_error("memory full (vstack)");
754 vtop++;
755 *vtop = *v;
758 static void vdup(void)
760 vpushv(vtop);
763 /* rotate n first stack elements to the bottom
764 I1 ... In -> I2 ... In I1 [top is right]
766 ST_FUNC void vrotb(int n)
768 int i;
769 SValue tmp;
771 tmp = vtop[-n + 1];
772 for(i=-n+1;i!=0;i++)
773 vtop[i] = vtop[i+1];
774 vtop[0] = tmp;
777 /* rotate the n elements before entry e towards the top
778 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
780 ST_FUNC void vrote(SValue *e, int n)
782 int i;
783 SValue tmp;
785 tmp = *e;
786 for(i = 0;i < n - 1; i++)
787 e[-i] = e[-i - 1];
788 e[-n + 1] = tmp;
791 /* rotate n first stack elements to the top
792 I1 ... In -> In I1 ... I(n-1) [top is right]
794 ST_FUNC void vrott(int n)
796 vrote(vtop, n);
799 /* push a symbol value of TYPE */
800 static inline void vpushsym(CType *type, Sym *sym)
802 CValue cval;
803 cval.i = 0;
804 vsetc(type, VT_CONST | VT_SYM, &cval);
805 vtop->sym = sym;
808 /* Return a static symbol pointing to a section */
809 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
811 int v;
812 Sym *sym;
814 v = anon_sym++;
815 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
816 sym->type.ref = type->ref;
817 sym->r = VT_CONST | VT_SYM;
818 put_extern_sym(sym, sec, offset, size);
819 return sym;
822 /* push a reference to a section offset by adding a dummy symbol */
823 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
825 vpushsym(type, get_sym_ref(type, sec, offset, size));
828 /* define a new external reference to a symbol 'v' of type 'u' */
829 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
831 Sym *s;
833 s = sym_find(v);
834 if (!s) {
835 /* push forward reference */
836 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
837 s->type.ref = type->ref;
838 s->r = r | VT_CONST | VT_SYM;
839 } else if (IS_ASM_SYM(s)) {
840 s->type.t = type->t | (s->type.t & VT_EXTERN);
841 s->type.ref = type->ref;
842 update_storage(s);
844 return s;
847 /* Merge some type attributes. */
848 static void patch_type(Sym *sym, CType *type)
850 if (!(type->t & VT_EXTERN)) {
851 if (!(sym->type.t & VT_EXTERN))
852 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
853 sym->type.t &= ~VT_EXTERN;
856 if (IS_ASM_SYM(sym)) {
857 /* stay static if both are static */
858 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
859 sym->type.ref = type->ref;
862 if (!is_compatible_types(&sym->type, type)) {
863 tcc_error("incompatible types for redefinition of '%s'",
864 get_tok_str(sym->v, NULL));
866 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
867 int static_proto = sym->type.t & VT_STATIC;
868 /* warn if static follows non-static function declaration */
869 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
870 tcc_warning("static storage ignored for redefinition of '%s'",
871 get_tok_str(sym->v, NULL));
873 if (0 == (type->t & VT_EXTERN)) {
874 /* put complete type, use static from prototype */
875 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
876 if (type->t & VT_INLINE)
877 sym->type.t = type->t;
878 sym->type.ref = type->ref;
881 } else {
882 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
883 /* set array size if it was omitted in extern declaration */
884 if (sym->type.ref->c < 0)
885 sym->type.ref->c = type->ref->c;
886 else if (sym->type.ref->c != type->ref->c)
887 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
889 if ((type->t ^ sym->type.t) & VT_STATIC)
890 tcc_warning("storage mismatch for redefinition of '%s'",
891 get_tok_str(sym->v, NULL));
896 /* Merge some storage attributes. */
897 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
899 if (type)
900 patch_type(sym, type);
902 #ifdef TCC_TARGET_PE
903 if (sym->a.dllimport != ad->a.dllimport)
904 tcc_error("incompatible dll linkage for redefinition of '%s'",
905 get_tok_str(sym->v, NULL));
906 sym->a.dllexport |= ad->a.dllexport;
907 #endif
908 sym->a.weak |= ad->a.weak;
909 if (ad->a.visibility) {
910 int vis = sym->a.visibility;
911 int vis2 = ad->a.visibility;
912 if (vis == STV_DEFAULT)
913 vis = vis2;
914 else if (vis2 != STV_DEFAULT)
915 vis = (vis < vis2) ? vis : vis2;
916 sym->a.visibility = vis;
918 if (ad->a.aligned)
919 sym->a.aligned = ad->a.aligned;
920 if (ad->asm_label)
921 sym->asm_label = ad->asm_label;
922 update_storage(sym);
925 /* define a new external reference to a symbol 'v' */
926 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
928 Sym *s;
929 s = sym_find(v);
930 if (!s) {
931 /* push forward reference */
932 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
933 s->type.t |= VT_EXTERN;
934 s->a = ad->a;
935 s->sym_scope = 0;
936 } else {
937 if (s->type.ref == func_old_type.ref) {
938 s->type.ref = type->ref;
939 s->r = r | VT_CONST | VT_SYM;
940 s->type.t |= VT_EXTERN;
942 patch_storage(s, ad, type);
944 return s;
947 /* push a reference to global symbol v */
948 ST_FUNC void vpush_global_sym(CType *type, int v)
950 vpushsym(type, external_global_sym(v, type, 0));
953 /* save registers up to (vtop - n) stack entry */
954 ST_FUNC void save_regs(int n)
956 SValue *p, *p1;
957 for(p = vstack, p1 = vtop - n; p <= p1; p++)
958 save_reg(p->r);
961 /* save r to the memory stack, and mark it as being free */
962 ST_FUNC void save_reg(int r)
964 save_reg_upstack(r, 0);
967 /* save r to the memory stack, and mark it as being free,
968 if seen up to (vtop - n) stack entry */
969 ST_FUNC void save_reg_upstack(int r, int n)
971 int l, saved, size, align;
972 SValue *p, *p1, sv;
973 CType *type;
975 if ((r &= VT_VALMASK) >= VT_CONST)
976 return;
977 if (nocode_wanted)
978 return;
980 /* modify all stack values */
981 saved = 0;
982 l = 0;
983 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
984 if ((p->r & VT_VALMASK) == r ||
985 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
986 /* must save value on stack if not already done */
987 if (!saved) {
988 /* NOTE: must reload 'r' because r might be equal to r2 */
989 r = p->r & VT_VALMASK;
990 /* store register in the stack */
991 type = &p->type;
992 if ((p->r & VT_LVAL) ||
993 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
994 #if PTR_SIZE == 8
995 type = &char_pointer_type;
996 #else
997 type = &int_type;
998 #endif
999 size = type_size(type, &align);
1000 loc = (loc - size) & -align;
1001 sv.type.t = type->t;
1002 sv.r = VT_LOCAL | VT_LVAL;
1003 sv.c.i = loc;
1004 store(r, &sv);
1005 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1006 /* x86 specific: need to pop fp register ST0 if saved */
1007 if (r == TREG_ST0) {
1008 o(0xd8dd); /* fstp %st(0) */
1010 #endif
1011 #if PTR_SIZE == 4
1012 /* special long long case */
1013 if ((type->t & VT_BTYPE) == VT_LLONG) {
1014 sv.c.i += 4;
1015 store(p->r2, &sv);
1017 #endif
1018 l = loc;
1019 saved = 1;
1021 /* mark that stack entry as being saved on the stack */
1022 if (p->r & VT_LVAL) {
1023 /* also clear the bounded flag because the
1024 relocation address of the function was stored in
1025 p->c.i */
1026 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1027 } else {
1028 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1030 p->r2 = VT_CONST;
1031 p->c.i = l;
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for (r = 0; r < NB_REGS; r++) {
        if (reg_classes[r] & rc2) {
            int n = 0;
            for (p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
1061 /* find a free register of class 'rc'. If none, save one register */
1062 ST_FUNC int get_reg(int rc)
1064 int r;
1065 SValue *p;
1067 /* find a free register */
1068 for(r=0;r<NB_REGS;r++) {
1069 if (reg_classes[r] & rc) {
1070 if (nocode_wanted)
1071 return r;
1072 for(p=vstack;p<=vtop;p++) {
1073 if ((p->r & VT_VALMASK) == r ||
1074 (p->r2 & VT_VALMASK) == r)
1075 goto notfound;
1077 return r;
1079 notfound: ;
1082 /* no register left : free the first one on the stack (VERY
1083 IMPORTANT to start from the bottom to ensure that we don't
1084 spill registers used in gen_opi()) */
1085 for(p=vstack;p<=vtop;p++) {
1086 /* look at second register (if long long) */
1087 r = p->r2 & VT_VALMASK;
1088 if (r < VT_CONST && (reg_classes[r] & rc))
1089 goto save_found;
1090 r = p->r & VT_VALMASK;
1091 if (r < VT_CONST && (reg_classes[r] & rc)) {
1092 save_found:
1093 save_reg(r);
1094 return r;
1097 /* Should never comes here */
1098 return -1;
1101 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1102 if needed */
1103 static void move_reg(int r, int s, int t)
1105 SValue sv;
1107 if (r != s) {
1108 save_reg(r);
1109 sv.type.t = t;
1110 sv.type.ref = NULL;
1111 sv.r = s;
1112 sv.c.i = 0;
1113 load(r, &sv);
1117 /* get address of vtop (vtop MUST BE an lvalue) */
1118 ST_FUNC void gaddrof(void)
1120 vtop->r &= ~VT_LVAL;
1121 /* tricky: if saved lvalue, then we can go back to lvalue */
1122 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1123 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
1156 static void incr_bf_adr(int o)
1158 vtop->type = char_pointer_type;
1159 gaddrof();
1160 vpushi(o);
1161 gen_op('+');
1162 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1163 | (VT_BYTE|VT_UNSIGNED);
1164 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1165 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1168 /* single-byte load mode for packed or otherwise unaligned bitfields */
1169 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1171 int n, o, bits;
1172 save_reg_upstack(vtop->r, 1);
1173 vpush64(type->t & VT_BTYPE, 0); // B X
1174 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1175 do {
1176 vswap(); // X B
1177 incr_bf_adr(o);
1178 vdup(); // X B B
1179 n = 8 - bit_pos;
1180 if (n > bit_size)
1181 n = bit_size;
1182 if (bit_pos)
1183 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1184 if (n < 8)
1185 vpushi((1 << n) - 1), gen_op('&');
1186 gen_cast(type);
1187 if (bits)
1188 vpushi(bits), gen_op(TOK_SHL);
1189 vrotb(3); // B Y X
1190 gen_op('|'); // B X
1191 bits += n, bit_size -= n, o = 1;
1192 } while (bit_size);
1193 vswap(), vpop();
1194 if (!(type->t & VT_UNSIGNED)) {
1195 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1196 vpushi(n), gen_op(TOK_SHL);
1197 vpushi(n), gen_op(TOK_SAR);
1201 /* single-byte store mode for packed or otherwise unaligned bitfields */
/* On entry the vstack holds: value X to store, then the bitfield lvalue B.
   The value is written one byte at a time via incr_bf_adr(); partial bytes
   are merged read-modify-write with a mask.  Stack comments track the
   layout (X = value, B = byte lvalue, V/V1/V2/B1 = temporaries). */
1202 static void store_packed_bf(int bit_pos, int bit_size)
1204 int bits, n, o, m, c;
/* c: the stored value is a plain constant, so vdup() suffices and no
   register duplication (gv_dup) is needed per byte */
1206 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1207 vswap(); // X B
1208 save_reg_upstack(vtop->r, 1);
1209 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1210 do {
1211 incr_bf_adr(o); // X B
1212 vswap(); //B X
1213 c ? vdup() : gv_dup(); // B V X
1214 vrott(3); // X B V
/* line up the bits destined for this byte */
1215 if (bits)
1216 vpushi(bits), gen_op(TOK_SHR);
1217 if (bit_pos)
1218 vpushi(bit_pos), gen_op(TOK_SHL);
1219 n = 8 - bit_pos;
1220 if (n > bit_size)
1221 n = bit_size;
1222 if (n < 8) {
/* partial byte: keep the untouched bits of the old byte (read-modify-write) */
1223 m = ((1 << n) - 1) << bit_pos;
1224 vpushi(m), gen_op('&'); // X B V1
1225 vpushv(vtop-1); // X B V1 B
/* avoid sign-extension surprises when the mask's top bit is set */
1226 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1227 gen_op('&'); // X B V1 B1
1228 gen_op('|'); // X B V2
1230 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1231 vstore(), vpop(); // X B
1232 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1233 } while (bit_size);
1234 vpop(), vpop();
1237 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1239 int t;
1240 if (0 == sv->type.ref)
1241 return 0;
1242 t = sv->type.ref->auxtype;
1243 if (t != -1 && t != VT_STRUCT) {
1244 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1245 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1247 return t;
1250 /* store vtop a register belonging to class 'rc'. lvalues are
1251 converted to values. Cannot be used if cannot be converted to
1252 register value (such as structures). */
/* Returns the register now holding the value.  Handles, in order:
   bitfield extraction (recursing once with the field removed), float
   constants (spilled to the data section), reload decisions, two-register
   loads (long long on 32-bit targets, VT_QLONG/VT_QFLOAT on 64-bit) and
   plain one-register loads. */
1253 ST_FUNC int gv(int rc)
1255 int r, bit_pos, bit_size, size, align, rc2;
1257 /* NOTE: get_reg can modify vstack[] */
1258 if (vtop->type.t & VT_BITFIELD) {
1259 CType type;
1261 bit_pos = BIT_POS(vtop->type.t);
1262 bit_size = BIT_SIZE(vtop->type.t);
1263 /* remove bit field info to avoid loops */
1264 vtop->type.t &= ~VT_STRUCT_MASK;
1266 type.ref = NULL;
1267 type.t = vtop->type.t & VT_UNSIGNED;
1268 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1269 type.t |= VT_UNSIGNED;
/* r becomes VT_STRUCT for packed fields that need byte-wise access */
1271 r = adjust_bf(vtop, bit_pos, bit_size);
1273 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1274 type.t |= VT_LLONG;
1275 else
1276 type.t |= VT_INT;
1278 if (r == VT_STRUCT) {
1279 load_packed_bf(&type, bit_pos, bit_size);
1280 } else {
1281 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1282 /* cast to int to propagate signedness in following ops */
1283 gen_cast(&type);
1284 /* generate shifts */
1285 vpushi(bits - (bit_pos + bit_size));
1286 gen_op(TOK_SHL);
1287 vpushi(bits - bit_size);
1288 /* NOTE: transformed to SHR if unsigned */
1289 gen_op(TOK_SAR);
/* recurse now that the bitfield has been turned into a plain value */
1291 r = gv(rc);
1292 } else {
1293 if (is_float(vtop->type.t) &&
1294 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1295 unsigned long offset;
1296 /* CPUs usually cannot use float constants, so we store them
1297 generically in data segment */
1298 size = type_size(&vtop->type, &align);
1299 if (NODATA_WANTED)
1300 size = 0, align = 1;
1301 offset = section_add(data_section, size, align);
1302 vpush_ref(&vtop->type, data_section, offset, size);
1303 vswap();
1304 init_putv(&vtop->type, data_section, offset);
1305 vtop->r |= VT_LVAL;
1307 #ifdef CONFIG_TCC_BCHECK
1308 if (vtop->r & VT_MUSTBOUND)
1309 gbound();
1310 #endif
1312 r = vtop->r & VT_VALMASK;
/* rc2: class for the second register of a two-register value */
1313 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1314 #ifndef TCC_TARGET_ARM64
1315 if (rc == RC_IRET)
1316 rc2 = RC_LRET;
1317 #ifdef TCC_TARGET_X86_64
1318 else if (rc == RC_FRET)
1319 rc2 = RC_QRET;
1320 #endif
1321 #endif
1322 /* need to reload if:
1323 - constant
1324 - lvalue (need to dereference pointer)
1325 - already a register, but not in the right class */
1326 if (r >= VT_CONST
1327 || (vtop->r & VT_LVAL)
1328 || !(reg_classes[r] & rc)
1329 #if PTR_SIZE == 8
1330 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1331 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1332 #else
1333 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1334 #endif
1337 r = get_reg(rc);
1338 #if PTR_SIZE == 8
1339 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1340 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1341 #else
1342 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1343 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1344 unsigned long long ll;
1345 #endif
1346 int r2, original_type;
1347 original_type = vtop->type.t;
1348 /* two register type load : expand to two words
1349 temporarily */
1350 #if PTR_SIZE == 4
1351 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1352 /* load constant */
1353 ll = vtop->c.i;
1354 vtop->c.i = ll; /* first word */
1355 load(r, vtop);
1356 vtop->r = r; /* save register value */
1357 vpushi(ll >> 32); /* second word */
1358 } else
1359 #endif
1360 if (vtop->r & VT_LVAL) {
1361 /* We do not want to modifier the long long
1362 pointer here, so the safest (and less
1363 efficient) is to save all the other registers
1364 in the stack. XXX: totally inefficient. */
1365 #if 0
1366 save_regs(1);
1367 #else
1368 /* lvalue_save: save only if used further down the stack */
1369 save_reg_upstack(vtop->r, 1);
1370 #endif
1371 /* load from memory */
1372 vtop->type.t = load_type;
1373 load(r, vtop);
1374 vdup();
1375 vtop[-1].r = r; /* save register value */
1376 /* increment pointer to get second word */
1377 vtop->type.t = addr_type;
1378 gaddrof();
1379 vpushi(load_size);
1380 gen_op('+');
1381 vtop->r |= VT_LVAL;
1382 vtop->type.t = load_type;
1383 } else {
1384 /* move registers */
1385 load(r, vtop);
1386 vdup();
1387 vtop[-1].r = r; /* save register value */
1388 vtop->r = vtop[-1].r2;
1390 /* Allocate second register. Here we rely on the fact that
1391 get_reg() tries first to free r2 of an SValue. */
1392 r2 = get_reg(rc2);
1393 load(r2, vtop);
1394 vpop();
1395 /* write second register */
1396 vtop->r2 = r2;
1397 vtop->type.t = original_type;
1398 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1399 int t1, t;
1400 /* lvalue of scalar type : need to use lvalue type
1401 because of possible cast */
1402 t = vtop->type.t;
1403 t1 = t;
1404 /* compute memory access type */
1405 if (vtop->r & VT_LVAL_BYTE)
1406 t = VT_BYTE;
1407 else if (vtop->r & VT_LVAL_SHORT)
1408 t = VT_SHORT;
1409 if (vtop->r & VT_LVAL_UNSIGNED)
1410 t |= VT_UNSIGNED;
1411 vtop->type.t = t;
1412 load(r, vtop);
1413 /* restore wanted type */
1414 vtop->type.t = t1;
1415 } else {
1416 /* one register type load */
1417 load(r, vtop);
1420 vtop->r = r;
1421 #ifdef TCC_TARGET_C67
1422 /* uses register pairs for doubles */
1423 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1424 vtop->r2 = r+1;
1425 #endif
1427 return r;
1430 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
/* The more generic class is materialized first so the second gv() is
   less likely to evict it; if it was evicted anyway, it is reloaded.
   VT_CMP/VT_JMP values must always be generated first (flags/jumps
   cannot survive another code-generating gv()). */
1431 ST_FUNC void gv2(int rc1, int rc2)
1433 int v;
1435 /* generate more generic register first. But VT_JMP or VT_CMP
1436 values must be generated first in all cases to avoid possible
1437 reload errors */
1438 v = vtop[0].r & VT_VALMASK;
1439 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1440 vswap();
1441 gv(rc1);
1442 vswap();
1443 gv(rc2);
1444 /* test if reload is needed for first register */
/* >= VT_CONST means the value is no longer in a register */
1445 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1446 vswap();
1447 gv(rc1);
1448 vswap();
1450 } else {
1451 gv(rc2);
1452 vswap();
1453 gv(rc1);
1454 vswap();
1455 /* test if reload is needed for first register */
1456 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1457 gv(rc2);
1462 #ifndef TCC_TARGET_ARM64
1463 /* wrapper around RC_FRET to return a register by type */
/* On x86-64, long double is returned on the x87 stack (ST0) rather
   than in the SSE return register. */
1464 static int rc_fret(int t)
1466 #ifdef TCC_TARGET_X86_64
1467 if (t == VT_LDOUBLE) {
1468 return RC_ST0;
1470 #endif
1471 return RC_FRET;
1473 #endif
1475 /* wrapper around REG_FRET to return a register by type */
/* Same special case as rc_fret(): x86-64 returns long double in ST0. */
1476 static int reg_fret(int t)
1478 #ifdef TCC_TARGET_X86_64
1479 if (t == VT_LDOUBLE) {
1480 return TREG_ST0;
1482 #endif
1483 return REG_FRET;
1486 #if PTR_SIZE == 4
1487 /* expand 64bit on stack in two ints */
/* Replaces the 64-bit value on top of the vstack by two VT_INT entries:
   low word below, high word on top.  Constants are split arithmetically,
   memory lvalues by offsetting the address by 4, anything else by
   materializing the value in a register pair. */
1488 static void lexpand(void)
1490 int u, v;
/* preserve the signedness flags for the two resulting ints */
1491 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1492 v = vtop->r & (VT_VALMASK | VT_LVAL);
1493 if (v == VT_CONST) {
1494 vdup();
1495 vtop[0].c.i >>= 32;
1496 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1497 vdup();
/* second lvalue addresses the high word, 4 bytes further */
1498 vtop[0].c.i += 4;
1499 } else {
1500 gv(RC_INT);
1501 vdup();
1502 vtop[0].r = vtop[-1].r2;
1503 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1505 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1507 #endif
1509 #ifdef TCC_TARGET_ARM
1510 /* expand long long on stack */
/* ARM-only variant of lexpand() that avoids forcing the value into
   registers when possible (nr = "no register").  Result: low word in
   vtop[-1], high word in vtop[0], both VT_INT with original signedness. */
1511 ST_FUNC void lexpand_nr(void)
1513 int u,v;
1515 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1516 vdup();
1517 vtop->r2 = VT_CONST;
1518 vtop->type.t = VT_INT | u;
1519 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1520 if (v == VT_CONST) {
1521 vtop[-1].c.i = vtop->c.i;
1522 vtop->c.i = vtop->c.i >> 32;
1523 vtop->r = VT_CONST;
1524 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
/* high word lives 4 bytes past the low word in memory */
1525 vtop->c.i += 4;
1526 vtop->r = vtop[-1].r;
1527 } else if (v > VT_CONST) {
/* value is not in a register and not a simple lvalue: fall back */
1528 vtop--;
1529 lexpand();
1530 } else
1531 vtop->r = vtop[-1].r2;
1532 vtop[-1].r2 = VT_CONST;
1533 vtop[-1].type.t = VT_INT | u;
1535 #endif
1537 #if PTR_SIZE == 4
1538 /* build a long long from two ints */
/* Inverse of lexpand(): combines vtop[-1] (low) and vtop[0] (high) into
   a single two-register value of type 't' and pops the high entry. */
1539 static void lbuild(int t)
1541 gv2(RC_INT, RC_INT);
1542 vtop[-1].r2 = vtop[0].r;
1543 vtop[-1].type.t = t;
1544 vpop();
1546 #endif
1548 /* convert stack entry to register and duplicate its value in another
1549 register */
/* After this, vtop[-1] holds the value in register r and vtop[0] holds a
   copy in a distinct register r1.  On 32-bit targets a long long is
   expanded to word pairs, each word duplicated, then rebuilt. */
1550 static void gv_dup(void)
1552 int rc, t, r, r1;
1553 SValue sv;
1555 t = vtop->type.t;
1556 #if PTR_SIZE == 4
1557 if ((t & VT_BTYPE) == VT_LLONG) {
1558 if (t & VT_BITFIELD) {
/* extract the bitfield first so we duplicate a plain value */
1559 gv(RC_INT);
1560 t = vtop->type.t;
1562 lexpand();
1563 gv_dup();
1564 vswap();
1565 vrotb(3);
1566 gv_dup();
1567 vrotb(4);
1568 /* stack: H L L1 H1 */
1569 lbuild(t);
1570 vrotb(3);
1571 vrotb(3);
1572 vswap();
1573 lbuild(t);
1574 vswap();
1575 } else
1576 #endif
1578 /* duplicate value */
1579 rc = RC_INT;
1580 sv.type.t = VT_INT;
1581 if (is_float(t)) {
1582 rc = RC_FLOAT;
1583 #ifdef TCC_TARGET_X86_64
1584 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1585 rc = RC_ST0;
1587 #endif
1588 sv.type.t = t;
1590 r = gv(rc);
1591 r1 = get_reg(rc);
1592 sv.r = r;
1593 sv.c.i = 0;
1594 load(r1, &sv); /* move r to r1 */
1595 vdup();
1596 /* duplicates value */
1597 if (r != r1)
1598 vtop->r = r1;
1602 /* Generate value test
1604 * Generate a test for any value (jump, comparison and integers) */
/* 'inv' inverts the sense of the test; 't' is the head of the jump
   chain to extend.  Returns the (possibly new) jump chain.  Constant
   operands are folded into either an unconditional jump or nothing. */
1605 ST_FUNC int gvtst(int inv, int t)
1607 int v = vtop->r & VT_VALMASK;
/* plain value: turn it into a comparison against zero first */
1608 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1609 vpushi(0);
1610 gen_op(TOK_NE);
1612 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1613 /* constant jmp optimization */
1614 if ((vtop->c.i != 0) != inv)
1615 t = gjmp(t);
1616 vtop--;
1617 return t;
1619 return gtst(inv, t);
1622 #if PTR_SIZE == 4
1623 /* generate CPU independent (unsigned) long long operations */
/* 32-bit targets only: lowers a 64-bit binary op on the two top vstack
   entries.  Division/modulo and variable shifts call libgcc-style
   helpers (__divdi3, __ashldi3, ...); add/sub/mul/bitops are expanded
   to word-pair sequences; comparisons compare the high words first and
   fall back to an unsigned low-word compare on equality. */
1624 static void gen_opl(int op)
1626 int t, a, b, op1, c, i;
1627 int func;
1628 unsigned short reg_iret = REG_IRET;
1629 unsigned short reg_lret = REG_LRET;
1630 SValue tmp;
1632 switch(op) {
1633 case '/':
1634 case TOK_PDIV:
1635 func = TOK___divdi3;
1636 goto gen_func;
1637 case TOK_UDIV:
1638 func = TOK___udivdi3;
1639 goto gen_func;
1640 case '%':
1641 func = TOK___moddi3;
1642 goto gen_mod_func;
1643 case TOK_UMOD:
1644 func = TOK___umoddi3;
1645 gen_mod_func:
1646 #ifdef TCC_ARM_EABI
/* EABI aeabi_ldivmod returns the remainder in r2/r3 */
1647 reg_iret = TREG_R2;
1648 reg_lret = TREG_R3;
1649 #endif
1650 gen_func:
1651 /* call generic long long function */
1652 vpush_global_sym(&func_old_type, func);
1653 vrott(3);
1654 gfunc_call(2);
1655 vpushi(0);
1656 vtop->r = reg_iret;
1657 vtop->r2 = reg_lret;
1658 break;
1659 case '^':
1660 case '&':
1661 case '|':
1662 case '*':
1663 case '+':
1664 case '-':
1665 //pv("gen_opl A",0,2);
1666 t = vtop->type.t;
1667 vswap();
1668 lexpand();
1669 vrotb(3);
1670 lexpand();
1671 /* stack: L1 H1 L2 H2 */
1672 tmp = vtop[0];
1673 vtop[0] = vtop[-3];
1674 vtop[-3] = tmp;
1675 tmp = vtop[-2];
1676 vtop[-2] = vtop[-3];
1677 vtop[-3] = tmp;
1678 vswap();
1679 /* stack: H1 H2 L1 L2 */
1680 //pv("gen_opl B",0,4);
1681 if (op == '*') {
/* 64x64 multiply: low = umull(L1,L2), high = MH + H1*L2 + H2*L1 */
1682 vpushv(vtop - 1);
1683 vpushv(vtop - 1);
1684 gen_op(TOK_UMULL);
1685 lexpand();
1686 /* stack: H1 H2 L1 L2 ML MH */
1687 for(i=0;i<4;i++)
1688 vrotb(6);
1689 /* stack: ML MH H1 H2 L1 L2 */
1690 tmp = vtop[0];
1691 vtop[0] = vtop[-2];
1692 vtop[-2] = tmp;
1693 /* stack: ML MH H1 L2 H2 L1 */
1694 gen_op('*');
1695 vrotb(3);
1696 vrotb(3);
1697 gen_op('*');
1698 /* stack: ML MH M1 M2 */
1699 gen_op('+');
1700 gen_op('+');
1701 } else if (op == '+' || op == '-') {
1702 /* XXX: add non carry method too (for MIPS or alpha) */
1703 if (op == '+')
1704 op1 = TOK_ADDC1;
1705 else
1706 op1 = TOK_SUBC1;
1707 gen_op(op1);
1708 /* stack: H1 H2 (L1 op L2) */
1709 vrotb(3);
1710 vrotb(3);
1711 gen_op(op1 + 1); /* TOK_xxxC2 */
1712 } else {
/* bitwise ops are applied to the word halves independently */
1713 gen_op(op);
1714 /* stack: H1 H2 (L1 op L2) */
1715 vrotb(3);
1716 vrotb(3);
1717 /* stack: (L1 op L2) H1 H2 */
1718 gen_op(op);
1719 /* stack: (L1 op L2) (H1 op H2) */
1721 /* stack: L H */
1722 lbuild(t);
1723 break;
1724 case TOK_SAR:
1725 case TOK_SHR:
1726 case TOK_SHL:
1727 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1728 t = vtop[-1].type.t;
1729 vswap();
1730 lexpand();
1731 vrotb(3);
1732 /* stack: L H shift */
1733 c = (int)vtop->c.i;
1734 /* constant: simpler */
1735 /* NOTE: all comments are for SHL. the other cases are
1736 done by swapping words */
1737 vpop();
1738 if (op != TOK_SHL)
1739 vswap();
1740 if (c >= 32) {
1741 /* stack: L H */
1742 vpop();
1743 if (c > 32) {
1744 vpushi(c - 32);
1745 gen_op(op);
1747 if (op != TOK_SAR) {
1748 vpushi(0);
1749 } else {
/* SAR >= 32: high word becomes the replicated sign bit */
1750 gv_dup();
1751 vpushi(31);
1752 gen_op(TOK_SAR);
1754 vswap();
1755 } else {
1756 vswap();
1757 gv_dup();
1758 /* stack: H L L */
1759 vpushi(c);
1760 gen_op(op);
1761 vswap();
1762 vpushi(32 - c);
1763 if (op == TOK_SHL)
1764 gen_op(TOK_SHR);
1765 else
1766 gen_op(TOK_SHL);
1767 vrotb(3);
1768 /* stack: L L H */
1769 vpushi(c);
1770 if (op == TOK_SHL)
1771 gen_op(TOK_SHL);
1772 else
1773 gen_op(TOK_SHR);
1774 gen_op('|');
1776 if (op != TOK_SHL)
1777 vswap();
1778 lbuild(t);
1779 } else {
1780 /* XXX: should provide a faster fallback on x86 ? */
/* non-constant shift count: call the libgcc helper */
1781 switch(op) {
1782 case TOK_SAR:
1783 func = TOK___ashrdi3;
1784 goto gen_func;
1785 case TOK_SHR:
1786 func = TOK___lshrdi3;
1787 goto gen_func;
1788 case TOK_SHL:
1789 func = TOK___ashldi3;
1790 goto gen_func;
1793 break;
1794 default:
1795 /* compare operations */
1796 t = vtop->type.t;
1797 vswap();
1798 lexpand();
1799 vrotb(3);
1800 lexpand();
1801 /* stack: L1 H1 L2 H2 */
1802 tmp = vtop[-1];
1803 vtop[-1] = vtop[-2];
1804 vtop[-2] = tmp;
1805 /* stack: L1 L2 H1 H2 */
1806 /* compare high */
1807 op1 = op;
1808 /* when values are equal, we need to compare low words. since
1809 the jump is inverted, we invert the test too. */
1810 if (op1 == TOK_LT)
1811 op1 = TOK_LE;
1812 else if (op1 == TOK_GT)
1813 op1 = TOK_GE;
1814 else if (op1 == TOK_ULT)
1815 op1 = TOK_ULE;
1816 else if (op1 == TOK_UGT)
1817 op1 = TOK_UGE;
1818 a = 0;
1819 b = 0;
1820 gen_op(op1);
1821 if (op == TOK_NE) {
1822 b = gvtst(0, 0);
1823 } else {
1824 a = gvtst(1, 0);
1825 if (op != TOK_EQ) {
1826 /* generate non equal test */
1827 vpushi(TOK_NE);
1828 vtop->r = VT_CMP;
1829 b = gvtst(0, 0);
1832 /* compare low. Always unsigned */
1833 op1 = op;
1834 if (op1 == TOK_LT)
1835 op1 = TOK_ULT;
1836 else if (op1 == TOK_LE)
1837 op1 = TOK_ULE;
1838 else if (op1 == TOK_GT)
1839 op1 = TOK_UGT;
1840 else if (op1 == TOK_GE)
1841 op1 = TOK_UGE;
1842 gen_op(op1);
1843 a = gvtst(1, a);
1844 gsym(b);
1845 vseti(VT_JMPI, a);
1846 break;
1849 #endif
/* Signed 64-bit division evaluated on unsigned operands: divide the
   magnitudes, then negate the quotient when exactly one operand was
   negative.  Avoids signed-overflow UB (e.g. INT64_MIN / -1 in host
   arithmetic) during constant folding. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    int neg_a = a >> 63;
    int neg_b = b >> 63;
    uint64_t q = (neg_a ? -a : a) / (neg_b ? -b : b);
    return neg_a != neg_b ? -q : q;
}
/* Signed 64-bit '<' computed on unsigned bit patterns: XOR-ing both
   operands with the sign bit maps the signed order onto the unsigned
   order, so a plain unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    a ^= sign_bit;
    b ^= sign_bit;
    return a < b;
}
1862 /* handle integer constant optimizations and various machine
1863 independent opt */
/* Consumes the top two vstack entries for integer op 'op': folds
   constant operands, applies algebraic identities (x*1, x+0, x&-1,
   0<<x, ...), turns multiplications/divisions by powers of two into
   shifts, folds constants into symbol+offset addends, and otherwise
   emits code via gen_opi()/gen_opl(). */
1864 static void gen_opic(int op)
1866 SValue *v1 = vtop - 1;
1867 SValue *v2 = vtop;
1868 int t1 = v1->type.t & VT_BTYPE;
1869 int t2 = v2->type.t & VT_BTYPE;
1870 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1871 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1872 uint64_t l1 = c1 ? v1->c.i : 0;
1873 uint64_t l2 = c2 ? v2->c.i : 0;
/* legal shift-count mask for the operand width */
1874 int shm = (t1 == VT_LLONG) ? 63 : 31;
/* normalize 32-bit operands: truncate and sign-extend as appropriate */
1876 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1877 l1 = ((uint32_t)l1 |
1878 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1879 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1880 l2 = ((uint32_t)l2 |
1881 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1883 if (c1 && c2) {
/* both operands constant: fold at compile time */
1884 switch(op) {
1885 case '+': l1 += l2; break;
1886 case '-': l1 -= l2; break;
1887 case '&': l1 &= l2; break;
1888 case '^': l1 ^= l2; break;
1889 case '|': l1 |= l2; break;
1890 case '*': l1 *= l2; break;
1892 case TOK_PDIV:
1893 case '/':
1894 case '%':
1895 case TOK_UDIV:
1896 case TOK_UMOD:
1897 /* if division by zero, generate explicit division */
1898 if (l2 == 0) {
1899 if (const_wanted)
1900 tcc_error("division by zero in constant");
1901 goto general_case;
1903 switch(op) {
1904 default: l1 = gen_opic_sdiv(l1, l2); break;
1905 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1906 case TOK_UDIV: l1 = l1 / l2; break;
1907 case TOK_UMOD: l1 = l1 % l2; break;
1909 break;
1910 case TOK_SHL: l1 <<= (l2 & shm); break;
1911 case TOK_SHR: l1 >>= (l2 & shm); break;
1912 case TOK_SAR:
1913 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1914 break;
1915 /* tests */
1916 case TOK_ULT: l1 = l1 < l2; break;
1917 case TOK_UGE: l1 = l1 >= l2; break;
1918 case TOK_EQ: l1 = l1 == l2; break;
1919 case TOK_NE: l1 = l1 != l2; break;
1920 case TOK_ULE: l1 = l1 <= l2; break;
1921 case TOK_UGT: l1 = l1 > l2; break;
1922 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1923 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1924 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1925 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1926 /* logical */
1927 case TOK_LAND: l1 = l1 && l2; break;
1928 case TOK_LOR: l1 = l1 || l2; break;
1929 default:
1930 goto general_case;
1932 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1933 l1 = ((uint32_t)l1 |
1934 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1935 v1->c.i = l1;
1936 vtop--;
1937 } else {
1938 /* if commutative ops, put c2 as constant */
1939 if (c1 && (op == '+' || op == '&' || op == '^' ||
1940 op == '|' || op == '*')) {
1941 vswap();
1942 c2 = c1; //c = c1, c1 = c2, c2 = c;
1943 l2 = l1; //l = l1, l1 = l2, l2 = l;
1945 if (!const_wanted &&
1946 c1 && ((l1 == 0 &&
1947 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1948 (l1 == -1 && op == TOK_SAR))) {
1949 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1950 vtop--;
1951 } else if (!const_wanted &&
1952 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1953 (op == '|' &&
1954 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1955 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1956 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1957 if (l2 == 1)
1958 vtop->c.i = 0;
1959 vswap();
1960 vtop--;
1961 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1962 op == TOK_PDIV) &&
1963 l2 == 1) ||
1964 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1965 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1966 l2 == 0) ||
1967 (op == '&' &&
1968 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1969 /* filter out NOP operations like x*1, x-0, x&-1... */
1970 vtop--;
1971 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1972 /* try to use shifts instead of muls or divs */
1973 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
/* l2 is a power of two: n = log2(l2) */
1974 int n = -1;
1975 while (l2) {
1976 l2 >>= 1;
1977 n++;
1979 vtop->c.i = n;
1980 if (op == '*')
1981 op = TOK_SHL;
1982 else if (op == TOK_PDIV)
1983 op = TOK_SAR;
1984 else
1985 op = TOK_SHR;
1987 goto general_case;
1988 } else if (c2 && (op == '+' || op == '-') &&
1989 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1990 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1991 /* symbol + constant case */
1992 if (op == '-')
1993 l2 = -l2;
1994 l2 += vtop[-1].c.i;
1995 /* The backends can't always deal with addends to symbols
1996 larger than +-1<<31. Don't construct such. */
1997 if ((int)l2 != l2)
1998 goto general_case;
1999 vtop--;
2000 vtop->c.i = l2;
2001 } else {
2002 general_case:
2003 /* call low level op generator */
2004 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2005 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2006 gen_opl(op);
2007 else
2008 gen_opi(op);
2013 /* generate a floating point operation with constant propagation */
/* Folds the op when both operands are finite constants (folding NaN or
   infinity is avoided per the ANSI note below); division by zero is
   only folded inside constant expressions, otherwise code is emitted so
   FP exceptions can occur at runtime. */
2014 static void gen_opif(int op)
2016 int c1, c2;
2017 SValue *v1, *v2;
2018 #if defined _MSC_VER && defined _AMD64_
2019 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2020 volatile
2021 #endif
2022 long double f1, f2;
2024 v1 = vtop - 1;
2025 v2 = vtop;
2026 /* currently, we cannot do computations with forward symbols */
2027 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2028 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2029 if (c1 && c2) {
/* widen both constants to long double for the computation */
2030 if (v1->type.t == VT_FLOAT) {
2031 f1 = v1->c.f;
2032 f2 = v2->c.f;
2033 } else if (v1->type.t == VT_DOUBLE) {
2034 f1 = v1->c.d;
2035 f2 = v2->c.d;
2036 } else {
2037 f1 = v1->c.ld;
2038 f2 = v2->c.ld;
2041 /* NOTE: we only do constant propagation if finite number (not
2042 NaN or infinity) (ANSI spec) */
2043 if (!ieee_finite(f1) || !ieee_finite(f2))
2044 goto general_case;
2046 switch(op) {
2047 case '+': f1 += f2; break;
2048 case '-': f1 -= f2; break;
2049 case '*': f1 *= f2; break;
2050 case '/':
2051 if (f2 == 0.0) {
2052 /* If not in initializer we need to potentially generate
2053 FP exceptions at runtime, otherwise we want to fold. */
2054 if (!const_wanted)
2055 goto general_case;
2057 f1 /= f2;
2058 break;
2059 /* XXX: also handles tests ? */
2060 default:
2061 goto general_case;
2063 /* XXX: overflow test ? */
/* write the result back in the operand's own precision */
2064 if (v1->type.t == VT_FLOAT) {
2065 v1->c.f = f1;
2066 } else if (v1->type.t == VT_DOUBLE) {
2067 v1->c.d = f1;
2068 } else {
2069 v1->c.ld = f1;
2071 vtop--;
2072 } else {
2073 general_case:
2074 gen_opf(op);
/* Size in bytes of the type 'type' points to (alignment is discarded);
   type_size() may return a negative value for incomplete types. */
2078 static int pointed_size(CType *type)
2080 int align;
2081 return type_size(pointed_type(type), &align);
/* Push code computing, at run time, the size of the VLA element type
   that 'type' points to (result left on the value stack). */
2084 static void vla_runtime_pointed_size(CType *type)
2086 int align;
2087 vla_runtime_type_size(pointed_type(type), &align);
2090 static inline int is_null_pointer(SValue *p)
2092 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2093 return 0;
2094 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2095 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2096 ((p->type.t & VT_BTYPE) == VT_PTR &&
2097 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2100 static inline int is_integer_btype(int bt)
2102 return (bt == VT_BYTE || bt == VT_SHORT ||
2103 bt == VT_INT || bt == VT_LLONG);
2106 /* check types for comparison or subtraction of pointers */
/* Diagnostics only (warnings or a fatal error); the vstack is not
   modified.  'op' is the operator being applied to *p1 and *p2. */
2107 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2109 CType *type1, *type2, tmp_type1, tmp_type2;
2110 int bt1, bt2;
2112 /* null pointers are accepted for all comparisons as gcc */
2113 if (is_null_pointer(p1) || is_null_pointer(p2))
2114 return;
2115 type1 = &p1->type;
2116 type2 = &p2->type;
2117 bt1 = type1->t & VT_BTYPE;
2118 bt2 = type2->t & VT_BTYPE;
2119 /* accept comparison between pointer and integer with a warning */
2120 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2121 if (op != TOK_LOR && op != TOK_LAND )
2122 tcc_warning("comparison between pointer and integer");
2123 return;
2126 /* both must be pointers or implicit function pointers */
2127 if (bt1 == VT_PTR) {
2128 type1 = pointed_type(type1);
2129 } else if (bt1 != VT_FUNC)
2130 goto invalid_operands;
2132 if (bt2 == VT_PTR) {
2133 type2 = pointed_type(type2);
2134 } else if (bt2 != VT_FUNC) {
2135 invalid_operands:
2136 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
/* void* compares with any pointer type without complaint */
2138 if ((type1->t & VT_BTYPE) == VT_VOID ||
2139 (type2->t & VT_BTYPE) == VT_VOID)
2140 return;
2141 tmp_type1 = *type1;
2142 tmp_type2 = *type2;
/* signedness and qualifiers do not affect pointer compatibility here */
2143 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2144 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2145 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2146 /* gcc-like error if '-' is used */
2147 if (op == '-')
2148 goto invalid_operands;
2149 else
2150 tcc_warning("comparison of distinct pointer types lacks a cast");
2154 /* generic gen_op: handles types problems */
/* Top-level binary operator generator: resolves the common type of the
   two top vstack entries (pointer arithmetic and scaling, the usual
   arithmetic conversions, signed->unsigned operator rewriting), casts
   both operands, then dispatches to gen_opif()/gen_opic(). */
2155 ST_FUNC void gen_op(int op)
2157 int u, t1, t2, bt1, bt2, t;
2158 CType type1;
2160 redo:
2161 t1 = vtop[-1].type.t;
2162 t2 = vtop[0].type.t;
2163 bt1 = t1 & VT_BTYPE;
2164 bt2 = t2 & VT_BTYPE;
2166 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2167 tcc_error("operation on a struct");
2168 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
/* functions decay to pointers-to-function, then retry */
2169 if (bt2 == VT_FUNC) {
2170 mk_pointer(&vtop->type);
2171 gaddrof();
2173 if (bt1 == VT_FUNC) {
2174 vswap();
2175 mk_pointer(&vtop->type);
2176 gaddrof();
2177 vswap();
2179 goto redo;
2180 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2181 /* at least one operand is a pointer */
2182 /* relational op: must be both pointers */
2183 if (op >= TOK_ULT && op <= TOK_LOR) {
2184 check_comparison_pointer_types(vtop - 1, vtop, op);
2185 /* pointers are handled are unsigned */
2186 #if PTR_SIZE == 8
2187 t = VT_LLONG | VT_UNSIGNED;
2188 #else
2189 t = VT_INT | VT_UNSIGNED;
2190 #endif
2191 goto std_op;
2193 /* if both pointers, then it must be the '-' op */
2194 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2195 if (op != '-')
2196 tcc_error("cannot use pointers here");
2197 check_comparison_pointer_types(vtop - 1, vtop, op);
2198 /* XXX: check that types are compatible */
/* ptr - ptr: subtract then divide by the element size */
2199 if (vtop[-1].type.t & VT_VLA) {
2200 vla_runtime_pointed_size(&vtop[-1].type);
2201 } else {
2202 vpushi(pointed_size(&vtop[-1].type));
2204 vrott(3);
2205 gen_opic(op);
2206 vtop->type.t = ptrdiff_type.t;
2207 vswap();
2208 gen_op(TOK_PDIV);
2209 } else {
2210 /* exactly one pointer : must be '+' or '-'. */
2211 if (op != '-' && op != '+')
2212 tcc_error("cannot use pointers here");
2213 /* Put pointer as first operand */
2214 if (bt2 == VT_PTR) {
2215 vswap();
2216 t = t1, t1 = t2, t2 = t;
2218 #if PTR_SIZE == 4
2219 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2220 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2221 gen_cast_s(VT_INT);
2222 #endif
2223 type1 = vtop[-1].type;
2224 type1.t &= ~VT_ARRAY;
/* scale the integer operand by the pointed-to size */
2225 if (vtop[-1].type.t & VT_VLA)
2226 vla_runtime_pointed_size(&vtop[-1].type);
2227 else {
2228 u = pointed_size(&vtop[-1].type);
2229 if (u < 0)
2230 tcc_error("unknown array element size");
2231 #if PTR_SIZE == 8
2232 vpushll(u);
2233 #else
2234 /* XXX: cast to int ? (long long case) */
2235 vpushi(u);
2236 #endif
2238 gen_op('*');
2239 #if 0
2240 /* #ifdef CONFIG_TCC_BCHECK
2241 The main reason to removing this code:
2242 #include <stdio.h>
2243 int main ()
2245 int v[10];
2246 int i = 10;
2247 int j = 9;
2248 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2249 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2251 When this code is on. then the output looks like
2252 v+i-j = 0xfffffffe
2253 v+(i-j) = 0xbff84000
2255 /* if evaluating constant expression, no code should be
2256 generated, so no bound check */
2257 if (tcc_state->do_bounds_check && !const_wanted) {
2258 /* if bounded pointers, we generate a special code to
2259 test bounds */
2260 if (op == '-') {
2261 vpushi(0);
2262 vswap();
2263 gen_op('-');
2265 gen_bounded_ptr_add();
2266 } else
2267 #endif
2269 gen_opic(op);
2271 /* put again type if gen_opic() swaped operands */
2272 vtop->type = type1;
2274 } else if (is_float(bt1) || is_float(bt2)) {
2275 /* compute bigger type and do implicit casts */
2276 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2277 t = VT_LDOUBLE;
2278 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2279 t = VT_DOUBLE;
2280 } else {
2281 t = VT_FLOAT;
2283 /* floats can only be used for a few operations */
2284 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2285 (op < TOK_ULT || op > TOK_GT))
2286 tcc_error("invalid operands for binary operation");
2287 goto std_op;
2288 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
/* shift result type depends on the left operand only */
2289 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2290 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2291 t |= VT_UNSIGNED;
2292 t |= (VT_LONG & t1);
2293 goto std_op;
2294 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2295 /* cast to biggest op */
2296 t = VT_LLONG | VT_LONG;
2297 if (bt1 == VT_LLONG)
2298 t &= t1;
2299 if (bt2 == VT_LLONG)
2300 t &= t2;
2301 /* convert to unsigned if it does not fit in a long long */
2302 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2303 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2304 t |= VT_UNSIGNED;
2305 goto std_op;
2306 } else {
2307 /* integer operations */
2308 t = VT_INT | (VT_LONG & (t1 | t2));
2309 /* convert to unsigned if it does not fit in an integer */
2310 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2311 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2312 t |= VT_UNSIGNED;
2313 std_op:
2314 /* XXX: currently, some unsigned operations are explicit, so
2315 we modify them here */
2316 if (t & VT_UNSIGNED) {
2317 if (op == TOK_SAR)
2318 op = TOK_SHR;
2319 else if (op == '/')
2320 op = TOK_UDIV;
2321 else if (op == '%')
2322 op = TOK_UMOD;
2323 else if (op == TOK_LT)
2324 op = TOK_ULT;
2325 else if (op == TOK_GT)
2326 op = TOK_UGT;
2327 else if (op == TOK_LE)
2328 op = TOK_ULE;
2329 else if (op == TOK_GE)
2330 op = TOK_UGE;
2332 vswap();
2333 type1.t = t;
2334 type1.ref = NULL;
2335 gen_cast(&type1);
2336 vswap();
2337 /* special case for shifts and long long: we keep the shift as
2338 an integer */
2339 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2340 type1.t = VT_INT;
2341 gen_cast(&type1);
2342 if (is_float(t))
2343 gen_opif(op);
2344 else
2345 gen_opic(op);
2346 if (op >= TOK_ULT && op <= TOK_GT) {
2347 /* relational op: the result is an int */
2348 vtop->type.t = VT_INT;
2349 } else {
2350 vtop->type.t = t;
2353 // Make sure that we have converted to an rvalue:
2354 if (vtop->r & VT_LVAL)
2355 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2358 #ifndef TCC_TARGET_ARM
2359 /* generic itof for unsigned long long case */
/* Integer-to-float conversion: unsigned long long has no native path
   on most targets, so it goes through the libgcc __floatundi* helpers;
   everything else is delegated to the backend's gen_cvt_itof(). */
2360 static void gen_cvt_itof1(int t)
2362 #ifdef TCC_TARGET_ARM64
2363 gen_cvt_itof(t);
2364 #else
2365 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2366 (VT_LLONG | VT_UNSIGNED)) {
2368 if (t == VT_FLOAT)
2369 vpush_global_sym(&func_old_type, TOK___floatundisf);
2370 #if LDOUBLE_SIZE != 8
2371 else if (t == VT_LDOUBLE)
2372 vpush_global_sym(&func_old_type, TOK___floatundixf);
2373 #endif
2374 else
2375 vpush_global_sym(&func_old_type, TOK___floatundidf);
2376 vrott(2);
2377 gfunc_call(1);
2378 vpushi(0);
/* result comes back in the float return register for type t */
2379 vtop->r = reg_fret(t);
2380 } else {
2381 gen_cvt_itof(t);
2383 #endif
2385 #endif
2387 /* generic ftoi for unsigned long long case */
/* Float-to-integer conversion: the (unsigned long long) target type is
   not handled natively, so the libgcc __fixuns*di helpers are called;
   all other targets go straight to the backend's gen_cvt_ftoi(). */
2388 static void gen_cvt_ftoi1(int t)
2390 #ifdef TCC_TARGET_ARM64
2391 gen_cvt_ftoi(t);
2392 #else
2393 int st;
2395 if (t == (VT_LLONG | VT_UNSIGNED)) {
2396 /* not handled natively */
/* pick the helper matching the source float type */
2397 st = vtop->type.t & VT_BTYPE;
2398 if (st == VT_FLOAT)
2399 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2400 #if LDOUBLE_SIZE != 8
2401 else if (st == VT_LDOUBLE)
2402 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2403 #endif
2404 else
2405 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2406 vrott(2);
2407 gfunc_call(1);
2408 vpushi(0);
/* 64-bit result in the integer return register pair */
2409 vtop->r = REG_IRET;
2410 vtop->r2 = REG_LRET;
2411 } else {
2412 gen_cvt_ftoi(t);
2414 #endif
2417 /* force char or short cast */
/* Truncate the value on top of the vstack to char/short semantics for
   destination type 't': mask for unsigned targets, shift-left then
   arithmetic-shift-right to sign-extend for signed targets. */
2418 static void force_charshort_cast(int t)
2420 int bits, dbt;
2422 /* cannot cast static initializers */
2423 if (STATIC_DATA_WANTED)
2424 return;
2426 dbt = t & VT_BTYPE;
2427 /* XXX: add optimization if lvalue : just change type and offset */
2428 if (dbt == VT_BYTE)
2429 bits = 8;
2430 else
2431 bits = 16;
2432 if (t & VT_UNSIGNED) {
2433 vpushi((1 << bits) - 1);
2434 gen_op('&');
2435 } else {
2436 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2437 bits = 64 - bits;
2438 else
2439 bits = 32 - bits;
2440 vpushi(bits);
2441 gen_op(TOK_SHL);
2442 /* result must be signed or the SAR is converted to an SHL
2443 This was not the case when "t" was a signed short
2444 and the last value on the stack was an unsigned int */
2445 vtop->type.t &= ~VT_UNSIGNED;
2446 vpushi(bits);
2447 gen_op(TOK_SAR);
2451 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2452 static void gen_cast_s(int t)
2454 CType type;
2455 type.t = t;
2456 type.ref = NULL;
2457 gen_cast(&type);
/* Cast the value on top of the stack to '*type'.  Constant operands are
   folded at compile time; otherwise conversion code is generated.
   Pointer-to-pointer casts only adjust the VT_LVAL_xxx size bits. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c, p;

    /* special delayed cast for char/short */
    /* XXX: in some cases (multiple cascaded casts), it may still
       be incorrect */
    if (vtop->r & VT_MUSTCAST) {
        vtop->r &= ~VT_MUSTCAST;
        force_charshort_cast(vtop->type.t);
    }

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD) {
        gv(RC_INT);
    }

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);

    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        /* c: plain constant; p: constant with a symbolic address (VT_SYM) */
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
        p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* cross-compiling: cannot fold to host long double reliably */
        c &= dbt != VT_LDOUBLE;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            /* normalize any float constant to long double first */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if ((sbt & VT_BTYPE) == VT_LLONG) {
                    /* negate-twice trick avoids signed/unsigned pitfalls
                       when the top bit is set */
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
                vtop->c.i = vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer <-> integer: first widen/normalize the source */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt == (VT_LLONG|VT_UNSIGNED))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
#if PTR_SIZE == 8
                else if (sbt == VT_PTR)
                    ;
#endif
                else if (sbt != VT_LLONG)
                    vtop->c.i = ((uint32_t)vtop->c.i |
                                  -(vtop->c.i & 0x80000000));

                /* then truncate/extend to the destination width */
                if (dbt == (VT_LLONG|VT_UNSIGNED))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
#if PTR_SIZE == 8
                else if (dbt == VT_PTR)
                    ;
#endif
                else if (dbt != VT_LLONG) {
                    uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
                                  (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
                                  0xffffffff);
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
        } else if (p && dbt == VT_BOOL) {
            /* address of a symbol is never NULL -> constant true */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
        } else {
            /* non constant case: generate code */
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else if (sf) {
                /* convert fp to int */
                if (dbt == VT_BOOL) {
                    vpushi(0);
                    gen_op(TOK_NE);
                } else {
                    /* we handle char/short/etc... with generic code */
                    if (dbt != (VT_INT | VT_UNSIGNED) &&
                        dbt != (VT_LLONG | VT_UNSIGNED) &&
                        dbt != VT_LLONG)
                        dbt = VT_INT;
                    gen_cvt_ftoi1(dbt);
                    if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
                        /* additional cast for char/short... */
                        vtop->type.t = dbt;
                        gen_cast(type);
                    }
                }
#if PTR_SIZE == 4
            } else if ((dbt & VT_BTYPE) == VT_LLONG) {
                if ((sbt & VT_BTYPE) != VT_LLONG) {
                    /* scalar to long long */
                    /* machine independent conversion */
                    gv(RC_INT);
                    /* generate high word */
                    if (sbt == (VT_INT | VT_UNSIGNED)) {
                        vpushi(0);
                        gv(RC_INT);
                    } else {
                        if (sbt == VT_PTR) {
                            /* cast from pointer to int before we apply
                               shift operation, which pointers don't support*/
                            gen_cast_s(VT_INT);
                        }
                        gv_dup();
                        vpushi(31);
                        gen_op(TOK_SAR);
                    }
                    /* patch second register */
                    vtop[-1].r2 = vtop->r;
                    vpop();
                }
#else
            } else if ((dbt & VT_BTYPE) == VT_LLONG ||
                       (dbt & VT_BTYPE) == VT_PTR ||
                       (dbt & VT_BTYPE) == VT_FUNC) {
                if ((sbt & VT_BTYPE) != VT_LLONG &&
                    (sbt & VT_BTYPE) != VT_PTR &&
                    (sbt & VT_BTYPE) != VT_FUNC) {
                    /* need to convert from 32bit to 64bit */
                    gv(RC_INT);
                    if (sbt != (VT_INT | VT_UNSIGNED)) {
#if defined(TCC_TARGET_ARM64)
                        gen_cvt_sxtw();
#elif defined(TCC_TARGET_X86_64)
                        int r = gv(RC_INT);
                        /* x86_64 specific: movslq */
                        o(0x6348);
                        o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
#else
#error
#endif
                    }
                }
#endif
            } else if (dbt == VT_BOOL) {
                /* scalar to bool */
                vpushi(0);
                gen_op(TOK_NE);
            } else if ((dbt & VT_BTYPE) == VT_BYTE ||
                       (dbt & VT_BTYPE) == VT_SHORT) {
                if (sbt == VT_PTR) {
                    vtop->type.t = VT_INT;
                    tcc_warning("nonportable conversion from pointer to char/short");
                }
                force_charshort_cast(dbt);
#if PTR_SIZE == 4
            } else if ((dbt & VT_BTYPE) == VT_INT) {
                /* scalar to int */
                if ((sbt & VT_BTYPE) == VT_LLONG) {
                    /* from long long: just take low order word */
                    lexpand();
                    vpop();
                }
                /* if lvalue and single word type, nothing to do because
                   the lvalue already contains the real type size (see
                   VT_LVAL_xxx constants) */
#endif
            }
        }
    } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
        /* if we are casting between pointer types,
           we must update the VT_LVAL_xxx size */
        vtop->r = (vtop->r & ~VT_LVAL_TYPE)
                  | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
    }
    vtop->type = *type;
}
/* return type size as known at compile time. Put alignment at 'a' */
/* Returns -1 for incomplete types (e.g. an enum whose members are not
   yet known); for arrays of unknown size the negative element size is
   propagated through the multiplication below. */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union: size and alignment were computed by struct_layout
           and stored on the type's reference symbol */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* both element size and count unknown: keep the sign intact */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c == -1) {
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is target/ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        /* 128-bit types: 8-byte aligned, 16 bytes wide */
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
/* push type size as known at runtime time on top of value stack. Put
   alignment at 'a' */
/* For a VLA the size lives in a local variable (offset type->ref->c);
   for ordinary types the compile-time constant is pushed instead. */
ST_FUNC void vla_runtime_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* alignment comes from the element type; the total size is read
           from the hidden local that holds the runtime size */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        vpushi(type_size(type, a));
    }
}
2736 static void vla_sp_restore(void) {
2737 if (vlas_in_scope) {
2738 gen_vla_sp_restore(vla_sp_loc);
2742 static void vla_sp_restore_root(void) {
2743 if (vlas_in_scope) {
2744 gen_vla_sp_restore(vla_sp_root_loc);
2748 /* return the pointed type of t */
2749 static inline CType *pointed_type(CType *type)
2751 return &type->ref->type;
2754 /* modify type so that its it is a pointer to type. */
2755 ST_FUNC void mk_pointer(CType *type)
2757 Sym *s;
2758 s = sym_push(SYM_FIELD, type, 0, -1);
2759 type->t = VT_PTR | (type->t & VT_STORAGE);
2760 type->ref = s;
2763 /* compare function types. OLD functions match any new functions */
2764 static int is_compatible_func(CType *type1, CType *type2)
2766 Sym *s1, *s2;
2768 s1 = type1->ref;
2769 s2 = type2->ref;
2770 if (!is_compatible_types(&s1->type, &s2->type))
2771 return 0;
2772 /* check func_call */
2773 if (s1->f.func_call != s2->f.func_call)
2774 return 0;
2775 /* XXX: not complete */
2776 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2777 return 1;
2778 if (s1->f.func_type != s2->f.func_type)
2779 return 0;
2780 while (s1 != NULL) {
2781 if (s2 == NULL)
2782 return 0;
2783 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2784 return 0;
2785 s1 = s1->next;
2786 s2 = s2->next;
2788 if (s2)
2789 return 0;
2790 return 1;
2793 /* return true if type1 and type2 are the same. If unqualified is
2794 true, qualifiers on the types are ignored.
2796 - enums are not checked as gcc __builtin_types_compatible_p ()
2798 static int compare_types(CType *type1, CType *type2, int unqualified)
2800 int bt1, t1, t2;
2802 t1 = type1->t & VT_TYPE;
2803 t2 = type2->t & VT_TYPE;
2804 if (unqualified) {
2805 /* strip qualifiers before comparing */
2806 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2807 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2810 /* Default Vs explicit signedness only matters for char */
2811 if ((t1 & VT_BTYPE) != VT_BYTE) {
2812 t1 &= ~VT_DEFSIGN;
2813 t2 &= ~VT_DEFSIGN;
2815 /* XXX: bitfields ? */
2816 if (t1 != t2)
2817 return 0;
2818 /* test more complicated cases */
2819 bt1 = t1 & VT_BTYPE;
2820 if (bt1 == VT_PTR) {
2821 type1 = pointed_type(type1);
2822 type2 = pointed_type(type2);
2823 return is_compatible_types(type1, type2);
2824 } else if (bt1 == VT_STRUCT) {
2825 return (type1->ref == type2->ref);
2826 } else if (bt1 == VT_FUNC) {
2827 return is_compatible_func(type1, type2);
2828 } else {
2829 return 1;
/* return true if type1 and type2 are exactly the same (including
   qualifiers).
*/
static int is_compatible_types(CType *type1, CType *type2)
{
    /* third argument 0 = compare qualifiers too */
    return compare_types(type1,type2,0);
}
/* return true if type1 and type2 are the same (ignoring qualifiers).
*/
static int is_compatible_unqualified_types(CType *type1, CType *type2)
{
    /* third argument 1 = strip const/volatile before comparing */
    return compare_types(type1,type2,1);
}
2848 /* print a type. If 'varstr' is not NULL, then the variable is also
2849 printed in the type */
2850 /* XXX: union */
2851 /* XXX: add array and function pointers */
2852 static void type_to_str(char *buf, int buf_size,
2853 CType *type, const char *varstr)
2855 int bt, v, t;
2856 Sym *s, *sa;
2857 char buf1[256];
2858 const char *tstr;
2860 t = type->t;
2861 bt = t & VT_BTYPE;
2862 buf[0] = '\0';
2864 if (t & VT_EXTERN)
2865 pstrcat(buf, buf_size, "extern ");
2866 if (t & VT_STATIC)
2867 pstrcat(buf, buf_size, "static ");
2868 if (t & VT_TYPEDEF)
2869 pstrcat(buf, buf_size, "typedef ");
2870 if (t & VT_INLINE)
2871 pstrcat(buf, buf_size, "inline ");
2872 if (t & VT_VOLATILE)
2873 pstrcat(buf, buf_size, "volatile ");
2874 if (t & VT_CONSTANT)
2875 pstrcat(buf, buf_size, "const ");
2877 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2878 || ((t & VT_UNSIGNED)
2879 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2880 && !IS_ENUM(t)
2882 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2884 buf_size -= strlen(buf);
2885 buf += strlen(buf);
2887 switch(bt) {
2888 case VT_VOID:
2889 tstr = "void";
2890 goto add_tstr;
2891 case VT_BOOL:
2892 tstr = "_Bool";
2893 goto add_tstr;
2894 case VT_BYTE:
2895 tstr = "char";
2896 goto add_tstr;
2897 case VT_SHORT:
2898 tstr = "short";
2899 goto add_tstr;
2900 case VT_INT:
2901 tstr = "int";
2902 goto maybe_long;
2903 case VT_LLONG:
2904 tstr = "long long";
2905 maybe_long:
2906 if (t & VT_LONG)
2907 tstr = "long";
2908 if (!IS_ENUM(t))
2909 goto add_tstr;
2910 tstr = "enum ";
2911 goto tstruct;
2912 case VT_FLOAT:
2913 tstr = "float";
2914 goto add_tstr;
2915 case VT_DOUBLE:
2916 tstr = "double";
2917 goto add_tstr;
2918 case VT_LDOUBLE:
2919 tstr = "long double";
2920 add_tstr:
2921 pstrcat(buf, buf_size, tstr);
2922 break;
2923 case VT_STRUCT:
2924 tstr = "struct ";
2925 if (IS_UNION(t))
2926 tstr = "union ";
2927 tstruct:
2928 pstrcat(buf, buf_size, tstr);
2929 v = type->ref->v & ~SYM_STRUCT;
2930 if (v >= SYM_FIRST_ANOM)
2931 pstrcat(buf, buf_size, "<anonymous>");
2932 else
2933 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2934 break;
2935 case VT_FUNC:
2936 s = type->ref;
2937 type_to_str(buf, buf_size, &s->type, varstr);
2938 pstrcat(buf, buf_size, "(");
2939 sa = s->next;
2940 while (sa != NULL) {
2941 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2942 pstrcat(buf, buf_size, buf1);
2943 sa = sa->next;
2944 if (sa)
2945 pstrcat(buf, buf_size, ", ");
2947 pstrcat(buf, buf_size, ")");
2948 goto no_var;
2949 case VT_PTR:
2950 s = type->ref;
2951 if (t & VT_ARRAY) {
2952 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2953 type_to_str(buf, buf_size, &s->type, buf1);
2954 goto no_var;
2956 pstrcpy(buf1, sizeof(buf1), "*");
2957 if (t & VT_CONSTANT)
2958 pstrcat(buf1, buf_size, "const ");
2959 if (t & VT_VOLATILE)
2960 pstrcat(buf1, buf_size, "volatile ");
2961 if (varstr)
2962 pstrcat(buf1, sizeof(buf1), varstr);
2963 type_to_str(buf, buf_size, &s->type, buf1);
2964 goto no_var;
2966 if (varstr) {
2967 pstrcat(buf, buf_size, " ");
2968 pstrcat(buf, buf_size, varstr);
2970 no_var: ;
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed. */
/* Emits warnings (not errors) for the usual C "almost compatible"
   assignments; hard-errors only on impossible casts. Ends by actually
   casting vtop to 'dt'. */
static void gen_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    char buf1[256], buf2[256];
    int dbt, sbt;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            ; /*
              It is Ok if both are void
              A test program:
                void func1() {}
                void func2() {
                  return func1();
                }
              gcc accepts this program
              */
        else
            tcc_error("cannot cast from/to void");
    }
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            goto type_ok;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            goto type_ok;
        }
        type1 = pointed_type(dt);
        /* a function is implicitly a function pointer */
        if (sbt == VT_FUNC) {
            if ((type1->t & VT_BTYPE) != VT_VOID &&
                !is_compatible_types(pointed_type(dt), st))
                tcc_warning("assignment from incompatible pointer type");
            goto type_ok;
        }
        if (sbt != VT_PTR)
            goto error;
        type2 = pointed_type(st);
        if ((type1->t & VT_BTYPE) == VT_VOID ||
            (type2->t & VT_BTYPE) == VT_VOID) {
            /* void * can match anything */
        } else {
            //printf("types %08x %08x\n", type1->t, type2->t);
            /* exact type match, except for qualifiers */
            if (!is_compatible_unqualified_types(type1, type2)) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
                if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
                    || IS_ENUM(type1->t) || IS_ENUM(type2->t)
                    )
                    tcc_warning("assignment from incompatible pointer type");
            }
        }
        /* check const and volatile */
        if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
            (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
            tcc_warning("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        /* struct assignment requires fully compatible (unqualified) types */
        if (!is_compatible_unqualified_types(dt, st)) {
        error:
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
        }
        break;
    }
 type_ok:
    gen_cast(dt);
}
/* store vtop in lvalue pushed on stack */
/* Expects the destination lvalue at vtop[-1] and the value at vtop.
   Handles struct copy (via memmove/memcpy), bitfield insertion,
   delayed char/short casts, two-register-wide stores, and leaves the
   stored value on top of the stack as the expression result. */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & VT_TYPE;
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    } else {
        delayed_cast = 0;
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);
    }

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);

        /* destination */
        vswap();
        vtop->type.t = VT_PTR;
        gaddrof();

        /* address of memcpy() */
#ifdef TCC_ARM_EABI
        /* aligned variants avoid byte-wise copying on ARM EABI */
        if(!(align & 7))
            vpush_global_sym(&func_old_type, TOK_memcpy8);
        else if(!(align & 3))
            vpush_global_sym(&func_old_type, TOK_memcpy4);
        else
#endif
        /* Use memmove, rather than memcpy, as dest and src may be same: */
        vpush_global_sym(&func_old_type, TOK_memmove);

        vswap();
        /* source */
        vpushv(vtop - 2);
        vtop->type.t = VT_PTR;
        gaddrof();
        /* type size */
        vpushi(size);
        gfunc_call(3);
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if ((ft & VT_BTYPE) == VT_BOOL) {
            /* normalize to 0/1 first, then store as unsigned byte */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }

        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (r == VT_STRUCT) {
            /* bitfield straddles a storage unit: use the packed path */
            gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if ((ft & VT_BTYPE) != VT_BOOL) {
                /* mask source */
                if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        /* storing into void: just drop the value */
        --vtop;
    } else {
#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        rc = RC_INT;
        if (is_float(ft)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((ft & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
                rc = RC_FRET;
            }
#endif
        }
        r = gv(rc); /* generate value */
        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            t = get_reg(RC_INT);
#if PTR_SIZE == 8
            sv.type.t = VT_PTR;
#else
            sv.type.t = VT_INT;
#endif
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(t, &sv);
            vtop[-1].r = t | VT_LVAL;
        }
        /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
#if PTR_SIZE == 8
        if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
            int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
        if ((ft & VT_BTYPE) == VT_LLONG) {
            int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
#endif
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = addr_type;
            gaddrof();
            vpushi(load_size);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            store(r, vtop - 1);
        }

        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
/* Increment/decrement the lvalue on top of the stack; for post-ops the
   original value is kept below and returned as the expression result. */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        /* rotate so the saved value ends up below lvalue+value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant */
    /* c - TOK_MID yields +1 for TOK_INC and -1 for TOK_DEC */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Parse one or more adjacent string literal tokens, concatenating them
   into 'astr' (NUL-terminated).  'msg' is used in the error message if
   the current token is not a string.  Caller must cstr_free(astr). */
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_new(astr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(astr, '\0');
}
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.
   (For other positive values this yields the bit length of i.) */
static int exact_log2p1(int i)
{
    int ret = 1, step;

    if (i == 0)
        return 0;
    /* peel off whole bytes first */
    while (i >= 1 << 8) {
        i >>= 8;
        ret += 8;
    }
    /* then binary-search the remaining byte */
    for (step = 4; step >= 1; step >>= 1) {
        if (i >= 1 << step) {
            i >>= step;
            ret += step;
        }
    }
    return ret;
}
/* Parse __attribute__((...)) GNUC extension. */
/* Consumes one or more consecutive __attribute__((...)) groups starting
   at the current token and records the recognized attributes in 'ad'.
   Unknown attributes are warned about (if enabled) and their
   parenthesized arguments are skipped. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    CString astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* bare __attribute__((aligned)) = maximum alignment */
                n = MAX_ALIGN;
            }
            /* alignment is stored as log2(n)+1 in a small bitfield */
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp register count to [0, 3] */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                /* attr_mode stores basic type + 1 (0 means "not set") */
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    /* several __attribute__ groups may follow each other */
    goto redo;
}
/* Find field 'v' in struct/union 'type', descending into anonymous
   struct/union members.  Returns the field symbol, or NULL if not
   found. */
static Sym * find_field (CType *type, int v)
{
    Sym *s = type->ref;
    v |= SYM_FIELD;
    while ((s = s->next) != NULL) {
        /* anonymous struct/union member: search it recursively */
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v);
            if (ret)
                return ret;
        }
        if (s->v == v)
            break;
    }
    /* NULL when the loop ran off the end without a match */
    return s;
}
3478 static void struct_add_offset (Sym *s, int offset)
3480 while ((s = s->next) != NULL) {
3481 if ((s->v & SYM_FIELD) &&
3482 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3483 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3484 struct_add_offset(s->type.ref, offset);
3485 } else
3486 s->c += offset;
3490 static void struct_layout(CType *type, AttributeDef *ad)
3492 int size, align, maxalign, offset, c, bit_pos, bit_size;
3493 int packed, a, bt, prevbt, prev_bit_size;
3494 int pcc = !tcc_state->ms_bitfields;
3495 int pragma_pack = *tcc_state->pack_stack_ptr;
3496 Sym *f;
3498 maxalign = 1;
3499 offset = 0;
3500 c = 0;
3501 bit_pos = 0;
3502 prevbt = VT_STRUCT; /* make it never match */
3503 prev_bit_size = 0;
3505 //#define BF_DEBUG
3507 for (f = type->ref->next; f; f = f->next) {
3508 if (f->type.t & VT_BITFIELD)
3509 bit_size = BIT_SIZE(f->type.t);
3510 else
3511 bit_size = -1;
3512 size = type_size(&f->type, &align);
3513 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3514 packed = 0;
3516 if (pcc && bit_size == 0) {
3517 /* in pcc mode, packing does not affect zero-width bitfields */
3519 } else {
3520 /* in pcc mode, attribute packed overrides if set. */
3521 if (pcc && (f->a.packed || ad->a.packed))
3522 align = packed = 1;
3524 /* pragma pack overrides align if lesser and packs bitfields always */
3525 if (pragma_pack) {
3526 packed = 1;
3527 if (pragma_pack < align)
3528 align = pragma_pack;
3529 /* in pcc mode pragma pack also overrides individual align */
3530 if (pcc && pragma_pack < a)
3531 a = 0;
3534 /* some individual align was specified */
3535 if (a)
3536 align = a;
3538 if (type->ref->type.t == VT_UNION) {
3539 if (pcc && bit_size >= 0)
3540 size = (bit_size + 7) >> 3;
3541 offset = 0;
3542 if (size > c)
3543 c = size;
3545 } else if (bit_size < 0) {
3546 if (pcc)
3547 c += (bit_pos + 7) >> 3;
3548 c = (c + align - 1) & -align;
3549 offset = c;
3550 if (size > 0)
3551 c += size;
3552 bit_pos = 0;
3553 prevbt = VT_STRUCT;
3554 prev_bit_size = 0;
3556 } else {
3557 /* A bit-field. Layout is more complicated. There are two
3558 options: PCC (GCC) compatible and MS compatible */
3559 if (pcc) {
3560 /* In PCC layout a bit-field is placed adjacent to the
3561 preceding bit-fields, except if:
3562 - it has zero-width
3563 - an individual alignment was given
3564 - it would overflow its base type container and
3565 there is no packing */
3566 if (bit_size == 0) {
3567 new_field:
3568 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3569 bit_pos = 0;
3570 } else if (f->a.aligned) {
3571 goto new_field;
3572 } else if (!packed) {
3573 int a8 = align * 8;
3574 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3575 if (ofs > size / align)
3576 goto new_field;
3579 /* in pcc mode, long long bitfields have type int if they fit */
3580 if (size == 8 && bit_size <= 32)
3581 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3583 while (bit_pos >= align * 8)
3584 c += align, bit_pos -= align * 8;
3585 offset = c;
3587 /* In PCC layout named bit-fields influence the alignment
3588 of the containing struct using the base types alignment,
3589 except for packed fields (which here have correct align). */
3590 if (f->v & SYM_FIRST_ANOM
3591 // && bit_size // ??? gcc on ARM/rpi does that
3593 align = 1;
3595 } else {
3596 bt = f->type.t & VT_BTYPE;
3597 if ((bit_pos + bit_size > size * 8)
3598 || (bit_size > 0) == (bt != prevbt)
3600 c = (c + align - 1) & -align;
3601 offset = c;
3602 bit_pos = 0;
3603 /* In MS bitfield mode a bit-field run always uses
3604 at least as many bits as the underlying type.
3605 To start a new run it's also required that this
3606 or the last bit-field had non-zero width. */
3607 if (bit_size || prev_bit_size)
3608 c += size;
3610 /* In MS layout the records alignment is normally
3611 influenced by the field, except for a zero-width
3612 field at the start of a run (but by further zero-width
3613 fields it is again). */
3614 if (bit_size == 0 && prevbt != bt)
3615 align = 1;
3616 prevbt = bt;
3617 prev_bit_size = bit_size;
3620 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3621 | (bit_pos << VT_STRUCT_SHIFT);
3622 bit_pos += bit_size;
3624 if (align > maxalign)
3625 maxalign = align;
3627 #ifdef BF_DEBUG
3628 printf("set field %s offset %-2d size %-2d align %-2d",
3629 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3630 if (f->type.t & VT_BITFIELD) {
3631 printf(" pos %-2d bits %-2d",
3632 BIT_POS(f->type.t),
3633 BIT_SIZE(f->type.t)
3636 printf("\n");
3637 #endif
3639 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3640 Sym *ass;
3641 /* An anonymous struct/union. Adjust member offsets
3642 to reflect the real offset of our containing struct.
3643 Also set the offset of this anon member inside
3644 the outer struct to be zero. Via this it
3645 works when accessing the field offset directly
3646 (from base object), as well as when recursing
3647 members in initializer handling. */
3648 int v2 = f->type.ref->v;
3649 if (!(v2 & SYM_FIELD) &&
3650 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3651 Sym **pps;
3652 /* This happens only with MS extensions. The
3653 anon member has a named struct type, so it
3654 potentially is shared with other references.
3655 We need to unshare members so we can modify
3656 them. */
3657 ass = f->type.ref;
3658 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3659 &f->type.ref->type, 0,
3660 f->type.ref->c);
3661 pps = &f->type.ref->next;
3662 while ((ass = ass->next) != NULL) {
3663 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3664 pps = &((*pps)->next);
3666 *pps = NULL;
3668 struct_add_offset(f->type.ref, offset);
3669 f->c = 0;
3670 } else {
3671 f->c = offset;
3674 f->r = 0;
3677 if (pcc)
3678 c += (bit_pos + 7) >> 3;
3680 /* store size and alignment */
3681 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3682 if (a < maxalign)
3683 a = maxalign;
3684 type->ref->r = a;
3685 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3686 /* can happen if individual align for some member was given. In
3687 this case MSVC ignores maxalign when aligning the size */
3688 a = pragma_pack;
3689 if (a < bt)
3690 a = bt;
3692 c = (c + a - 1) & -a;
3693 type->ref->c = c;
3695 #ifdef BF_DEBUG
3696 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3697 #endif
3699 /* check whether we can access bitfields by their type */
3700 for (f = type->ref->next; f; f = f->next) {
3701 int s, px, cx, c0;
3702 CType t;
3704 if (0 == (f->type.t & VT_BITFIELD))
3705 continue;
3706 f->type.ref = f;
3707 f->auxtype = -1;
3708 bit_size = BIT_SIZE(f->type.t);
3709 if (bit_size == 0)
3710 continue;
3711 bit_pos = BIT_POS(f->type.t);
3712 size = type_size(&f->type, &align);
3713 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3714 continue;
3716 /* try to access the field using a different type */
3717 c0 = -1, s = align = 1;
3718 for (;;) {
3719 px = f->c * 8 + bit_pos;
3720 cx = (px >> 3) & -align;
3721 px = px - (cx << 3);
3722 if (c0 == cx)
3723 break;
3724 s = (px + bit_size + 7) >> 3;
3725 if (s > 4) {
3726 t.t = VT_LLONG;
3727 } else if (s > 2) {
3728 t.t = VT_INT;
3729 } else if (s > 1) {
3730 t.t = VT_SHORT;
3731 } else {
3732 t.t = VT_BYTE;
3734 s = type_size(&t, &align);
3735 c0 = cx;
3738 if (px + bit_size <= s * 8 && cx + s <= c) {
3739 /* update offset and bit position */
3740 f->c = cx;
3741 bit_pos = px;
3742 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3743 | (bit_pos << VT_STRUCT_SHIFT);
3744 if (s != size)
3745 f->auxtype = t.t;
3746 #ifdef BF_DEBUG
3747 printf("FIX field %s offset %-2d size %-2d align %-2d "
3748 "pos %-2d bits %-2d\n",
3749 get_tok_str(f->v & ~SYM_FIELD, NULL),
3750 cx, s, align, px, bit_size);
3751 #endif
3752 } else {
3753 /* fall back to load/store single-byte wise */
3754 f->auxtype = VT_STRUCT;
3755 #ifdef BF_DEBUG
3756 printf("FIX field %s : load byte-wise\n",
3757 get_tok_str(f->v & ~SYM_FIELD, NULL));
3758 #endif
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            /* 'enum X' may also refer to a previously completed enum tag */
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* unnamed struct/union/enum: fabricate an anonymous tag */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        /* s->c == -1 marks a tag that has not been completed yet */
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            /* ll: value of the current enumerator,
               pl/nl: largest/smallest value seen so far */
            long long ll = 0, pl = 0, nl = 0;
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                /* no negative enumerator: an unsigned type is enough */
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            /* struct/union member list */
            c = 0;          /* set once a real member has been seen */
            flexible = 0;   /* set after a flexible array member */
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                /* anonymous member with a *named* struct
                                   type is only valid with MS extensions */
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                    && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            /* encode the width in the type word; the bit
                               position is filled in later by struct_layout */
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | VT_BITFIELD
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            struct_layout(type, &ad);
        }
    }
}
3976 static void sym_to_attr(AttributeDef *ad, Sym *s)
3978 if (s->a.aligned && 0 == ad->a.aligned)
3979 ad->a.aligned = s->a.aligned;
3980 if (s->f.func_call && 0 == ad->f.func_call)
3981 ad->f.func_call = s->f.func_call;
3982 if (s->f.func_type && 0 == ad->f.func_type)
3983 ad->f.func_type = s->f.func_type;
3984 if (s->a.packed)
3985 ad->a.packed = 1;
3988 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3989 are added to the element type, copied because it could be a typedef. */
3990 static void parse_btype_qualify(CType *type, int qualifiers)
3992 while (type->t & VT_ARRAY) {
3993 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3994 type = &type->ref->type;
3996 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
 */
static int parse_btype(CType *type, AttributeDef *ad)
{
    int t, u, bt, st, type_found, typespec_found, g;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;
    /* bt: basic type seen so far, st: size modifier (short/long);
       -1 = none seen, -2 = supplied by a typedef */
    bt = st = -1;
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

            /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
           tmbt: tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                /* "double long" -> long double */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* second 'long' -> long long */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* "long double" */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

            /* type modifiers */
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* accepted but have no effect on the type */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

            /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
       storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;

            /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(...))) overrides the basic type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            break;
            /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;
            /* apply the typedef'ed type, re-applying any qualifiers
               collected so far onto it */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            next();
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_TARGET_PE
    /* on Windows, long double is plain double */
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
#endif
    type->t = t;
    return type_found;
}
4219 /* convert a function parameter type (array to pointer and function to
4220 function pointer) */
4221 static inline void convert_parameter_type(CType *pt)
4223 /* remove const and volatile qualifiers (XXX: const could be used
4224 to indicate a const function parameter */
4225 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4226 /* array must be transformed to pointer according to ANSI C */
4227 pt->t &= ~VT_ARRAY;
4228 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4229 mk_pointer(pt);
/* Parse the "(<string literals>" part of an asm() construct: skips the
   opening parenthesis and collects the (possibly concatenated) string
   into *astr.  The closing ')' is left for the caller to skip. */
ST_FUNC void parse_asm_str(CString *astr)
{
    skip('(');
    parse_mult_str(astr, "string constant");
}
4239 /* Parse an asm label and return the token */
4240 static int asm_label_instr(void)
4242 int v;
4243 CString astr;
4245 next();
4246 parse_asm_str(&astr);
4247 skip(')');
4248 #ifdef ASM_DEBUG
4249 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4250 #endif
4251 v = tok_alloc(astr.data, astr.size - 1)->tok;
4252 cstr_free(&astr);
4253 return v;
/* Parse the "postfix" part of a declarator: a function parameter list
   '(...)' or array brackets '[...]'.  'type' holds the type built so
   far and is extended in place.  'storage' carries the storage bits of
   the declaration (VT_STATIC matters for array sizes).  Returns 0 only
   when 'td' requests a test and the '(' turns out to start a nested
   declarator rather than a parameter list; returns 1 otherwise. */
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
{
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;

    if (tok == '(') {
        /* function type, or recursive declarator (return if so) */
        next();
        if (td && !(td & TYPE_ABSTRACT))
            return 0;
        if (tok == ')')
            l = 0;
        else if (parse_btype(&pt, &ad1))
            l = FUNC_NEW;
        else if (td)
            return 0;
        else
            l = FUNC_OLD;
        first = NULL;
        plast = &first;
        arg_size = 0;
        if (l) {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    /* '(void)' means an empty new-style parameter list */
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break;
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                } else {
                    /* K&R style: identifiers only, types come later */
                    n = tok;
                    if (n < TOK_UIDENT)
                        expect("identifier");
                    pt.t = VT_VOID; /* invalid type */
                    next();
                }
                convert_parameter_type(&pt);
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
                    tcc_error("invalid type");
            }
        } else
            /* if no parameters, then old type prototype */
            l = FUNC_OLD;
        skip(')');
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            mk_pointer(type);
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
        s->a = ad->a;
        s->f = ad->f;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        next();
        if (tok == TOK_RESTRICT1)
            next();
        n = -1;
        t1 = 0;
        if (tok != ']') {
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
            else {
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
                nocode_wanted = 0;
                gexpr();
            }
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad, storage, 0);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            /* reserve a stack slot and emit code storing the computed
               total size of the VLA into it */
            loc -= type_size(&int_type, &align);
            loc &= -align;
            n = loc;

            vla_runtime_type_size(type, &align);
            gen_op('*');
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
            vswap();
            vstore();
        }
        if (n != -1)
            vpop();
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;
    }
    return 1;
}
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl(). If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls. */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    CType *post, *ret;
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post = ret = type;

    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* 'restrict' is accepted but ignored */
            goto redo;
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            break;
        }
        mk_pointer(type);
        type->t |= qualifiers;
        if (ret == type)
            /* innermost pointed to type is the one for the first derivation */
            ret = pointed_type(type);
    }

    if (tok == '(') {
        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this. */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any). */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
            skip(')');
        }
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        *v = tok;
        next();
    } else {
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
        *v = 0;
    }
    post_type(post, ad, storage, 0);
    parse_attribute(ad);
    type->t |= storage;
    return ret;
}
4471 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4472 ST_FUNC int lvalue_type(int t)
4474 int bt, r;
4475 r = VT_LVAL;
4476 bt = t & VT_BTYPE;
4477 if (bt == VT_BYTE || bt == VT_BOOL)
4478 r |= VT_LVAL_BYTE;
4479 else if (bt == VT_SHORT)
4480 r |= VT_LVAL_SHORT;
4481 else
4482 return r;
4483 if (t & VT_UNSIGNED)
4484 r |= VT_LVAL_UNSIGNED;
4485 return r;
4488 /* indirection with full error checking and bound check */
4489 ST_FUNC void indir(void)
4491 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4492 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4493 return;
4494 expect("pointer");
4496 if (vtop->r & VT_LVAL)
4497 gv(RC_INT);
4498 vtop->type = *pointed_type(&vtop->type);
4499 /* Arrays and functions are never lvalues */
4500 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4501 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4502 vtop->r |= lvalue_type(vtop->type.t);
4503 /* if bound checking, the referenced pointer must be checked */
4504 #ifdef CONFIG_TCC_BCHECK
4505 if (tcc_state->do_bounds_check)
4506 vtop->r |= VT_MUSTBOUND;
4507 #endif
4511 /* pass a parameter to a function and do type checking and casting */
4512 static void gfunc_param_typed(Sym *func, Sym *arg)
4514 int func_type;
4515 CType type;
4517 func_type = func->f.func_type;
4518 if (func_type == FUNC_OLD ||
4519 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4520 /* default casting : only need to convert float to double */
4521 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4522 gen_cast_s(VT_DOUBLE);
4523 } else if (vtop->type.t & VT_BITFIELD) {
4524 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4525 type.ref = vtop->type.ref;
4526 gen_cast(&type);
4528 } else if (arg == NULL) {
4529 tcc_error("too many arguments to function");
4530 } else {
4531 type = arg->type;
4532 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4533 gen_assign_cast(&type);
/* parse an expression and return its type without any side effect. */
static void expr_type(CType *type, void (*expr_fn)(void))
{
    /* suppress code generation while evaluating: only the type is needed */
    nocode_wanted++;
    expr_fn();
    *type = vtop->type;
    vpop();
    nocode_wanted--;
}
4547 /* parse an expression of the form '(type)' or '(expr)' and return its
4548 type */
4549 static void parse_expr_type(CType *type)
4551 int n;
4552 AttributeDef ad;
4554 skip('(');
4555 if (parse_btype(type, &ad)) {
4556 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4557 } else {
4558 expr_type(type, gexpr);
4560 skip(')');
/* parse a complete type name (basic type plus abstract declarator),
   as used e.g. in casts and sizeof; errors out if no type is present */
static void parse_type(CType *type)
{
    AttributeDef ad;
    int n;

    if (!parse_btype(type, &ad)) {
        expect("type");
    }
    type_decl(type, &ad, &n, TYPE_ABSTRACT);
}
4574 static void parse_builtin_params(int nc, const char *args)
4576 char c, sep = '(';
4577 CType t;
4578 if (nc)
4579 nocode_wanted++;
4580 next();
4581 while ((c = *args++)) {
4582 skip(sep);
4583 sep = ',';
4584 switch (c) {
4585 case 'e': expr_eq(); continue;
4586 case 't': parse_type(&t); vpush(&t); continue;
4587 default: tcc_error("internal error"); break;
4590 skip(')');
4591 if (nc)
4592 nocode_wanted--;
4595 ST_FUNC void unary(void)
4597 int n, t, align, size, r, sizeof_caller;
4598 CType type;
4599 Sym *s;
4600 AttributeDef ad;
4602 sizeof_caller = in_sizeof;
4603 in_sizeof = 0;
4604 type.ref = NULL;
4605 /* XXX: GCC 2.95.3 does not generate a table although it should be
4606 better here */
4607 tok_next:
4608 switch(tok) {
4609 case TOK_EXTENSION:
4610 next();
4611 goto tok_next;
4612 case TOK_LCHAR:
4613 #ifdef TCC_TARGET_PE
4614 t = VT_SHORT|VT_UNSIGNED;
4615 goto push_tokc;
4616 #endif
4617 case TOK_CINT:
4618 case TOK_CCHAR:
4619 t = VT_INT;
4620 push_tokc:
4621 type.t = t;
4622 vsetc(&type, VT_CONST, &tokc);
4623 next();
4624 break;
4625 case TOK_CUINT:
4626 t = VT_INT | VT_UNSIGNED;
4627 goto push_tokc;
4628 case TOK_CLLONG:
4629 t = VT_LLONG;
4630 goto push_tokc;
4631 case TOK_CULLONG:
4632 t = VT_LLONG | VT_UNSIGNED;
4633 goto push_tokc;
4634 case TOK_CFLOAT:
4635 t = VT_FLOAT;
4636 goto push_tokc;
4637 case TOK_CDOUBLE:
4638 t = VT_DOUBLE;
4639 goto push_tokc;
4640 case TOK_CLDOUBLE:
4641 t = VT_LDOUBLE;
4642 goto push_tokc;
4643 case TOK_CLONG:
4644 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4645 goto push_tokc;
4646 case TOK_CULONG:
4647 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4648 goto push_tokc;
4649 case TOK___FUNCTION__:
4650 if (!gnu_ext)
4651 goto tok_identifier;
4652 /* fall thru */
4653 case