Implement function alignment via attributes
[tinycc.git] / tccgen.c
blob 4f2e68bacc1e3b4ecb4ffe524f20f3f3a6c0eee3
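A hedged usage sketch of what this commit enables (the attribute spelling below assumes tcc's GCC-style __attribute__ support and is not taken from this blob):

    /* ask for the code of f() to be emitted on a 16-byte boundary */
    __attribute__((aligned(16))) void f(void) { /* ... */ }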
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
41 static int local_scope;
42 static int in_sizeof;
43 static int section_sym;
45 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
46 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA int vla_sp_loc; /* location of the local variable that holds the saved stack pointer, used whenever the stack pointer is modified for a VLA */
49 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
51 ST_DATA int const_wanted; /* true if constant wanted */
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
56 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
57 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
58 ST_DATA int func_vc;
59 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
60 ST_DATA const char *funcname;
61 ST_DATA int g_debug;
63 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
65 ST_DATA struct switch_t {
66 struct case_t {
67 int64_t v1, v2;
68 int sym;
69 } **p; int n; /* list of case ranges */
70 int def_sym; /* default symbol */
71 } *cur_switch; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType *type);
76 static void gen_cast_s(int t);
77 static inline CType *pointed_type(CType *type);
78 static int is_compatible_types(CType *type1, CType *type2);
79 static int parse_btype(CType *type, AttributeDef *ad);
80 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
81 static void parse_expr_type(CType *type);
82 static void init_putv(CType *type, Section *sec, unsigned long c);
83 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
84 static void block(int *bsym, int *csym, int is_expr);
85 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
86 static void decl(int l);
87 static int decl0(int l, int is_for_loop_init, Sym *);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType *type, int *a);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType *type1, CType *type2);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty, unsigned long long v);
95 static void vpush(CType *type);
96 static int gvtst(int inv, int t);
97 static void gen_inline_functions(TCCState *s);
98 static void skip_or_save_block(TokenString **str);
99 static void gv_dup(void);
101 ST_INLN int is_float(int t)
103 int bt;
104 bt = t & VT_BTYPE;
105 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
108 /* we use our own 'finite' function to avoid potential problems with
109 non-standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC int ieee_finite(double d)
113 int p[4];
114 memcpy(p, &d, sizeof(double));
115 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
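/* Editorial sketch (not part of tccgen.c): on a little-endian target p[1] is
   the high word of the double - sign(1) | exponent(11) | mantissa(20).
   OR-ing with 0x800fffff sets every bit except the exponent bits; when the
   exponent is all ones (Inf/NaN) the word becomes 0xffffffff, the +1 wraps
   to 0 and bit 31 reads 0, otherwise bit 31 survives and the result is 1. */
#if 0
#include <assert.h>
#include <math.h>
static void check_ieee_finite(void)
{
    assert(ieee_finite(1.5) == 1);       /* ordinary finite value */
    assert(ieee_finite(HUGE_VAL) == 0);  /* +Inf is not finite */
    assert(ieee_finite(0.0 / 1.0) == 1); /* zero is finite too */
}
#endif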
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
122 #endif
124 ST_FUNC void test_lvalue(void)
126 if (!(vtop->r & VT_LVAL))
127 expect("lvalue");
130 ST_FUNC void check_vstack(void)
132 if (pvtop != vtop)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
139 #if 0
140 void pv (const char *lbl, int a, int b)
142 int i;
143 for (i = a; i < a + b; ++i) {
144 SValue *p = &vtop[-i];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
149 #endif
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC void tcc_debug_start(TCCState *s1)
155 if (s1->do_debug) {
156 char buf[512];
158 /* file info: full path + filename */
159 section_sym = put_elf_sym(symtab_section, 0, 0,
160 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
161 text_section->sh_num, NULL);
162 getcwd(buf, sizeof(buf));
163 #ifdef _WIN32
164 normalize_slashes(buf);
165 #endif
166 pstrcat(buf, sizeof(buf), "/");
167 put_stabs_r(buf, N_SO, 0, 0,
168 text_section->data_offset, text_section, section_sym);
169 put_stabs_r(file->filename, N_SO, 0, 0,
170 text_section->data_offset, text_section, section_sym);
171 last_ind = 0;
172 last_line_num = 0;
175 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section, 0, 0,
178 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
179 SHN_ABS, file->filename);
182 /* put end of translation unit info */
183 ST_FUNC void tcc_debug_end(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 put_stabs_r(NULL, N_SO, 0, 0,
188 text_section->data_offset, text_section, section_sym);
192 /* generate line number info */
193 ST_FUNC void tcc_debug_line(TCCState *s1)
195 if (!s1->do_debug)
196 return;
197 if ((last_line_num != file->line_num || last_ind != ind)) {
198 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
199 last_ind = ind;
200 last_line_num = file->line_num;
204 /* put function symbol */
205 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
207 char buf[512];
209 if (!s1->do_debug)
210 return;
212 /* stabs info */
213 /* XXX: we put here a dummy type */
214 snprintf(buf, sizeof(buf), "%s:%c1",
215 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
216 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
217 cur_text_section, sym->c);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE, 0, file->line_num, 0);
221 last_ind = 0;
222 last_line_num = 0;
225 /* put function size */
226 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
228 if (!s1->do_debug)
229 return;
230 put_stabn(N_FUN, 0, 0, size);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC int tccgen_compile(TCCState *s1)
236 cur_text_section = NULL;
237 funcname = "";
238 anon_sym = SYM_FIRST_ANOM;
239 section_sym = 0;
240 const_wanted = 0;
241 nocode_wanted = 0x80000000;
243 /* define some often used types */
244 int_type.t = VT_INT;
245 char_pointer_type.t = VT_BYTE;
246 mk_pointer(&char_pointer_type);
247 #if PTR_SIZE == 4
248 size_type.t = VT_INT | VT_UNSIGNED;
249 ptrdiff_type.t = VT_INT;
250 #elif LONG_SIZE == 4
251 size_type.t = VT_LLONG | VT_UNSIGNED;
252 ptrdiff_type.t = VT_LLONG;
253 #else
254 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
255 ptrdiff_type.t = VT_LONG | VT_LLONG;
256 #endif
257 func_old_type.t = VT_FUNC;
258 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
259 func_old_type.ref->f.func_call = FUNC_CDECL;
260 func_old_type.ref->f.func_type = FUNC_OLD;
262 tcc_debug_start(s1);
264 #ifdef TCC_TARGET_ARM
265 arm_init(s1);
266 #endif
268 #ifdef INC_DEBUG
269 printf("%s: **** new file\n", file->filename);
270 #endif
272 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
273 next();
274 decl(VT_CONST);
275 gen_inline_functions(s1);
276 check_vstack();
277 /* end of translation unit info */
278 tcc_debug_end(s1);
279 return 0;
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym *elfsym(Sym *s)
285 if (!s || !s->c)
286 return NULL;
287 return &((ElfSym *)symtab_section->data)[s->c];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC void update_storage(Sym *sym)
293 ElfSym *esym;
294 int sym_bind, old_sym_bind;
296 esym = elfsym(sym);
297 if (!esym)
298 return;
300 if (sym->a.visibility)
301 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
302 | sym->a.visibility;
304 if (sym->type.t & VT_STATIC)
305 sym_bind = STB_LOCAL;
306 else if (sym->a.weak)
307 sym_bind = STB_WEAK;
308 else
309 sym_bind = STB_GLOBAL;
310 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
311 if (sym_bind != old_sym_bind) {
312 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
315 #ifdef TCC_TARGET_PE
316 if (sym->a.dllimport)
317 esym->st_other |= ST_PE_IMPORT;
318 if (sym->a.dllexport)
319 esym->st_other |= ST_PE_EXPORT;
320 #endif
322 #if 0
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym->v, NULL),
325 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
326 sym->a.visibility,
327 sym->a.dllexport,
328 sym->a.dllimport
330 #endif
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
338 addr_t value, unsigned long size,
339 int can_add_underscore)
341 int sym_type, sym_bind, info, other, t;
342 ElfSym *esym;
343 const char *name;
344 char buf1[256];
345 #ifdef CONFIG_TCC_BCHECK
346 char buf[32];
347 #endif
349 if (!sym->c) {
350 name = get_tok_str(sym->v, NULL);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state->do_bounds_check) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
356 switch(sym->v) {
357 #ifdef TCC_TARGET_PE
358 /* XXX: we rely only on malloc hooks */
359 case TOK_malloc:
360 case TOK_free:
361 case TOK_realloc:
362 case TOK_memalign:
363 case TOK_calloc:
364 #endif
365 case TOK_memcpy:
366 case TOK_memmove:
367 case TOK_memset:
368 case TOK_strlen:
369 case TOK_strcpy:
370 case TOK_alloca:
371 strcpy(buf, "__bound_");
372 strcat(buf, name);
373 name = buf;
374 break;
377 #endif
378 t = sym->type.t;
379 if ((t & VT_BTYPE) == VT_FUNC) {
380 sym_type = STT_FUNC;
381 } else if ((t & VT_BTYPE) == VT_VOID) {
382 sym_type = STT_NOTYPE;
383 } else {
384 sym_type = STT_OBJECT;
386 if (t & VT_STATIC)
387 sym_bind = STB_LOCAL;
388 else
389 sym_bind = STB_GLOBAL;
390 other = 0;
391 #ifdef TCC_TARGET_PE
392 if (sym_type == STT_FUNC && sym->type.ref) {
393 Sym *ref = sym->type.ref;
394 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
395 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
396 name = buf1;
397 other |= ST_PE_STDCALL;
398 can_add_underscore = 0;
401 #endif
402 if (tcc_state->leading_underscore && can_add_underscore) {
403 buf1[0] = '_';
404 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
405 name = buf1;
407 if (sym->asm_label)
408 name = get_tok_str(sym->asm_label, NULL);
409 info = ELFW(ST_INFO)(sym_bind, sym_type);
410 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
411 } else {
412 esym = elfsym(sym);
413 esym->st_value = value;
414 esym->st_size = size;
415 esym->st_shndx = sh_num;
417 update_storage(sym);
420 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
421 addr_t value, unsigned long size)
423 int sh_num = section ? section->sh_num : SHN_UNDEF;
424 put_extern_sym2(sym, sh_num, value, size, 1);
427 /* add a new relocation entry to symbol 'sym' in section 's' */
428 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
429 addr_t addend)
431 int c = 0;
433 if (nocode_wanted && s == cur_text_section)
434 return;
436 if (sym) {
437 if (0 == sym->c)
438 put_extern_sym(sym, NULL, 0, 0);
439 c = sym->c;
442 /* now we can add ELF relocation info */
443 put_elf_reloca(symtab_section, s, offset, type, c, addend);
446 #if PTR_SIZE == 4
447 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
449 greloca(s, sym, offset, type, 0);
451 #endif
453 /* ------------------------------------------------------------------------- */
454 /* symbol allocator */
455 static Sym *__sym_malloc(void)
457 Sym *sym_pool, *sym, *last_sym;
458 int i;
460 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
461 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
463 last_sym = sym_free_first;
464 sym = sym_pool;
465 for(i = 0; i < SYM_POOL_NB; i++) {
466 sym->next = last_sym;
467 last_sym = sym;
468 sym++;
470 sym_free_first = last_sym;
471 return last_sym;
474 static inline Sym *sym_malloc(void)
476 Sym *sym;
477 #ifndef SYM_DEBUG
478 sym = sym_free_first;
479 if (!sym)
480 sym = __sym_malloc();
481 sym_free_first = sym->next;
482 return sym;
483 #else
484 sym = tcc_malloc(sizeof(Sym));
485 return sym;
486 #endif
489 ST_INLN void sym_free(Sym *sym)
491 #ifndef SYM_DEBUG
492 sym->next = sym_free_first;
493 sym_free_first = sym;
494 #else
495 tcc_free(sym);
496 #endif
499 /* push, without hashing */
500 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
502 Sym *s;
504 s = sym_malloc();
505 memset(s, 0, sizeof *s);
506 s->v = v;
507 s->type.t = t;
508 s->c = c;
509 /* add in stack */
510 s->prev = *ps;
511 *ps = s;
512 return s;
515 /* find a symbol and return its associated structure. 's' is the top
516 of the symbol stack */
517 ST_FUNC Sym *sym_find2(Sym *s, int v)
519 while (s) {
520 if (s->v == v)
521 return s;
522 else if (s->v == -1)
523 return NULL;
524 s = s->prev;
526 return NULL;
529 /* structure lookup */
530 ST_INLN Sym *struct_find(int v)
532 v -= TOK_IDENT;
533 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
534 return NULL;
535 return table_ident[v]->sym_struct;
538 /* find an identifier */
539 ST_INLN Sym *sym_find(int v)
541 v -= TOK_IDENT;
542 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
543 return NULL;
544 return table_ident[v]->sym_identifier;
547 /* push a given symbol on the symbol stack */
548 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
550 Sym *s, **ps;
551 TokenSym *ts;
553 if (local_stack)
554 ps = &local_stack;
555 else
556 ps = &global_stack;
557 s = sym_push2(ps, v, type->t, c);
558 s->type.ref = type->ref;
559 s->r = r;
560 /* don't record fields or anonymous symbols */
561 /* XXX: simplify */
562 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
563 /* record symbol in token array */
564 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
565 if (v & SYM_STRUCT)
566 ps = &ts->sym_struct;
567 else
568 ps = &ts->sym_identifier;
569 s->prev_tok = *ps;
570 *ps = s;
571 s->sym_scope = local_scope;
572 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
573 tcc_error("redeclaration of '%s'",
574 get_tok_str(v & ~SYM_STRUCT, NULL));
576 return s;
579 /* push a global identifier */
580 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
582 Sym *s, **ps;
583 s = sym_push2(&global_stack, v, t, c);
584 /* don't record anonymous symbol */
585 if (v < SYM_FIRST_ANOM) {
586 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
587 /* modify the top most local identifier, so that
588 sym_identifier will point to 's' when popped */
589 while (*ps != NULL && (*ps)->sym_scope)
590 ps = &(*ps)->prev_tok;
591 s->prev_tok = *ps;
592 *ps = s;
594 return s;
597 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
598 pop them yet from the list, but do remove them from the token array. */
599 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
601 Sym *s, *ss, **ps;
602 TokenSym *ts;
603 int v;
605 s = *ptop;
606 while(s != b) {
607 ss = s->prev;
608 v = s->v;
609 /* remove symbol in token array */
610 /* XXX: simplify */
611 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 *ps = s->prev_tok;
619 if (!keep)
620 sym_free(s);
621 s = ss;
623 if (!keep)
624 *ptop = b;
627 /* ------------------------------------------------------------------------- */
629 static void vsetc(CType *type, int r, CValue *vc)
631 int v;
633 if (vtop >= vstack + (VSTACK_SIZE - 1))
634 tcc_error("memory full (vstack)");
635 /* cannot leave cpu flags if other instructions are generated. Also
636 avoid leaving VT_JMP anywhere except on the top of the stack
637 because it would complicate the code generator.
639 Don't do this when nocode_wanted. vtop might come from
640 !nocode_wanted regions (see 88_codeopt.c) and transforming
641 it to a register without actually generating code is wrong
642 as its value might still be used for real. All values
643 we push under nocode_wanted will eventually be popped
644 again, so that the VT_CMP/VT_JMP value will be in vtop
645 when code is unsuppressed again.
647 Same logic below in vswap(); */
648 if (vtop >= vstack && !nocode_wanted) {
649 v = vtop->r & VT_VALMASK;
650 if (v == VT_CMP || (v & ~1) == VT_JMP)
651 gv(RC_INT);
654 vtop++;
655 vtop->type = *type;
656 vtop->r = r;
657 vtop->r2 = VT_CONST;
658 vtop->c = *vc;
659 vtop->sym = NULL;
662 ST_FUNC void vswap(void)
664 SValue tmp;
665 /* cannot vswap cpu flags. See comment at vsetc() above */
666 if (vtop >= vstack && !nocode_wanted) {
667 int v = vtop->r & VT_VALMASK;
668 if (v == VT_CMP || (v & ~1) == VT_JMP)
669 gv(RC_INT);
671 tmp = vtop[0];
672 vtop[0] = vtop[-1];
673 vtop[-1] = tmp;
676 /* pop stack value */
677 ST_FUNC void vpop(void)
679 int v;
680 v = vtop->r & VT_VALMASK;
681 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
682 /* for x86, we need to pop the FP stack */
683 if (v == TREG_ST0) {
684 o(0xd8dd); /* fstp %st(0) */
685 } else
686 #endif
687 if (v == VT_JMP || v == VT_JMPI) {
688 /* need to put correct jump if && or || without test */
689 gsym(vtop->c.i);
691 vtop--;
694 /* push constant of type "type" with useless value */
695 ST_FUNC void vpush(CType *type)
697 vset(type, VT_CONST, 0);
700 /* push integer constant */
701 ST_FUNC void vpushi(int v)
703 CValue cval;
704 cval.i = v;
705 vsetc(&int_type, VT_CONST, &cval);
708 /* push a pointer sized constant */
709 static void vpushs(addr_t v)
711 CValue cval;
712 cval.i = v;
713 vsetc(&size_type, VT_CONST, &cval);
716 /* push arbitrary 64bit constant */
717 ST_FUNC void vpush64(int ty, unsigned long long v)
719 CValue cval;
720 CType ctype;
721 ctype.t = ty;
722 ctype.ref = NULL;
723 cval.i = v;
724 vsetc(&ctype, VT_CONST, &cval);
727 /* push long long constant */
728 static inline void vpushll(long long v)
730 vpush64(VT_LLONG, v);
733 ST_FUNC void vset(CType *type, int r, int v)
735 CValue cval;
737 cval.i = v;
738 vsetc(type, r, &cval);
741 static void vseti(int r, int v)
743 CType type;
744 type.t = VT_INT;
745 type.ref = NULL;
746 vset(&type, r, v);
749 ST_FUNC void vpushv(SValue *v)
751 if (vtop >= vstack + (VSTACK_SIZE - 1))
752 tcc_error("memory full (vstack)");
753 vtop++;
754 *vtop = *v;
757 static void vdup(void)
759 vpushv(vtop);
762 /* rotate n first stack elements to the bottom
763 I1 ... In -> I2 ... In I1 [top is right]
765 ST_FUNC void vrotb(int n)
767 int i;
768 SValue tmp;
770 tmp = vtop[-n + 1];
771 for(i=-n+1;i!=0;i++)
772 vtop[i] = vtop[i+1];
773 vtop[0] = tmp;
776 /* rotate the n elements before entry e towards the top
777 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
779 ST_FUNC void vrote(SValue *e, int n)
781 int i;
782 SValue tmp;
784 tmp = *e;
785 for(i = 0;i < n - 1; i++)
786 e[-i] = e[-i - 1];
787 e[-n + 1] = tmp;
790 /* rotate n first stack elements to the top
791 I1 ... In -> In I1 ... I(n-1) [top is right]
793 ST_FUNC void vrott(int n)
795 vrote(vtop, n);
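/* Editorial sketch (not part of tccgen.c): the same rotations applied to a
   plain int array, with a[top] standing in for vtop.  With the 3 topmost
   entries {10, 20, 30} (30 on top):
     vrotb(3):  {10, 20, 30} -> {20, 30, 10}   deepest entry wraps to the top
     vrott(3):  {10, 20, 30} -> {30, 10, 20}   top entry wraps to the bottom */
#if 0
static void int_rotb(int *top, int n)  /* mirrors vrotb() on ints */
{
    int i, tmp = top[-n + 1];
    for (i = -n + 1; i != 0; i++)
        top[i] = top[i + 1];
    top[0] = tmp;
}
#endif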
798 /* push a symbol value of TYPE */
799 static inline void vpushsym(CType *type, Sym *sym)
801 CValue cval;
802 cval.i = 0;
803 vsetc(type, VT_CONST | VT_SYM, &cval);
804 vtop->sym = sym;
807 /* Return a static symbol pointing to a section */
808 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
810 int v;
811 Sym *sym;
813 v = anon_sym++;
814 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
815 sym->type.ref = type->ref;
816 sym->r = VT_CONST | VT_SYM;
817 put_extern_sym(sym, sec, offset, size);
818 return sym;
821 /* push a reference to a section offset by adding a dummy symbol */
822 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
824 vpushsym(type, get_sym_ref(type, sec, offset, size));
827 /* define a new external reference to a symbol 'v' with the given type */
828 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
830 Sym *s;
832 s = sym_find(v);
833 if (!s) {
834 /* push forward reference */
835 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
836 s->type.ref = type->ref;
837 s->r = r | VT_CONST | VT_SYM;
838 } else if (IS_ASM_SYM(s)) {
839 s->type.t = type->t | (s->type.t & VT_EXTERN);
840 s->type.ref = type->ref;
841 update_storage(s);
843 return s;
846 /* Merge some type attributes. */
847 static void patch_type(Sym *sym, CType *type)
849 if (!(type->t & VT_EXTERN)) {
850 if (!(sym->type.t & VT_EXTERN))
851 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
852 sym->type.t &= ~VT_EXTERN;
855 if (IS_ASM_SYM(sym)) {
856 /* stay static if both are static */
857 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
858 sym->type.ref = type->ref;
861 if (!is_compatible_types(&sym->type, type)) {
862 tcc_error("incompatible types for redefinition of '%s'",
863 get_tok_str(sym->v, NULL));
865 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
866 int static_proto = sym->type.t & VT_STATIC;
867 /* warn if static follows non-static function declaration */
868 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
869 tcc_warning("static storage ignored for redefinition of '%s'",
870 get_tok_str(sym->v, NULL));
872 if (0 == (type->t & VT_EXTERN)) {
873 /* put complete type, use static from prototype */
874 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
875 if (type->t & VT_INLINE)
876 sym->type.t = type->t;
877 sym->type.ref = type->ref;
880 } else {
881 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
882 /* set array size if it was omitted in extern declaration */
883 if (sym->type.ref->c < 0)
884 sym->type.ref->c = type->ref->c;
885 else if (sym->type.ref->c != type->ref->c)
886 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
888 if ((type->t ^ sym->type.t) & VT_STATIC)
889 tcc_warning("storage mismatch for redefinition of '%s'",
890 get_tok_str(sym->v, NULL));
895 /* Merge some storage attributes. */
896 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
898 if (type)
899 patch_type(sym, type);
901 #ifdef TCC_TARGET_PE
902 if (sym->a.dllimport != ad->a.dllimport)
903 tcc_error("incompatible dll linkage for redefinition of '%s'",
904 get_tok_str(sym->v, NULL));
905 sym->a.dllexport |= ad->a.dllexport;
906 #endif
907 sym->a.weak |= ad->a.weak;
908 if (ad->a.visibility) {
909 int vis = sym->a.visibility;
910 int vis2 = ad->a.visibility;
911 if (vis == STV_DEFAULT)
912 vis = vis2;
913 else if (vis2 != STV_DEFAULT)
914 vis = (vis < vis2) ? vis : vis2;
915 sym->a.visibility = vis;
917 if (ad->a.aligned)
918 sym->a.aligned = ad->a.aligned;
919 if (ad->asm_label)
920 sym->asm_label = ad->asm_label;
921 update_storage(sym);
924 /* define a new external reference to a symbol 'v' */
925 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
927 Sym *s;
928 s = sym_find(v);
929 if (!s) {
930 /* push forward reference */
931 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
932 s->type.t |= VT_EXTERN;
933 s->a = ad->a;
934 s->sym_scope = 0;
935 } else {
936 if (s->type.ref == func_old_type.ref) {
937 s->type.ref = type->ref;
938 s->r = r | VT_CONST | VT_SYM;
939 s->type.t |= VT_EXTERN;
941 patch_storage(s, ad, type);
943 return s;
946 /* push a reference to global symbol v */
947 ST_FUNC void vpush_global_sym(CType *type, int v)
949 vpushsym(type, external_global_sym(v, type, 0));
952 /* save registers up to (vtop - n) stack entry */
953 ST_FUNC void save_regs(int n)
955 SValue *p, *p1;
956 for(p = vstack, p1 = vtop - n; p <= p1; p++)
957 save_reg(p->r);
960 /* save r to the memory stack, and mark it as being free */
961 ST_FUNC void save_reg(int r)
963 save_reg_upstack(r, 0);
966 /* save r to the memory stack, and mark it as being free,
967 if seen up to (vtop - n) stack entry */
968 ST_FUNC void save_reg_upstack(int r, int n)
970 int l, saved, size, align;
971 SValue *p, *p1, sv;
972 CType *type;
974 if ((r &= VT_VALMASK) >= VT_CONST)
975 return;
976 if (nocode_wanted)
977 return;
979 /* modify all stack values */
980 saved = 0;
981 l = 0;
982 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
983 if ((p->r & VT_VALMASK) == r ||
984 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
985 /* must save value on stack if not already done */
986 if (!saved) {
987 /* NOTE: must reload 'r' because r might be equal to r2 */
988 r = p->r & VT_VALMASK;
989 /* store register in the stack */
990 type = &p->type;
991 if ((p->r & VT_LVAL) ||
992 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
993 #if PTR_SIZE == 8
994 type = &char_pointer_type;
995 #else
996 type = &int_type;
997 #endif
998 size = type_size(type, &align);
999 loc = (loc - size) & -align;
1000 sv.type.t = type->t;
1001 sv.r = VT_LOCAL | VT_LVAL;
1002 sv.c.i = loc;
1003 store(r, &sv);
1004 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1005 /* x86 specific: need to pop fp register ST0 if saved */
1006 if (r == TREG_ST0) {
1007 o(0xd8dd); /* fstp %st(0) */
1009 #endif
1010 #if PTR_SIZE == 4
1011 /* special long long case */
1012 if ((type->t & VT_BTYPE) == VT_LLONG) {
1013 sv.c.i += 4;
1014 store(p->r2, &sv);
1016 #endif
1017 l = loc;
1018 saved = 1;
1020 /* mark that stack entry as being saved on the stack */
1021 if (p->r & VT_LVAL) {
1022 /* also clear the bounded flag because the
1023 relocation address of the function was stored in
1024 p->c.i */
1025 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1026 } else {
1027 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1029 p->r2 = VT_CONST;
1030 p->c.i = l;
1035 #ifdef TCC_TARGET_ARM
1036 /* find a register of class 'rc2' with at most one reference on stack.
1037 * If none, call get_reg(rc) */
1038 ST_FUNC int get_reg_ex(int rc, int rc2)
1040 int r;
1041 SValue *p;
1043 for(r=0;r<NB_REGS;r++) {
1044 if (reg_classes[r] & rc2) {
1045 int n;
1046 n=0;
1047 for(p = vstack; p <= vtop; p++) {
1048 if ((p->r & VT_VALMASK) == r ||
1049 (p->r2 & VT_VALMASK) == r)
1050 n++;
1052 if (n <= 1)
1053 return r;
1056 return get_reg(rc);
1058 #endif
1060 /* find a free register of class 'rc'. If none, save one register */
1061 ST_FUNC int get_reg(int rc)
1063 int r;
1064 SValue *p;
1066 /* find a free register */
1067 for(r=0;r<NB_REGS;r++) {
1068 if (reg_classes[r] & rc) {
1069 if (nocode_wanted)
1070 return r;
1071 for(p=vstack;p<=vtop;p++) {
1072 if ((p->r & VT_VALMASK) == r ||
1073 (p->r2 & VT_VALMASK) == r)
1074 goto notfound;
1076 return r;
1078 notfound: ;
1081 /* no register left : free the first one on the stack (VERY
1082 IMPORTANT to start from the bottom to ensure that we don't
1083 spill registers used in gen_opi()) */
1084 for(p=vstack;p<=vtop;p++) {
1085 /* look at second register (if long long) */
1086 r = p->r2 & VT_VALMASK;
1087 if (r < VT_CONST && (reg_classes[r] & rc))
1088 goto save_found;
1089 r = p->r & VT_VALMASK;
1090 if (r < VT_CONST && (reg_classes[r] & rc)) {
1091 save_found:
1092 save_reg(r);
1093 return r;
1096 /* Should never come here */
1097 return -1;
1100 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1101 if needed */
1102 static void move_reg(int r, int s, int t)
1104 SValue sv;
1106 if (r != s) {
1107 save_reg(r);
1108 sv.type.t = t;
1109 sv.type.ref = NULL;
1110 sv.r = s;
1111 sv.c.i = 0;
1112 load(r, &sv);
1116 /* get address of vtop (vtop MUST BE an lvalue) */
1117 ST_FUNC void gaddrof(void)
1119 vtop->r &= ~VT_LVAL;
1120 /* tricky: if saved lvalue, then we can go back to lvalue */
1121 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1122 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1127 #ifdef CONFIG_TCC_BCHECK
1128 /* generate lvalue bound code */
1129 static void gbound(void)
1131 int lval_type;
1132 CType type1;
1134 vtop->r &= ~VT_MUSTBOUND;
1135 /* if lvalue, then use checking code before dereferencing */
1136 if (vtop->r & VT_LVAL) {
1137 /* if not VT_BOUNDED value, then make one */
1138 if (!(vtop->r & VT_BOUNDED)) {
1139 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1140 /* must save type because we must set it to int to get pointer */
1141 type1 = vtop->type;
1142 vtop->type.t = VT_PTR;
1143 gaddrof();
1144 vpushi(0);
1145 gen_bounded_ptr_add();
1146 vtop->r |= lval_type;
1147 vtop->type = type1;
1149 /* then check for dereferencing */
1150 gen_bounded_ptr_deref();
1153 #endif
1155 static void incr_bf_adr(int o)
1157 vtop->type = char_pointer_type;
1158 gaddrof();
1159 vpushi(o);
1160 gen_op('+');
1161 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1162 | (VT_BYTE|VT_UNSIGNED);
1163 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1164 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1167 /* single-byte load mode for packed or otherwise unaligned bitfields */
1168 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1170 int n, o, bits;
1171 save_reg_upstack(vtop->r, 1);
1172 vpush64(type->t & VT_BTYPE, 0); // B X
1173 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1174 do {
1175 vswap(); // X B
1176 incr_bf_adr(o);
1177 vdup(); // X B B
1178 n = 8 - bit_pos;
1179 if (n > bit_size)
1180 n = bit_size;
1181 if (bit_pos)
1182 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1183 if (n < 8)
1184 vpushi((1 << n) - 1), gen_op('&');
1185 gen_cast(type);
1186 if (bits)
1187 vpushi(bits), gen_op(TOK_SHL);
1188 vrotb(3); // B Y X
1189 gen_op('|'); // B X
1190 bits += n, bit_size -= n, o = 1;
1191 } while (bit_size);
1192 vswap(), vpop();
1193 if (!(type->t & VT_UNSIGNED)) {
1194 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1195 vpushi(n), gen_op(TOK_SHL);
1196 vpushi(n), gen_op(TOK_SAR);
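/* Editorial sketch (not part of tccgen.c): the value-level effect of the
   loop above for an unsigned field, written as plain C over a byte buffer.
   Each pass takes up to 8 - bit_pos bits from one byte, masks them, and ORs
   them into the result at the running bit offset; for signed fields the
   generated code then sign-extends with the SHL/SAR pair above. */
#if 0
static unsigned long long
extract_packed_bf(const unsigned char *base, int bit_pos, int bit_size)
{
    unsigned long long v = 0;
    int bits = 0, n;
    int o = bit_pos >> 3;                 /* first byte holding the field */
    bit_pos &= 7;                         /* bit offset inside that byte  */
    do {
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        v |= (unsigned long long)((base[o] >> bit_pos) & ((1u << n) - 1)) << bits;
        bits += n, bit_size -= n, bit_pos = 0, o++;
    } while (bit_size);
    return v;
}
#endif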
1200 /* single-byte store mode for packed or otherwise unaligned bitfields */
1201 static void store_packed_bf(int bit_pos, int bit_size)
1203 int bits, n, o, m, c;
1205 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1206 vswap(); // X B
1207 save_reg_upstack(vtop->r, 1);
1208 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1209 do {
1210 incr_bf_adr(o); // X B
1211 vswap(); //B X
1212 c ? vdup() : gv_dup(); // B V X
1213 vrott(3); // X B V
1214 if (bits)
1215 vpushi(bits), gen_op(TOK_SHR);
1216 if (bit_pos)
1217 vpushi(bit_pos), gen_op(TOK_SHL);
1218 n = 8 - bit_pos;
1219 if (n > bit_size)
1220 n = bit_size;
1221 if (n < 8) {
1222 m = ((1 << n) - 1) << bit_pos;
1223 vpushi(m), gen_op('&'); // X B V1
1224 vpushv(vtop-1); // X B V1 B
1225 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1226 gen_op('&'); // X B V1 B1
1227 gen_op('|'); // X B V2
1229 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1230 vstore(), vpop(); // X B
1231 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1232 } while (bit_size);
1233 vpop(), vpop();
1236 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1238 int t;
1239 if (0 == sv->type.ref)
1240 return 0;
1241 t = sv->type.ref->auxtype;
1242 if (t != -1 && t != VT_STRUCT) {
1243 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1244 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1246 return t;
1249 /* store vtop in a register belonging to class 'rc'. lvalues are
1250 converted to values. Cannot be used if the value cannot be
1251 converted to a register value (such as structures). */
1252 ST_FUNC int gv(int rc)
1254 int r, bit_pos, bit_size, size, align, rc2;
1256 /* NOTE: get_reg can modify vstack[] */
1257 if (vtop->type.t & VT_BITFIELD) {
1258 CType type;
1260 bit_pos = BIT_POS(vtop->type.t);
1261 bit_size = BIT_SIZE(vtop->type.t);
1262 /* remove bit field info to avoid loops */
1263 vtop->type.t &= ~VT_STRUCT_MASK;
1265 type.ref = NULL;
1266 type.t = vtop->type.t & VT_UNSIGNED;
1267 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1268 type.t |= VT_UNSIGNED;
1270 r = adjust_bf(vtop, bit_pos, bit_size);
1272 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1273 type.t |= VT_LLONG;
1274 else
1275 type.t |= VT_INT;
1277 if (r == VT_STRUCT) {
1278 load_packed_bf(&type, bit_pos, bit_size);
1279 } else {
1280 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1281 /* cast to int to propagate signedness in following ops */
1282 gen_cast(&type);
1283 /* generate shifts */
1284 vpushi(bits - (bit_pos + bit_size));
1285 gen_op(TOK_SHL);
1286 vpushi(bits - bit_size);
1287 /* NOTE: transformed to SHR if unsigned */
1288 gen_op(TOK_SAR);
1290 r = gv(rc);
1291 } else {
1292 if (is_float(vtop->type.t) &&
1293 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1294 unsigned long offset;
1295 /* CPUs usually cannot use float constants, so we store them
1296 generically in data segment */
1297 size = type_size(&vtop->type, &align);
1298 if (NODATA_WANTED)
1299 size = 0, align = 1;
1300 offset = section_add(data_section, size, align);
1301 vpush_ref(&vtop->type, data_section, offset, size);
1302 vswap();
1303 init_putv(&vtop->type, data_section, offset);
1304 vtop->r |= VT_LVAL;
1306 #ifdef CONFIG_TCC_BCHECK
1307 if (vtop->r & VT_MUSTBOUND)
1308 gbound();
1309 #endif
1311 r = vtop->r & VT_VALMASK;
1312 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1313 #ifndef TCC_TARGET_ARM64
1314 if (rc == RC_IRET)
1315 rc2 = RC_LRET;
1316 #ifdef TCC_TARGET_X86_64
1317 else if (rc == RC_FRET)
1318 rc2 = RC_QRET;
1319 #endif
1320 #endif
1321 /* need to reload if:
1322 - constant
1323 - lvalue (need to dereference pointer)
1324 - already a register, but not in the right class */
1325 if (r >= VT_CONST
1326 || (vtop->r & VT_LVAL)
1327 || !(reg_classes[r] & rc)
1328 #if PTR_SIZE == 8
1329 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1330 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1331 #else
1332 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1333 #endif
1336 r = get_reg(rc);
1337 #if PTR_SIZE == 8
1338 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1339 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1340 #else
1341 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1342 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1343 unsigned long long ll;
1344 #endif
1345 int r2, original_type;
1346 original_type = vtop->type.t;
1347 /* two register type load : expand to two words
1348 temporarily */
1349 #if PTR_SIZE == 4
1350 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1351 /* load constant */
1352 ll = vtop->c.i;
1353 vtop->c.i = ll; /* first word */
1354 load(r, vtop);
1355 vtop->r = r; /* save register value */
1356 vpushi(ll >> 32); /* second word */
1357 } else
1358 #endif
1359 if (vtop->r & VT_LVAL) {
1360 /* We do not want to modify the long long
1361 pointer here, so the safest (and least
1362 efficient) way is to save all the other registers
1363 on the stack. XXX: totally inefficient. */
1364 #if 0
1365 save_regs(1);
1366 #else
1367 /* lvalue_save: save only if used further down the stack */
1368 save_reg_upstack(vtop->r, 1);
1369 #endif
1370 /* load from memory */
1371 vtop->type.t = load_type;
1372 load(r, vtop);
1373 vdup();
1374 vtop[-1].r = r; /* save register value */
1375 /* increment pointer to get second word */
1376 vtop->type.t = addr_type;
1377 gaddrof();
1378 vpushi(load_size);
1379 gen_op('+');
1380 vtop->r |= VT_LVAL;
1381 vtop->type.t = load_type;
1382 } else {
1383 /* move registers */
1384 load(r, vtop);
1385 vdup();
1386 vtop[-1].r = r; /* save register value */
1387 vtop->r = vtop[-1].r2;
1389 /* Allocate second register. Here we rely on the fact that
1390 get_reg() tries first to free r2 of an SValue. */
1391 r2 = get_reg(rc2);
1392 load(r2, vtop);
1393 vpop();
1394 /* write second register */
1395 vtop->r2 = r2;
1396 vtop->type.t = original_type;
1397 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1398 int t1, t;
1399 /* lvalue of scalar type : need to use lvalue type
1400 because of possible cast */
1401 t = vtop->type.t;
1402 t1 = t;
1403 /* compute memory access type */
1404 if (vtop->r & VT_LVAL_BYTE)
1405 t = VT_BYTE;
1406 else if (vtop->r & VT_LVAL_SHORT)
1407 t = VT_SHORT;
1408 if (vtop->r & VT_LVAL_UNSIGNED)
1409 t |= VT_UNSIGNED;
1410 vtop->type.t = t;
1411 load(r, vtop);
1412 /* restore wanted type */
1413 vtop->type.t = t1;
1414 } else {
1415 /* one register type load */
1416 load(r, vtop);
1419 vtop->r = r;
1420 #ifdef TCC_TARGET_C67
1421 /* uses register pairs for doubles */
1422 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1423 vtop->r2 = r+1;
1424 #endif
1426 return r;
1429 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1430 ST_FUNC void gv2(int rc1, int rc2)
1432 int v;
1434 /* generate more generic register first. But VT_JMP or VT_CMP
1435 values must be generated first in all cases to avoid possible
1436 reload errors */
1437 v = vtop[0].r & VT_VALMASK;
1438 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1439 vswap();
1440 gv(rc1);
1441 vswap();
1442 gv(rc2);
1443 /* test if reload is needed for first register */
1444 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1445 vswap();
1446 gv(rc1);
1447 vswap();
1449 } else {
1450 gv(rc2);
1451 vswap();
1452 gv(rc1);
1453 vswap();
1454 /* test if reload is needed for first register */
1455 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1456 gv(rc2);
1461 #ifndef TCC_TARGET_ARM64
1462 /* wrapper around RC_FRET to return a register by type */
1463 static int rc_fret(int t)
1465 #ifdef TCC_TARGET_X86_64
1466 if (t == VT_LDOUBLE) {
1467 return RC_ST0;
1469 #endif
1470 return RC_FRET;
1472 #endif
1474 /* wrapper around REG_FRET to return a register by type */
1475 static int reg_fret(int t)
1477 #ifdef TCC_TARGET_X86_64
1478 if (t == VT_LDOUBLE) {
1479 return TREG_ST0;
1481 #endif
1482 return REG_FRET;
1485 #if PTR_SIZE == 4
1486 /* expand a 64bit value on the stack into two ints */
1487 static void lexpand(void)
1489 int u, v;
1490 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1491 v = vtop->r & (VT_VALMASK | VT_LVAL);
1492 if (v == VT_CONST) {
1493 vdup();
1494 vtop[0].c.i >>= 32;
1495 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1496 vdup();
1497 vtop[0].c.i += 4;
1498 } else {
1499 gv(RC_INT);
1500 vdup();
1501 vtop[0].r = vtop[-1].r2;
1502 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1504 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1506 #endif
1508 #ifdef TCC_TARGET_ARM
1509 /* expand long long on stack */
1510 ST_FUNC void lexpand_nr(void)
1512 int u,v;
1514 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1515 vdup();
1516 vtop->r2 = VT_CONST;
1517 vtop->type.t = VT_INT | u;
1518 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1519 if (v == VT_CONST) {
1520 vtop[-1].c.i = vtop->c.i;
1521 vtop->c.i = vtop->c.i >> 32;
1522 vtop->r = VT_CONST;
1523 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1524 vtop->c.i += 4;
1525 vtop->r = vtop[-1].r;
1526 } else if (v > VT_CONST) {
1527 vtop--;
1528 lexpand();
1529 } else
1530 vtop->r = vtop[-1].r2;
1531 vtop[-1].r2 = VT_CONST;
1532 vtop[-1].type.t = VT_INT | u;
1534 #endif
1536 #if PTR_SIZE == 4
1537 /* build a long long from two ints */
1538 static void lbuild(int t)
1540 gv2(RC_INT, RC_INT);
1541 vtop[-1].r2 = vtop[0].r;
1542 vtop[-1].type.t = t;
1543 vpop();
1545 #endif
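/* Editorial sketch (not part of tccgen.c): what lexpand()/lbuild() model at
   the value level on a PTR_SIZE == 4 target - a 64-bit value lives as a low
   word (vtop->r) and a high word (vtop->r2). */
#if 0
static void split64(unsigned long long v, unsigned int *lo, unsigned int *hi)
{
    *lo = (unsigned int)v;              /* word kept in the first register  */
    *hi = (unsigned int)(v >> 32);      /* word kept in the second register */
}
static unsigned long long join64(unsigned int lo, unsigned int hi)
{
    return (unsigned long long)hi << 32 | lo;  /* what lbuild() reassembles */
}
#endif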
1547 /* convert stack entry to register and duplicate its value in another
1548 register */
1549 static void gv_dup(void)
1551 int rc, t, r, r1;
1552 SValue sv;
1554 t = vtop->type.t;
1555 #if PTR_SIZE == 4
1556 if ((t & VT_BTYPE) == VT_LLONG) {
1557 if (t & VT_BITFIELD) {
1558 gv(RC_INT);
1559 t = vtop->type.t;
1561 lexpand();
1562 gv_dup();
1563 vswap();
1564 vrotb(3);
1565 gv_dup();
1566 vrotb(4);
1567 /* stack: H L L1 H1 */
1568 lbuild(t);
1569 vrotb(3);
1570 vrotb(3);
1571 vswap();
1572 lbuild(t);
1573 vswap();
1574 } else
1575 #endif
1577 /* duplicate value */
1578 rc = RC_INT;
1579 sv.type.t = VT_INT;
1580 if (is_float(t)) {
1581 rc = RC_FLOAT;
1582 #ifdef TCC_TARGET_X86_64
1583 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1584 rc = RC_ST0;
1586 #endif
1587 sv.type.t = t;
1589 r = gv(rc);
1590 r1 = get_reg(rc);
1591 sv.r = r;
1592 sv.c.i = 0;
1593 load(r1, &sv); /* move r to r1 */
1594 vdup();
1595 /* duplicates value */
1596 if (r != r1)
1597 vtop->r = r1;
1601 /* Generate value test
1603 * Generate a test for any value (jump, comparison and integers) */
1604 ST_FUNC int gvtst(int inv, int t)
1606 int v = vtop->r & VT_VALMASK;
1607 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1608 vpushi(0);
1609 gen_op(TOK_NE);
1611 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1612 /* constant jmp optimization */
1613 if ((vtop->c.i != 0) != inv)
1614 t = gjmp(t);
1615 vtop--;
1616 return t;
1618 return gtst(inv, t);
1621 #if PTR_SIZE == 4
1622 /* generate CPU independent (unsigned) long long operations */
1623 static void gen_opl(int op)
1625 int t, a, b, op1, c, i;
1626 int func;
1627 unsigned short reg_iret = REG_IRET;
1628 unsigned short reg_lret = REG_LRET;
1629 SValue tmp;
1631 switch(op) {
1632 case '/':
1633 case TOK_PDIV:
1634 func = TOK___divdi3;
1635 goto gen_func;
1636 case TOK_UDIV:
1637 func = TOK___udivdi3;
1638 goto gen_func;
1639 case '%':
1640 func = TOK___moddi3;
1641 goto gen_mod_func;
1642 case TOK_UMOD:
1643 func = TOK___umoddi3;
1644 gen_mod_func:
1645 #ifdef TCC_ARM_EABI
1646 reg_iret = TREG_R2;
1647 reg_lret = TREG_R3;
1648 #endif
1649 gen_func:
1650 /* call generic long long function */
1651 vpush_global_sym(&func_old_type, func);
1652 vrott(3);
1653 gfunc_call(2);
1654 vpushi(0);
1655 vtop->r = reg_iret;
1656 vtop->r2 = reg_lret;
1657 break;
1658 case '^':
1659 case '&':
1660 case '|':
1661 case '*':
1662 case '+':
1663 case '-':
1664 //pv("gen_opl A",0,2);
1665 t = vtop->type.t;
1666 vswap();
1667 lexpand();
1668 vrotb(3);
1669 lexpand();
1670 /* stack: L1 H1 L2 H2 */
1671 tmp = vtop[0];
1672 vtop[0] = vtop[-3];
1673 vtop[-3] = tmp;
1674 tmp = vtop[-2];
1675 vtop[-2] = vtop[-3];
1676 vtop[-3] = tmp;
1677 vswap();
1678 /* stack: H1 H2 L1 L2 */
1679 //pv("gen_opl B",0,4);
1680 if (op == '*') {
1681 vpushv(vtop - 1);
1682 vpushv(vtop - 1);
1683 gen_op(TOK_UMULL);
1684 lexpand();
1685 /* stack: H1 H2 L1 L2 ML MH */
1686 for(i=0;i<4;i++)
1687 vrotb(6);
1688 /* stack: ML MH H1 H2 L1 L2 */
1689 tmp = vtop[0];
1690 vtop[0] = vtop[-2];
1691 vtop[-2] = tmp;
1692 /* stack: ML MH H1 L2 H2 L1 */
1693 gen_op('*');
1694 vrotb(3);
1695 vrotb(3);
1696 gen_op('*');
1697 /* stack: ML MH M1 M2 */
1698 gen_op('+');
1699 gen_op('+');
1700 } else if (op == '+' || op == '-') {
1701 /* XXX: add non carry method too (for MIPS or alpha) */
1702 if (op == '+')
1703 op1 = TOK_ADDC1;
1704 else
1705 op1 = TOK_SUBC1;
1706 gen_op(op1);
1707 /* stack: H1 H2 (L1 op L2) */
1708 vrotb(3);
1709 vrotb(3);
1710 gen_op(op1 + 1); /* TOK_xxxC2 */
1711 } else {
1712 gen_op(op);
1713 /* stack: H1 H2 (L1 op L2) */
1714 vrotb(3);
1715 vrotb(3);
1716 /* stack: (L1 op L2) H1 H2 */
1717 gen_op(op);
1718 /* stack: (L1 op L2) (H1 op H2) */
1720 /* stack: L H */
1721 lbuild(t);
1722 break;
1723 case TOK_SAR:
1724 case TOK_SHR:
1725 case TOK_SHL:
1726 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1727 t = vtop[-1].type.t;
1728 vswap();
1729 lexpand();
1730 vrotb(3);
1731 /* stack: L H shift */
1732 c = (int)vtop->c.i;
1733 /* constant: simpler */
1734 /* NOTE: all comments are for SHL. The other cases are
1735 done by swapping words */
1736 vpop();
1737 if (op != TOK_SHL)
1738 vswap();
1739 if (c >= 32) {
1740 /* stack: L H */
1741 vpop();
1742 if (c > 32) {
1743 vpushi(c - 32);
1744 gen_op(op);
1746 if (op != TOK_SAR) {
1747 vpushi(0);
1748 } else {
1749 gv_dup();
1750 vpushi(31);
1751 gen_op(TOK_SAR);
1753 vswap();
1754 } else {
1755 vswap();
1756 gv_dup();
1757 /* stack: H L L */
1758 vpushi(c);
1759 gen_op(op);
1760 vswap();
1761 vpushi(32 - c);
1762 if (op == TOK_SHL)
1763 gen_op(TOK_SHR);
1764 else
1765 gen_op(TOK_SHL);
1766 vrotb(3);
1767 /* stack: L L H */
1768 vpushi(c);
1769 if (op == TOK_SHL)
1770 gen_op(TOK_SHL);
1771 else
1772 gen_op(TOK_SHR);
1773 gen_op('|');
1775 if (op != TOK_SHL)
1776 vswap();
1777 lbuild(t);
1778 } else {
1779 /* XXX: should provide a faster fallback on x86 ? */
1780 switch(op) {
1781 case TOK_SAR:
1782 func = TOK___ashrdi3;
1783 goto gen_func;
1784 case TOK_SHR:
1785 func = TOK___lshrdi3;
1786 goto gen_func;
1787 case TOK_SHL:
1788 func = TOK___ashldi3;
1789 goto gen_func;
1792 break;
1793 default:
1794 /* compare operations */
1795 t = vtop->type.t;
1796 vswap();
1797 lexpand();
1798 vrotb(3);
1799 lexpand();
1800 /* stack: L1 H1 L2 H2 */
1801 tmp = vtop[-1];
1802 vtop[-1] = vtop[-2];
1803 vtop[-2] = tmp;
1804 /* stack: L1 L2 H1 H2 */
1805 /* compare high */
1806 op1 = op;
1807 /* when values are equal, we need to compare low words. since
1808 the jump is inverted, we invert the test too. */
1809 if (op1 == TOK_LT)
1810 op1 = TOK_LE;
1811 else if (op1 == TOK_GT)
1812 op1 = TOK_GE;
1813 else if (op1 == TOK_ULT)
1814 op1 = TOK_ULE;
1815 else if (op1 == TOK_UGT)
1816 op1 = TOK_UGE;
1817 a = 0;
1818 b = 0;
1819 gen_op(op1);
1820 if (op == TOK_NE) {
1821 b = gvtst(0, 0);
1822 } else {
1823 a = gvtst(1, 0);
1824 if (op != TOK_EQ) {
1825 /* generate non equal test */
1826 vpushi(TOK_NE);
1827 vtop->r = VT_CMP;
1828 b = gvtst(0, 0);
1831 /* compare low. Always unsigned */
1832 op1 = op;
1833 if (op1 == TOK_LT)
1834 op1 = TOK_ULT;
1835 else if (op1 == TOK_LE)
1836 op1 = TOK_ULE;
1837 else if (op1 == TOK_GT)
1838 op1 = TOK_UGT;
1839 else if (op1 == TOK_GE)
1840 op1 = TOK_UGE;
1841 gen_op(op1);
1842 a = gvtst(1, a);
1843 gsym(b);
1844 vseti(VT_JMPI, a);
1845 break;
1848 #endif
1850 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1852 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1853 return (a ^ b) >> 63 ? -x : x;
1856 static int gen_opic_lt(uint64_t a, uint64_t b)
1858 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
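/* Editorial sketch (not part of tccgen.c): gen_opic_lt() turns a signed
   64-bit '<' into an unsigned compare by flipping the sign bit, which maps
   INT64_MIN..INT64_MAX monotonically onto 0..UINT64_MAX; gen_opic_sdiv()
   divides magnitudes (a >> 63 tests the sign) and restores the sign from
   (a ^ b) >> 63, matching C's truncation toward zero. */
#if 0
#include <assert.h>
static void check_signed_tricks(void)
{
    assert(gen_opic_lt((uint64_t)-1, 0) == 1);              /* -1 <  0      */
    assert(gen_opic_lt(0, (uint64_t)-1) == 0);              /*  0 < -1 : no */
    assert(gen_opic_sdiv((uint64_t)-7, 2) == (uint64_t)-3); /* -7 / 2 == -3 */
}
#endif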
1861 /* handle integer constant optimizations and various machine
1862 independent opt */
1863 static void gen_opic(int op)
1865 SValue *v1 = vtop - 1;
1866 SValue *v2 = vtop;
1867 int t1 = v1->type.t & VT_BTYPE;
1868 int t2 = v2->type.t & VT_BTYPE;
1869 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1870 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1871 uint64_t l1 = c1 ? v1->c.i : 0;
1872 uint64_t l2 = c2 ? v2->c.i : 0;
1873 int shm = (t1 == VT_LLONG) ? 63 : 31;
1875 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1876 l1 = ((uint32_t)l1 |
1877 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1878 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1879 l2 = ((uint32_t)l2 |
1880 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1882 if (c1 && c2) {
1883 switch(op) {
1884 case '+': l1 += l2; break;
1885 case '-': l1 -= l2; break;
1886 case '&': l1 &= l2; break;
1887 case '^': l1 ^= l2; break;
1888 case '|': l1 |= l2; break;
1889 case '*': l1 *= l2; break;
1891 case TOK_PDIV:
1892 case '/':
1893 case '%':
1894 case TOK_UDIV:
1895 case TOK_UMOD:
1896 /* if division by zero, generate explicit division */
1897 if (l2 == 0) {
1898 if (const_wanted)
1899 tcc_error("division by zero in constant");
1900 goto general_case;
1902 switch(op) {
1903 default: l1 = gen_opic_sdiv(l1, l2); break;
1904 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1905 case TOK_UDIV: l1 = l1 / l2; break;
1906 case TOK_UMOD: l1 = l1 % l2; break;
1908 break;
1909 case TOK_SHL: l1 <<= (l2 & shm); break;
1910 case TOK_SHR: l1 >>= (l2 & shm); break;
1911 case TOK_SAR:
1912 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1913 break;
1914 /* tests */
1915 case TOK_ULT: l1 = l1 < l2; break;
1916 case TOK_UGE: l1 = l1 >= l2; break;
1917 case TOK_EQ: l1 = l1 == l2; break;
1918 case TOK_NE: l1 = l1 != l2; break;
1919 case TOK_ULE: l1 = l1 <= l2; break;
1920 case TOK_UGT: l1 = l1 > l2; break;
1921 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1922 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1923 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1924 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1925 /* logical */
1926 case TOK_LAND: l1 = l1 && l2; break;
1927 case TOK_LOR: l1 = l1 || l2; break;
1928 default:
1929 goto general_case;
1931 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1932 l1 = ((uint32_t)l1 |
1933 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1934 v1->c.i = l1;
1935 vtop--;
1936 } else {
1937 /* if commutative ops, put c2 as constant */
1938 if (c1 && (op == '+' || op == '&' || op == '^' ||
1939 op == '|' || op == '*')) {
1940 vswap();
1941 c2 = c1; //c = c1, c1 = c2, c2 = c;
1942 l2 = l1; //l = l1, l1 = l2, l2 = l;
1944 if (!const_wanted &&
1945 c1 && ((l1 == 0 &&
1946 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1947 (l1 == -1 && op == TOK_SAR))) {
1948 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1949 vtop--;
1950 } else if (!const_wanted &&
1951 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1952 (op == '|' &&
1953 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1954 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1955 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1956 if (l2 == 1)
1957 vtop->c.i = 0;
1958 vswap();
1959 vtop--;
1960 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1961 op == TOK_PDIV) &&
1962 l2 == 1) ||
1963 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1964 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1965 l2 == 0) ||
1966 (op == '&' &&
1967 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1968 /* filter out NOP operations like x*1, x-0, x&-1... */
1969 vtop--;
1970 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1971 /* try to use shifts instead of muls or divs */
1972 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1973 int n = -1;
1974 while (l2) {
1975 l2 >>= 1;
1976 n++;
1978 vtop->c.i = n;
1979 if (op == '*')
1980 op = TOK_SHL;
1981 else if (op == TOK_PDIV)
1982 op = TOK_SAR;
1983 else
1984 op = TOK_SHR;
1986 goto general_case;
1987 } else if (c2 && (op == '+' || op == '-') &&
1988 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1989 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1990 /* symbol + constant case */
1991 if (op == '-')
1992 l2 = -l2;
1993 l2 += vtop[-1].c.i;
1994 /* The backends can't always deal with addends to symbols
1995 larger than +-1<<31. Don't construct such. */
1996 if ((int)l2 != l2)
1997 goto general_case;
1998 vtop--;
1999 vtop->c.i = l2;
2000 } else {
2001 general_case:
2002 /* call low level op generator */
2003 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2004 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2005 gen_opl(op);
2006 else
2007 gen_opi(op);
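/* Editorial sketch (not part of tccgen.c): the strength reduction above
   replaces a multiply/divide by a power of two with a shift; the loop that
   derives the shift count is equivalent to this helper. */
#if 0
static int shift_count(unsigned long long l2)  /* l2 must be a power of two */
{
    int n = -1;
    while (l2) {
        l2 >>= 1;
        n++;
    }
    return n;   /* e.g. l2 == 8 gives 3, so x * 8 becomes x << 3 */
}
#endif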
2012 /* generate a floating point operation with constant propagation */
2013 static void gen_opif(int op)
2015 int c1, c2;
2016 SValue *v1, *v2;
2017 #if defined _MSC_VER && defined _AMD64_
2018 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2019 volatile
2020 #endif
2021 long double f1, f2;
2023 v1 = vtop - 1;
2024 v2 = vtop;
2025 /* currently, we cannot do computations with forward symbols */
2026 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2027 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2028 if (c1 && c2) {
2029 if (v1->type.t == VT_FLOAT) {
2030 f1 = v1->c.f;
2031 f2 = v2->c.f;
2032 } else if (v1->type.t == VT_DOUBLE) {
2033 f1 = v1->c.d;
2034 f2 = v2->c.d;
2035 } else {
2036 f1 = v1->c.ld;
2037 f2 = v2->c.ld;
2040 /* NOTE: we only do constant propagation on finite numbers (not
2041 NaN or infinity) (ANSI spec) */
2042 if (!ieee_finite(f1) || !ieee_finite(f2))
2043 goto general_case;
2045 switch(op) {
2046 case '+': f1 += f2; break;
2047 case '-': f1 -= f2; break;
2048 case '*': f1 *= f2; break;
2049 case '/':
2050 if (f2 == 0.0) {
2051 /* If not in initializer we need to potentially generate
2052 FP exceptions at runtime, otherwise we want to fold. */
2053 if (!const_wanted)
2054 goto general_case;
2056 f1 /= f2;
2057 break;
2058 /* XXX: also handles tests ? */
2059 default:
2060 goto general_case;
2062 /* XXX: overflow test ? */
2063 if (v1->type.t == VT_FLOAT) {
2064 v1->c.f = f1;
2065 } else if (v1->type.t == VT_DOUBLE) {
2066 v1->c.d = f1;
2067 } else {
2068 v1->c.ld = f1;
2070 vtop--;
2071 } else {
2072 general_case:
2073 gen_opf(op);
2077 static int pointed_size(CType *type)
2079 int align;
2080 return type_size(pointed_type(type), &align);
2083 static void vla_runtime_pointed_size(CType *type)
2085 int align;
2086 vla_runtime_type_size(pointed_type(type), &align);
2089 static inline int is_null_pointer(SValue *p)
2091 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2092 return 0;
2093 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2094 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2095 ((p->type.t & VT_BTYPE) == VT_PTR &&
2096 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2099 static inline int is_integer_btype(int bt)
2101 return (bt == VT_BYTE || bt == VT_SHORT ||
2102 bt == VT_INT || bt == VT_LLONG);
2105 /* check types for comparison or subtraction of pointers */
2106 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2108 CType *type1, *type2, tmp_type1, tmp_type2;
2109 int bt1, bt2;
2111 /* null pointers are accepted for all comparisons, as in gcc */
2112 if (is_null_pointer(p1) || is_null_pointer(p2))
2113 return;
2114 type1 = &p1->type;
2115 type2 = &p2->type;
2116 bt1 = type1->t & VT_BTYPE;
2117 bt2 = type2->t & VT_BTYPE;
2118 /* accept comparison between pointer and integer with a warning */
2119 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2120 if (op != TOK_LOR && op != TOK_LAND )
2121 tcc_warning("comparison between pointer and integer");
2122 return;
2125 /* both must be pointers or implicit function pointers */
2126 if (bt1 == VT_PTR) {
2127 type1 = pointed_type(type1);
2128 } else if (bt1 != VT_FUNC)
2129 goto invalid_operands;
2131 if (bt2 == VT_PTR) {
2132 type2 = pointed_type(type2);
2133 } else if (bt2 != VT_FUNC) {
2134 invalid_operands:
2135 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2137 if ((type1->t & VT_BTYPE) == VT_VOID ||
2138 (type2->t & VT_BTYPE) == VT_VOID)
2139 return;
2140 tmp_type1 = *type1;
2141 tmp_type2 = *type2;
2142 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2143 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2144 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2145 /* gcc-like error if '-' is used */
2146 if (op == '-')
2147 goto invalid_operands;
2148 else
2149 tcc_warning("comparison of distinct pointer types lacks a cast");
2153 /* generic gen_op: handles types problems */
2154 ST_FUNC void gen_op(int op)
2156 int u, t1, t2, bt1, bt2, t;
2157 CType type1;
2159 redo:
2160 t1 = vtop[-1].type.t;
2161 t2 = vtop[0].type.t;
2162 bt1 = t1 & VT_BTYPE;
2163 bt2 = t2 & VT_BTYPE;
2165 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2166 tcc_error("operation on a struct");
2167 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2168 if (bt2 == VT_FUNC) {
2169 mk_pointer(&vtop->type);
2170 gaddrof();
2172 if (bt1 == VT_FUNC) {
2173 vswap();
2174 mk_pointer(&vtop->type);
2175 gaddrof();
2176 vswap();
2178 goto redo;
2179 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2180 /* at least one operand is a pointer */
2181 /* relational op: must be both pointers */
2182 if (op >= TOK_ULT && op <= TOK_LOR) {
2183 check_comparison_pointer_types(vtop - 1, vtop, op);
2184 /* pointers are handled as unsigned */
2185 #if PTR_SIZE == 8
2186 t = VT_LLONG | VT_UNSIGNED;
2187 #else
2188 t = VT_INT | VT_UNSIGNED;
2189 #endif
2190 goto std_op;
2192 /* if both pointers, then it must be the '-' op */
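/* Illustrative note (not part of the original source): for "int *p, *q;"
   the difference "p - q" is first computed as a raw byte difference by
   gen_opic('-'), given the type ptrdiff_t, and then divided by the
   element size (pushed just below) via gen_op(TOK_PDIV), so the result
   counts elements rather than bytes. */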
2193 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2194 if (op != '-')
2195 tcc_error("cannot use pointers here");
2196 check_comparison_pointer_types(vtop - 1, vtop, op);
2197 /* XXX: check that types are compatible */
2198 if (vtop[-1].type.t & VT_VLA) {
2199 vla_runtime_pointed_size(&vtop[-1].type);
2200 } else {
2201 vpushi(pointed_size(&vtop[-1].type));
2203 vrott(3);
2204 gen_opic(op);
2205 vtop->type.t = ptrdiff_type.t;
2206 vswap();
2207 gen_op(TOK_PDIV);
2208 } else {
2209 /* exactly one pointer : must be '+' or '-'. */
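/* Illustrative note (not part of the original source): for "int *p; p + n"
   the integer operand is scaled first -- the element size (sizeof(int)
   here, or a runtime value for VLA element types) is pushed and
   multiplied with n via gen_op('*') -- so the addition below advances
   the pointer by whole elements, not bytes. */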
2210 if (op != '-' && op != '+')
2211 tcc_error("cannot use pointers here");
2212 /* Put pointer as first operand */
2213 if (bt2 == VT_PTR) {
2214 vswap();
2215 t = t1, t1 = t2, t2 = t;
2217 #if PTR_SIZE == 4
2218 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2219 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2220 gen_cast_s(VT_INT);
2221 #endif
2222 type1 = vtop[-1].type;
2223 type1.t &= ~VT_ARRAY;
2224 if (vtop[-1].type.t & VT_VLA)
2225 vla_runtime_pointed_size(&vtop[-1].type);
2226 else {
2227 u = pointed_size(&vtop[-1].type);
2228 if (u < 0)
2229 tcc_error("unknown array element size");
2230 #if PTR_SIZE == 8
2231 vpushll(u);
2232 #else
2233 /* XXX: cast to int ? (long long case) */
2234 vpushi(u);
2235 #endif
2237 gen_op('*');
2238 #if 0
2239 /* #ifdef CONFIG_TCC_BCHECK
2240 The main reason for removing this code:
2241 #include <stdio.h>
2242 int main ()
2244 int v[10];
2245 int i = 10;
2246 int j = 9;
2247 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2248 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2250 When this code is enabled, the output looks like:
2251 v+i-j = 0xfffffffe
2252 v+(i-j) = 0xbff84000
2254 /* if evaluating constant expression, no code should be
2255 generated, so no bound check */
2256 if (tcc_state->do_bounds_check && !const_wanted) {
2257 /* if bounded pointers, we generate a special code to
2258 test bounds */
2259 if (op == '-') {
2260 vpushi(0);
2261 vswap();
2262 gen_op('-');
2264 gen_bounded_ptr_add();
2265 } else
2266 #endif
2268 gen_opic(op);
2270 /* restore the type in case gen_opic() swapped the operands */
2271 vtop->type = type1;
2273 } else if (is_float(bt1) || is_float(bt2)) {
2274 /* compute bigger type and do implicit casts */
2275 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2276 t = VT_LDOUBLE;
2277 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2278 t = VT_DOUBLE;
2279 } else {
2280 t = VT_FLOAT;
2282 /* floats can only be used for a few operations */
2283 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2284 (op < TOK_ULT || op > TOK_GT))
2285 tcc_error("invalid operands for binary operation");
2286 goto std_op;
2287 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2288 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2289 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2290 t |= VT_UNSIGNED;
2291 t |= (VT_LONG & t1);
2292 goto std_op;
2293 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2294 /* cast to biggest op */
2295 t = VT_LLONG | VT_LONG;
2296 if (bt1 == VT_LLONG)
2297 t &= t1;
2298 if (bt2 == VT_LLONG)
2299 t &= t2;
2300 /* convert to unsigned if it does not fit in a long long */
2301 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2302 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2303 t |= VT_UNSIGNED;
2304 goto std_op;
2305 } else {
2306 /* integer operations */
2307 t = VT_INT | (VT_LONG & (t1 | t2));
2308 /* convert to unsigned if it does not fit in an integer */
2309 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2310 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2311 t |= VT_UNSIGNED;
2312 std_op:
2313 /* XXX: currently, some unsigned operations are explicit, so
2314 we modify them here */
2315 if (t & VT_UNSIGNED) {
2316 if (op == TOK_SAR)
2317 op = TOK_SHR;
2318 else if (op == '/')
2319 op = TOK_UDIV;
2320 else if (op == '%')
2321 op = TOK_UMOD;
2322 else if (op == TOK_LT)
2323 op = TOK_ULT;
2324 else if (op == TOK_GT)
2325 op = TOK_UGT;
2326 else if (op == TOK_LE)
2327 op = TOK_ULE;
2328 else if (op == TOK_GE)
2329 op = TOK_UGE;
2331 vswap();
2332 type1.t = t;
2333 type1.ref = NULL;
2334 gen_cast(&type1);
2335 vswap();
2336 /* special case for shifts and long long: we keep the shift as
2337 an integer */
2338 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2339 type1.t = VT_INT;
2340 gen_cast(&type1);
2341 if (is_float(t))
2342 gen_opif(op);
2343 else
2344 gen_opic(op);
2345 if (op >= TOK_ULT && op <= TOK_GT) {
2346 /* relational op: the result is an int */
2347 vtop->type.t = VT_INT;
2348 } else {
2349 vtop->type.t = t;
2352 // Make sure that we have converted to an rvalue:
2353 if (vtop->r & VT_LVAL)
2354 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2357 #ifndef TCC_TARGET_ARM
2358 /* generic itof for unsigned long long case */
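/* Illustrative note (not part of the original source): conversions the
   backend cannot do natively are lowered to runtime helper calls, e.g.
   "(double)x" with "unsigned long long x" becomes a call to
   __floatundidf (__floatundisf for float, __floatundixf for long
   double), with the result fetched from the float return register. */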
2359 static void gen_cvt_itof1(int t)
2361 #ifdef TCC_TARGET_ARM64
2362 gen_cvt_itof(t);
2363 #else
2364 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2365 (VT_LLONG | VT_UNSIGNED)) {
2367 if (t == VT_FLOAT)
2368 vpush_global_sym(&func_old_type, TOK___floatundisf);
2369 #if LDOUBLE_SIZE != 8
2370 else if (t == VT_LDOUBLE)
2371 vpush_global_sym(&func_old_type, TOK___floatundixf);
2372 #endif
2373 else
2374 vpush_global_sym(&func_old_type, TOK___floatundidf);
2375 vrott(2);
2376 gfunc_call(1);
2377 vpushi(0);
2378 vtop->r = reg_fret(t);
2379 } else {
2380 gen_cvt_itof(t);
2382 #endif
2384 #endif
2386 /* generic ftoi for unsigned long long case */
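/* Illustrative note (not part of the original source): the mirror case,
   e.g. "(unsigned long long)d" for a double d, becomes a call to
   __fixunsdfdi (__fixunssfdi / __fixunsxfdi for float / long double),
   and the 64-bit result is picked up from REG_IRET / REG_LRET. */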
2387 static void gen_cvt_ftoi1(int t)
2389 #ifdef TCC_TARGET_ARM64
2390 gen_cvt_ftoi(t);
2391 #else
2392 int st;
2394 if (t == (VT_LLONG | VT_UNSIGNED)) {
2395 /* not handled natively */
2396 st = vtop->type.t & VT_BTYPE;
2397 if (st == VT_FLOAT)
2398 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2399 #if LDOUBLE_SIZE != 8
2400 else if (st == VT_LDOUBLE)
2401 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2402 #endif
2403 else
2404 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2405 vrott(2);
2406 gfunc_call(1);
2407 vpushi(0);
2408 vtop->r = REG_IRET;
2409 vtop->r2 = REG_LRET;
2410 } else {
2411 gen_cvt_ftoi(t);
2413 #endif
2416 /* force char or short cast */
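/* Illustrative note (not part of the original source): the narrowing is
   done with ordinary arithmetic on the value stack -- an unsigned
   target is masked with (1 << bits) - 1, a signed one is shifted left
   and then arithmetically shifted right by 32 - bits (or 64 - bits for
   long long sources).  E.g. 0x1234 cast to signed char yields 0x34,
   and 0x1ff cast to signed char yields -1. */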
2417 static void force_charshort_cast(int t)
2419 int bits, dbt;
2421 /* cannot cast static initializers */
2422 if (STATIC_DATA_WANTED)
2423 return;
2425 dbt = t & VT_BTYPE;
2426 /* XXX: add optimization if lvalue : just change type and offset */
2427 if (dbt == VT_BYTE)
2428 bits = 8;
2429 else
2430 bits = 16;
2431 if (t & VT_UNSIGNED) {
2432 vpushi((1 << bits) - 1);
2433 gen_op('&');
2434 } else {
2435 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2436 bits = 64 - bits;
2437 else
2438 bits = 32 - bits;
2439 vpushi(bits);
2440 gen_op(TOK_SHL);
2441 /* result must be signed or the SAR is converted to an SHL
2442 This was not the case when "t" was a signed short
2443 and the last value on the stack was an unsigned int */
2444 vtop->type.t &= ~VT_UNSIGNED;
2445 vpushi(bits);
2446 gen_op(TOK_SAR);
2450 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2451 static void gen_cast_s(int t)
2453 CType type;
2454 type.t = t;
2455 type.ref = NULL;
2456 gen_cast(&type);
2459 static void gen_cast(CType *type)
2461 int sbt, dbt, sf, df, c, p;
2463 /* special delayed cast for char/short */
2464 /* XXX: in some cases (multiple cascaded casts), it may still
2465 be incorrect */
2466 if (vtop->r & VT_MUSTCAST) {
2467 vtop->r &= ~VT_MUSTCAST;
2468 force_charshort_cast(vtop->type.t);
2471 /* bitfields first get cast to ints */
2472 if (vtop->type.t & VT_BITFIELD) {
2473 gv(RC_INT);
2476 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2477 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2479 if (sbt != dbt) {
2480 sf = is_float(sbt);
2481 df = is_float(dbt);
2482 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2483 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2484 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2485 c &= dbt != VT_LDOUBLE;
2486 #endif
2487 if (c) {
2488 /* constant case: we can do it now */
2489 /* XXX: in ISOC, cannot do it if error in convert */
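/* Illustrative note (not part of the original source): for integer
   targets the constant is masked and then sign-extended when the
   destination is signed (see the 'm' mask logic below), so
   (signed char)300 folds to 44 and (signed char)200 folds to -56,
   while (unsigned char)200 stays 200. */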
2490 if (sbt == VT_FLOAT)
2491 vtop->c.ld = vtop->c.f;
2492 else if (sbt == VT_DOUBLE)
2493 vtop->c.ld = vtop->c.d;
2495 if (df) {
2496 if ((sbt & VT_BTYPE) == VT_LLONG) {
2497 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2498 vtop->c.ld = vtop->c.i;
2499 else
2500 vtop->c.ld = -(long double)-vtop->c.i;
2501 } else if(!sf) {
2502 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2503 vtop->c.ld = (uint32_t)vtop->c.i;
2504 else
2505 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2508 if (dbt == VT_FLOAT)
2509 vtop->c.f = (float)vtop->c.ld;
2510 else if (dbt == VT_DOUBLE)
2511 vtop->c.d = (double)vtop->c.ld;
2512 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2513 vtop->c.i = vtop->c.ld;
2514 } else if (sf && dbt == VT_BOOL) {
2515 vtop->c.i = (vtop->c.ld != 0);
2516 } else {
2517 if(sf)
2518 vtop->c.i = vtop->c.ld;
2519 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2521 else if (sbt & VT_UNSIGNED)
2522 vtop->c.i = (uint32_t)vtop->c.i;
2523 #if PTR_SIZE == 8
2524 else if (sbt == VT_PTR)
2526 #endif
2527 else if (sbt != VT_LLONG)
2528 vtop->c.i = ((uint32_t)vtop->c.i |
2529 -(vtop->c.i & 0x80000000));
2531 if (dbt == (VT_LLONG|VT_UNSIGNED))
2533 else if (dbt == VT_BOOL)
2534 vtop->c.i = (vtop->c.i != 0);
2535 #if PTR_SIZE == 8
2536 else if (dbt == VT_PTR)
2538 #endif
2539 else if (dbt != VT_LLONG) {
2540 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2541 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2542 0xffffffff);
2543 vtop->c.i &= m;
2544 if (!(dbt & VT_UNSIGNED))
2545 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2548 } else if (p && dbt == VT_BOOL) {
2549 vtop->r = VT_CONST;
2550 vtop->c.i = 1;
2551 } else {
2552 /* non constant case: generate code */
2553 if (sf && df) {
2554 /* convert from fp to fp */
2555 gen_cvt_ftof(dbt);
2556 } else if (df) {
2557 /* convert int to fp */
2558 gen_cvt_itof1(dbt);
2559 } else if (sf) {
2560 /* convert fp to int */
2561 if (dbt == VT_BOOL) {
2562 vpushi(0);
2563 gen_op(TOK_NE);
2564 } else {
2565 /* we handle char/short/etc... with generic code */
2566 if (dbt != (VT_INT | VT_UNSIGNED) &&
2567 dbt != (VT_LLONG | VT_UNSIGNED) &&
2568 dbt != VT_LLONG)
2569 dbt = VT_INT;
2570 gen_cvt_ftoi1(dbt);
2571 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2572 /* additional cast for char/short... */
2573 vtop->type.t = dbt;
2574 gen_cast(type);
2577 #if PTR_SIZE == 4
2578 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2579 if ((sbt & VT_BTYPE) != VT_LLONG) {
2580 /* scalar to long long */
2581 /* machine independent conversion */
2582 gv(RC_INT);
2583 /* generate high word */
2584 if (sbt == (VT_INT | VT_UNSIGNED)) {
2585 vpushi(0);
2586 gv(RC_INT);
2587 } else {
2588 if (sbt == VT_PTR) {
2589 /* cast from pointer to int before we apply
2590 shift operation, which pointers don't support */
2591 gen_cast_s(VT_INT);
2593 gv_dup();
2594 vpushi(31);
2595 gen_op(TOK_SAR);
2597 /* patch second register */
2598 vtop[-1].r2 = vtop->r;
2599 vpop();
2601 #else
2602 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2603 (dbt & VT_BTYPE) == VT_PTR ||
2604 (dbt & VT_BTYPE) == VT_FUNC) {
2605 if ((sbt & VT_BTYPE) != VT_LLONG &&
2606 (sbt & VT_BTYPE) != VT_PTR &&
2607 (sbt & VT_BTYPE) != VT_FUNC) {
2608 /* need to convert from 32bit to 64bit */
2609 gv(RC_INT);
2610 if (sbt != (VT_INT | VT_UNSIGNED)) {
2611 #if defined(TCC_TARGET_ARM64)
2612 gen_cvt_sxtw();
2613 #elif defined(TCC_TARGET_X86_64)
2614 int r = gv(RC_INT);
2615 /* x86_64 specific: movslq */
2616 o(0x6348);
2617 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2618 #else
2619 #error
2620 #endif
2623 #endif
2624 } else if (dbt == VT_BOOL) {
2625 /* scalar to bool */
2626 vpushi(0);
2627 gen_op(TOK_NE);
2628 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2629 (dbt & VT_BTYPE) == VT_SHORT) {
2630 if (sbt == VT_PTR) {
2631 vtop->type.t = VT_INT;
2632 tcc_warning("nonportable conversion from pointer to char/short");
2634 force_charshort_cast(dbt);
2635 } else if ((dbt & VT_BTYPE) == VT_INT) {
2636 /* scalar to int */
2637 if ((sbt & VT_BTYPE) == VT_LLONG) {
2638 #if PTR_SIZE == 4
2639 /* from long long: just take low order word */
2640 lexpand();
2641 vpop();
2642 #else
2643 vpushi(0xffffffff);
2644 vtop->type.t |= VT_UNSIGNED;
2645 gen_op('&');
2646 #endif
2648 /* if lvalue and single word type, nothing to do because
2649 the lvalue already contains the real type size (see
2650 VT_LVAL_xxx constants) */
2653 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2654 /* if we are casting between pointer types,
2655 we must update the VT_LVAL_xxx size */
2656 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2657 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2659 vtop->type = *type;
2662 /* return type size as known at compile time. Put alignment at 'a' */
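/* Illustrative note (not part of the original source), assuming a
   typical ILP32/LP64 target: type_size() of "short" returns 2 with
   *a == 2, of "int[10]" returns 40 with *a == 4, and for a struct it
   just reads back the size/alignment that struct_layout() recorded in
   ref->c and ref->r.  A negative result means the type is incomplete. */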
2663 ST_FUNC int type_size(CType *type, int *a)
2665 Sym *s;
2666 int bt;
2668 bt = type->t & VT_BTYPE;
2669 if (bt == VT_STRUCT) {
2670 /* struct/union */
2671 s = type->ref;
2672 *a = s->r;
2673 return s->c;
2674 } else if (bt == VT_PTR) {
2675 if (type->t & VT_ARRAY) {
2676 int ts;
2678 s = type->ref;
2679 ts = type_size(&s->type, a);
2681 if (ts < 0 && s->c < 0)
2682 ts = -ts;
2684 return ts * s->c;
2685 } else {
2686 *a = PTR_SIZE;
2687 return PTR_SIZE;
2689 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2690 return -1; /* incomplete enum */
2691 } else if (bt == VT_LDOUBLE) {
2692 *a = LDOUBLE_ALIGN;
2693 return LDOUBLE_SIZE;
2694 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2695 #ifdef TCC_TARGET_I386
2696 #ifdef TCC_TARGET_PE
2697 *a = 8;
2698 #else
2699 *a = 4;
2700 #endif
2701 #elif defined(TCC_TARGET_ARM)
2702 #ifdef TCC_ARM_EABI
2703 *a = 8;
2704 #else
2705 *a = 4;
2706 #endif
2707 #else
2708 *a = 8;
2709 #endif
2710 return 8;
2711 } else if (bt == VT_INT || bt == VT_FLOAT) {
2712 *a = 4;
2713 return 4;
2714 } else if (bt == VT_SHORT) {
2715 *a = 2;
2716 return 2;
2717 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2718 *a = 8;
2719 return 16;
2720 } else {
2721 /* char, void, function, _Bool */
2722 *a = 1;
2723 return 1;
2727 /* push type size as known at runtime on top of the value stack. Put
2728 alignment at 'a' */
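/* Illustrative note (not part of the original source): for a VLA such
   as "int a[n]" the total size is not a compile-time constant, so the
   value previously stored in a hidden local slot (at type->ref->c) is
   pushed as an lvalue; for ordinary types this reduces to
   vpushi(type_size(type, a)). */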
2729 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2731 if (type->t & VT_VLA) {
2732 type_size(&type->ref->type, a);
2733 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2734 } else {
2735 vpushi(type_size(type, a));
2739 static void vla_sp_restore(void) {
2740 if (vlas_in_scope) {
2741 gen_vla_sp_restore(vla_sp_loc);
2745 static void vla_sp_restore_root(void) {
2746 if (vlas_in_scope) {
2747 gen_vla_sp_restore(vla_sp_root_loc);
2751 /* return the pointed type of t */
2752 static inline CType *pointed_type(CType *type)
2754 return &type->ref->type;
2757 /* modify type so that it is a pointer to the original type. */
2758 ST_FUNC void mk_pointer(CType *type)
2760 Sym *s;
2761 s = sym_push(SYM_FIELD, type, 0, -1);
2762 type->t = VT_PTR | (type->t & VT_STORAGE);
2763 type->ref = s;
2766 /* compare function types. OLD functions match any new functions */
2767 static int is_compatible_func(CType *type1, CType *type2)
2769 Sym *s1, *s2;
2771 s1 = type1->ref;
2772 s2 = type2->ref;
2773 if (!is_compatible_types(&s1->type, &s2->type))
2774 return 0;
2775 /* check func_call */
2776 if (s1->f.func_call != s2->f.func_call)
2777 return 0;
2778 /* XXX: not complete */
2779 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2780 return 1;
2781 if (s1->f.func_type != s2->f.func_type)
2782 return 0;
2783 while (s1 != NULL) {
2784 if (s2 == NULL)
2785 return 0;
2786 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2787 return 0;
2788 s1 = s1->next;
2789 s2 = s2->next;
2791 if (s2)
2792 return 0;
2793 return 1;
2796 /* return true if type1 and type2 are the same. If unqualified is
2797 true, qualifiers on the types are ignored.
2799 - enums are not checked, as with gcc's __builtin_types_compatible_p() */
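/* Illustrative note (not part of the original source): with unqualified
   set, "int * const" and "int *" compare equal (the top-level const is
   stripped), but "const int *" and "int *" do not, because pointed-to
   types are compared with their qualifiers via is_compatible_types().
   "char" and "signed char" also stay distinct, since default vs.
   explicit signedness matters for char. */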
2801 static int compare_types(CType *type1, CType *type2, int unqualified)
2803 int bt1, t1, t2;
2805 t1 = type1->t & VT_TYPE;
2806 t2 = type2->t & VT_TYPE;
2807 if (unqualified) {
2808 /* strip qualifiers before comparing */
2809 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2810 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2813 /* Default vs. explicit signedness only matters for char */
2814 if ((t1 & VT_BTYPE) != VT_BYTE) {
2815 t1 &= ~VT_DEFSIGN;
2816 t2 &= ~VT_DEFSIGN;
2818 /* XXX: bitfields ? */
2819 if (t1 != t2)
2820 return 0;
2821 /* test more complicated cases */
2822 bt1 = t1 & VT_BTYPE;
2823 if (bt1 == VT_PTR) {
2824 type1 = pointed_type(type1);
2825 type2 = pointed_type(type2);
2826 return is_compatible_types(type1, type2);
2827 } else if (bt1 == VT_STRUCT) {
2828 return (type1->ref == type2->ref);
2829 } else if (bt1 == VT_FUNC) {
2830 return is_compatible_func(type1, type2);
2831 } else {
2832 return 1;
2836 /* return true if type1 and type2 are exactly the same (including
2837 qualifiers).
2839 static int is_compatible_types(CType *type1, CType *type2)
2841 return compare_types(type1,type2,0);
2844 /* return true if type1 and type2 are exactly the same (including qualifiers). */
2846 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2848 return compare_types(type1,type2,1);
2851 /* print a type. If 'varstr' is not NULL, then the variable is also
2852 printed in the type */
2853 /* XXX: union */
2854 /* XXX: add array and function pointers */
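/* Illustrative note (not part of the original source): this feeds
   diagnostics such as gen_assign_cast()'s "cannot cast '...' to '...'".
   E.g. a pointer to const char is rendered as "const char *", a
   prototyped function type as "int (char, double)", and an anonymous
   struct tag as "struct <anonymous>". */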
2855 static void type_to_str(char *buf, int buf_size,
2856 CType *type, const char *varstr)
2858 int bt, v, t;
2859 Sym *s, *sa;
2860 char buf1[256];
2861 const char *tstr;
2863 t = type->t;
2864 bt = t & VT_BTYPE;
2865 buf[0] = '\0';
2867 if (t & VT_EXTERN)
2868 pstrcat(buf, buf_size, "extern ");
2869 if (t & VT_STATIC)
2870 pstrcat(buf, buf_size, "static ");
2871 if (t & VT_TYPEDEF)
2872 pstrcat(buf, buf_size, "typedef ");
2873 if (t & VT_INLINE)
2874 pstrcat(buf, buf_size, "inline ");
2875 if (t & VT_VOLATILE)
2876 pstrcat(buf, buf_size, "volatile ");
2877 if (t & VT_CONSTANT)
2878 pstrcat(buf, buf_size, "const ");
2880 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2881 || ((t & VT_UNSIGNED)
2882 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2883 && !IS_ENUM(t)
2885 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2887 buf_size -= strlen(buf);
2888 buf += strlen(buf);
2890 switch(bt) {
2891 case VT_VOID:
2892 tstr = "void";
2893 goto add_tstr;
2894 case VT_BOOL:
2895 tstr = "_Bool";
2896 goto add_tstr;
2897 case VT_BYTE:
2898 tstr = "char";
2899 goto add_tstr;
2900 case VT_SHORT:
2901 tstr = "short";
2902 goto add_tstr;
2903 case VT_INT:
2904 tstr = "int";
2905 goto maybe_long;
2906 case VT_LLONG:
2907 tstr = "long long";
2908 maybe_long:
2909 if (t & VT_LONG)
2910 tstr = "long";
2911 if (!IS_ENUM(t))
2912 goto add_tstr;
2913 tstr = "enum ";
2914 goto tstruct;
2915 case VT_FLOAT:
2916 tstr = "float";
2917 goto add_tstr;
2918 case VT_DOUBLE:
2919 tstr = "double";
2920 goto add_tstr;
2921 case VT_LDOUBLE:
2922 tstr = "long double";
2923 add_tstr:
2924 pstrcat(buf, buf_size, tstr);
2925 break;
2926 case VT_STRUCT:
2927 tstr = "struct ";
2928 if (IS_UNION(t))
2929 tstr = "union ";
2930 tstruct:
2931 pstrcat(buf, buf_size, tstr);
2932 v = type->ref->v & ~SYM_STRUCT;
2933 if (v >= SYM_FIRST_ANOM)
2934 pstrcat(buf, buf_size, "<anonymous>");
2935 else
2936 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2937 break;
2938 case VT_FUNC:
2939 s = type->ref;
2940 buf1[0]=0;
2941 if (varstr && '*' == *varstr) {
2942 pstrcat(buf1, sizeof(buf1), "(");
2943 pstrcat(buf1, sizeof(buf1), varstr);
2944 pstrcat(buf1, sizeof(buf1), ")");
2946 pstrcat(buf1, sizeof(buf1), "(");
2947 sa = s->next;
2948 while (sa != NULL) {
2949 char buf2[256];
2950 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2951 pstrcat(buf1, sizeof(buf1), buf2);
2952 sa = sa->next;
2953 if (sa)
2954 pstrcat(buf1, sizeof(buf1), ", ");
2956 if (s->f.func_type == FUNC_ELLIPSIS)
2957 pstrcat(buf1, sizeof(buf1), ", ...");
2958 pstrcat(buf1, sizeof(buf1), ")");
2959 type_to_str(buf, buf_size, &s->type, buf1);
2960 goto no_var;
2961 case VT_PTR:
2962 s = type->ref;
2963 if (t & VT_ARRAY) {
2964 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2965 type_to_str(buf, buf_size, &s->type, buf1);
2966 goto no_var;
2968 pstrcpy(buf1, sizeof(buf1), "*");
2969 if (t & VT_CONSTANT)
2970 pstrcat(buf1, sizeof(buf1), "const ");
2971 if (t & VT_VOLATILE)
2972 pstrcat(buf1, sizeof(buf1), "volatile ");
2973 if (varstr)
2974 pstrcat(buf1, sizeof(buf1), varstr);
2975 type_to_str(buf, buf_size, &s->type, buf1);
2976 goto no_var;
2978 if (varstr) {
2979 pstrcat(buf, buf_size, " ");
2980 pstrcat(buf, buf_size, varstr);
2982 no_var: ;
2985 /* verify type compatibility to store vtop in 'dt' type, and generate
2986 casts if needed. */
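/* Illustrative note (not part of the original source), using the
   diagnostics emitted below: with "int *p; int n; const char *s; char *d;",
       p = n;  ->  warning: assignment makes pointer from integer without a cast
       n = p;  ->  warning: assignment makes integer from pointer without a cast
       d = s;  ->  warning: assignment discards qualifiers from pointer target type
       p = 0;  ->  accepted silently (null pointer constant) */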
2987 static void gen_assign_cast(CType *dt)
2989 CType *st, *type1, *type2;
2990 char buf1[256], buf2[256];
2991 int dbt, sbt;
2993 st = &vtop->type; /* source type */
2994 dbt = dt->t & VT_BTYPE;
2995 sbt = st->t & VT_BTYPE;
2996 if (sbt == VT_VOID || dbt == VT_VOID) {
2997 if (sbt == VT_VOID && dbt == VT_VOID)
2998 ; /*
2999 It is OK if both are void.
3000 A test program:
3001 void func1() {}
3002 void func2() {
3003 return func1(); }
3005 gcc accepts this program. */
3007 else
3008 tcc_error("cannot cast from/to void");
3010 if (dt->t & VT_CONSTANT)
3011 tcc_warning("assignment of read-only location");
3012 switch(dbt) {
3013 case VT_PTR:
3014 /* special cases for pointers */
3015 /* '0' can also be a pointer */
3016 if (is_null_pointer(vtop))
3017 goto type_ok;
3018 /* accept implicit pointer to integer cast with warning */
3019 if (is_integer_btype(sbt)) {
3020 tcc_warning("assignment makes pointer from integer without a cast");
3021 goto type_ok;
3023 type1 = pointed_type(dt);
3024 /* a function is implicitly a function pointer */
3025 if (sbt == VT_FUNC) {
3026 if ((type1->t & VT_BTYPE) != VT_VOID &&
3027 !is_compatible_types(pointed_type(dt), st))
3028 tcc_warning("assignment from incompatible pointer type");
3029 goto type_ok;
3031 if (sbt != VT_PTR)
3032 goto error;
3033 type2 = pointed_type(st);
3034 if ((type1->t & VT_BTYPE) == VT_VOID ||
3035 (type2->t & VT_BTYPE) == VT_VOID) {
3036 /* void * can match anything */
3037 } else {
3038 //printf("types %08x %08x\n", type1->t, type2->t);
3039 /* exact type match, except for qualifiers */
3040 if (!is_compatible_unqualified_types(type1, type2)) {
3041 /* Like GCC, don't warn by default for mere changes
3042 in pointer target signedness. Do warn for different
3043 base types, though, in particular for unsigned enums
3044 and signed int targets. */
3045 if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
3046 || IS_ENUM(type1->t) || IS_ENUM(type2->t)
3048 tcc_warning("assignment from incompatible pointer type");
3051 /* check const and volatile */
3052 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
3053 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
3054 tcc_warning("assignment discards qualifiers from pointer target type");
3055 break;
3056 case VT_BYTE:
3057 case VT_SHORT:
3058 case VT_INT:
3059 case VT_LLONG:
3060 if (sbt == VT_PTR || sbt == VT_FUNC) {
3061 tcc_warning("assignment makes integer from pointer without a cast");
3062 } else if (sbt == VT_STRUCT) {
3063 goto case_VT_STRUCT;
3065 /* XXX: more tests */
3066 break;
3067 case VT_STRUCT:
3068 case_VT_STRUCT:
3069 if (!is_compatible_unqualified_types(dt, st)) {
3070 error:
3071 type_to_str(buf1, sizeof(buf1), st, NULL);
3072 type_to_str(buf2, sizeof(buf2), dt, NULL);
3073 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3075 break;
3077 type_ok:
3078 gen_cast(dt);
3081 /* store vtop in lvalue pushed on stack */
3082 ST_FUNC void vstore(void)
3084 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3086 ft = vtop[-1].type.t;
3087 sbt = vtop->type.t & VT_BTYPE;
3088 dbt = ft & VT_BTYPE;
3089 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3090 (sbt == VT_INT && dbt == VT_SHORT))
3091 && !(vtop->type.t & VT_BITFIELD)) {
3092 /* optimize char/short casts */
3093 delayed_cast = VT_MUSTCAST;
3094 vtop->type.t = ft & VT_TYPE;
3095 /* XXX: factorize */
3096 if (ft & VT_CONSTANT)
3097 tcc_warning("assignment of read-only location");
3098 } else {
3099 delayed_cast = 0;
3100 if (!(ft & VT_BITFIELD))
3101 gen_assign_cast(&vtop[-1].type);
3104 if (sbt == VT_STRUCT) {
3105 /* if structure, only generate pointer */
3106 /* structure assignment : generate memcpy */
3107 /* XXX: optimize if small size */
3108 size = type_size(&vtop->type, &align);
3110 /* destination */
3111 vswap();
3112 vtop->type.t = VT_PTR;
3113 gaddrof();
3115 /* address of memcpy() */
3116 #ifdef TCC_ARM_EABI
3117 if(!(align & 7))
3118 vpush_global_sym(&func_old_type, TOK_memcpy8);
3119 else if(!(align & 3))
3120 vpush_global_sym(&func_old_type, TOK_memcpy4);
3121 else
3122 #endif
3123 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3124 vpush_global_sym(&func_old_type, TOK_memmove);
3126 vswap();
3127 /* source */
3128 vpushv(vtop - 2);
3129 vtop->type.t = VT_PTR;
3130 gaddrof();
3131 /* type size */
3132 vpushi(size);
3133 gfunc_call(3);
3135 /* leave source on stack */
3136 } else if (ft & VT_BITFIELD) {
3137 /* bitfield store handling */
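/* Illustrative note (not part of the original source): conceptually the
   generated code computes
       dest = (dest & ~(mask << bit_pos)) | ((src & mask) << bit_pos)
   with mask = (1ULL << bit_size) - 1.  _Bool fields skip the source
   masking (the value is already 0/1), and layouts that adjust_bf()
   reports as VT_STRUCT go through store_packed_bf() byte-wise instead. */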
3139 /* save lvalue as expression result (example: s.b = s.a = n;) */
3140 vdup(), vtop[-1] = vtop[-2];
3142 bit_pos = BIT_POS(ft);
3143 bit_size = BIT_SIZE(ft);
3144 /* remove bit field info to avoid loops */
3145 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3147 if ((ft & VT_BTYPE) == VT_BOOL) {
3148 gen_cast(&vtop[-1].type);
3149 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3152 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3153 if (r == VT_STRUCT) {
3154 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3155 store_packed_bf(bit_pos, bit_size);
3156 } else {
3157 unsigned long long mask = (1ULL << bit_size) - 1;
3158 if ((ft & VT_BTYPE) != VT_BOOL) {
3159 /* mask source */
3160 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3161 vpushll(mask);
3162 else
3163 vpushi((unsigned)mask);
3164 gen_op('&');
3166 /* shift source */
3167 vpushi(bit_pos);
3168 gen_op(TOK_SHL);
3169 vswap();
3170 /* duplicate destination */
3171 vdup();
3172 vrott(3);
3173 /* load destination, mask and or with source */
3174 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3175 vpushll(~(mask << bit_pos));
3176 else
3177 vpushi(~((unsigned)mask << bit_pos));
3178 gen_op('&');
3179 gen_op('|');
3180 /* store result */
3181 vstore();
3182 /* ... and discard */
3183 vpop();
3185 } else if (dbt == VT_VOID) {
3186 --vtop;
3187 } else {
3188 #ifdef CONFIG_TCC_BCHECK
3189 /* bound check case */
3190 if (vtop[-1].r & VT_MUSTBOUND) {
3191 vswap();
3192 gbound();
3193 vswap();
3195 #endif
3196 rc = RC_INT;
3197 if (is_float(ft)) {
3198 rc = RC_FLOAT;
3199 #ifdef TCC_TARGET_X86_64
3200 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3201 rc = RC_ST0;
3202 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3203 rc = RC_FRET;
3205 #endif
3207 r = gv(rc); /* generate value */
3208 /* if lvalue was saved on stack, must read it */
3209 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3210 SValue sv;
3211 t = get_reg(RC_INT);
3212 #if PTR_SIZE == 8
3213 sv.type.t = VT_PTR;
3214 #else
3215 sv.type.t = VT_INT;
3216 #endif
3217 sv.r = VT_LOCAL | VT_LVAL;
3218 sv.c.i = vtop[-1].c.i;
3219 load(t, &sv);
3220 vtop[-1].r = t | VT_LVAL;
3222 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3223 #if PTR_SIZE == 8
3224 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3225 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3226 #else
3227 if ((ft & VT_BTYPE) == VT_LLONG) {
3228 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3229 #endif
3230 vtop[-1].type.t = load_type;
3231 store(r, vtop - 1);
3232 vswap();
3233 /* convert to int to increment easily */
3234 vtop->type.t = addr_type;
3235 gaddrof();
3236 vpushi(load_size);
3237 gen_op('+');
3238 vtop->r |= VT_LVAL;
3239 vswap();
3240 vtop[-1].type.t = load_type;
3241 /* XXX: it works because r2 is spilled last ! */
3242 store(vtop->r2, vtop - 1);
3243 } else {
3244 store(r, vtop - 1);
3247 vswap();
3248 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3249 vtop->r |= delayed_cast;
3253 /* post selects post- vs. pre- increment/decrement. c is the token ++ or -- */
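/* Illustrative note (not part of the original source): for "x++" the
   lvalue is duplicated, the old value is duplicated and rotated below,
   1 is added and stored back, and the final vpop() discards the stored
   result so the saved old value remains as the expression value; for
   "++x" the gv_dup()/vpop() pair is skipped and the new value is the
   result. */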
3254 ST_FUNC void inc(int post, int c)
3256 test_lvalue();
3257 vdup(); /* save lvalue */
3258 if (post) {
3259 gv_dup(); /* duplicate value */
3260 vrotb(3);
3261 vrotb(3);
3263 /* add constant */
3264 vpushi(c - TOK_MID);
3265 gen_op('+');
3266 vstore(); /* store value */
3267 if (post)
3268 vpop(); /* if post op, return saved value */
3271 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3273 /* read the string */
3274 if (tok != TOK_STR)
3275 expect(msg);
3276 cstr_new(astr);
3277 while (tok == TOK_STR) {
3278 /* XXX: add \0 handling too ? */
3279 cstr_cat(astr, tokc.str.data, -1);
3280 next();
3282 cstr_ccat(astr, '\0');
3285 /* If I is >= 1 and a power of two, returns log2(i)+1.
3286 If I is 0 returns 0. */
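/* Illustrative note (not part of the original source):
   exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(16) == 5,
   exact_log2p1(0) == 0.  This lets an alignment such as 16 be stored
   compactly in ad->a.aligned and decoded later as 1 << (aligned - 1),
   e.g. in struct_layout(). */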
3287 static int exact_log2p1(int i)
3289 int ret;
3290 if (!i)
3291 return 0;
3292 for (ret = 1; i >= 1 << 8; ret += 8)
3293 i >>= 8;
3294 if (i >= 1 << 4)
3295 ret += 4, i >>= 4;
3296 if (i >= 1 << 2)
3297 ret += 2, i >>= 2;
3298 if (i >= 1 << 1)
3299 ret++;
3300 return ret;
3303 /* Parse __attribute__((...)) GNUC extension. */
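/* Illustrative note (not part of the original source): e.g.
       int x __attribute__((aligned(16), unused));
       void fatal(const char *msg) __attribute__((noreturn));
   "aligned(16)" is stored as ad->a.aligned = exact_log2p1(16) = 5,
   "aligned" with no argument means MAX_ALIGN, and attribute names tcc
   does not recognize are skipped, with a warning if unsupported-feature
   warnings are enabled. */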
3304 static void parse_attribute(AttributeDef *ad)
3306 int t, n;
3307 CString astr;
3309 redo:
3310 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3311 return;
3312 next();
3313 skip('(');
3314 skip('(');
3315 while (tok != ')') {
3316 if (tok < TOK_IDENT)
3317 expect("attribute name");
3318 t = tok;
3319 next();
3320 switch(t) {
3321 case TOK_SECTION1:
3322 case TOK_SECTION2:
3323 skip('(');
3324 parse_mult_str(&astr, "section name");
3325 ad->section = find_section(tcc_state, (char *)astr.data);
3326 skip(')');
3327 cstr_free(&astr);
3328 break;
3329 case TOK_ALIAS1:
3330 case TOK_ALIAS2:
3331 skip('(');
3332 parse_mult_str(&astr, "alias(\"target\")");
3333 ad->alias_target = /* save string as token, for later */
3334 tok_alloc((char*)astr.data, astr.size-1)->tok;
3335 skip(')');
3336 cstr_free(&astr);
3337 break;
3338 case TOK_VISIBILITY1:
3339 case TOK_VISIBILITY2:
3340 skip('(');
3341 parse_mult_str(&astr,
3342 "visibility(\"default|hidden|internal|protected\")");
3343 if (!strcmp (astr.data, "default"))
3344 ad->a.visibility = STV_DEFAULT;
3345 else if (!strcmp (astr.data, "hidden"))
3346 ad->a.visibility = STV_HIDDEN;
3347 else if (!strcmp (astr.data, "internal"))
3348 ad->a.visibility = STV_INTERNAL;
3349 else if (!strcmp (astr.data, "protected"))
3350 ad->a.visibility = STV_PROTECTED;
3351 else
3352 expect("visibility(\"default|hidden|internal|protected\")");
3353 skip(')');
3354 cstr_free(&astr);
3355 break;
3356 case TOK_ALIGNED1:
3357 case TOK_ALIGNED2:
3358 if (tok == '(') {
3359 next();
3360 n = expr_const();
3361 if (n <= 0 || (n & (n - 1)) != 0)
3362 tcc_error("alignment must be a positive power of two");
3363 skip(')');
3364 } else {
3365 n = MAX_ALIGN;
3367 ad->a.aligned = exact_log2p1(n);
3368 if (n != 1 << (ad->a.aligned - 1))
3369 tcc_error("alignment of %d is larger than implemented", n);
3370 break;
3371 case TOK_PACKED1:
3372 case TOK_PACKED2:
3373 ad->a.packed = 1;
3374 break;
3375 case TOK_WEAK1:
3376 case TOK_WEAK2:
3377 ad->a.weak = 1;
3378 break;
3379 case TOK_UNUSED1:
3380 case TOK_UNUSED2:
3381 /* currently, no need to handle it because tcc does not
3382 track unused objects */
3383 break;
3384 case TOK_NORETURN1:
3385 case TOK_NORETURN2:
3386 /* currently ignored: tcc does not make use of the
3387 noreturn information */
3388 break;
3389 case TOK_CDECL1:
3390 case TOK_CDECL2:
3391 case TOK_CDECL3:
3392 ad->f.func_call = FUNC_CDECL;
3393 break;
3394 case TOK_STDCALL1:
3395 case TOK_STDCALL2:
3396 case TOK_STDCALL3:
3397 ad->f.func_call = FUNC_STDCALL;
3398 break;
3399 #ifdef TCC_TARGET_I386
3400 case TOK_REGPARM1:
3401 case TOK_REGPARM2:
3402 skip('(');
3403 n = expr_const();
3404 if (n > 3)
3405 n = 3;
3406 else if (n < 0)
3407 n = 0;
3408 if (n > 0)
3409 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3410 skip(')');
3411 break;
3412 case TOK_FASTCALL1:
3413 case TOK_FASTCALL2:
3414 case TOK_FASTCALL3:
3415 ad->f.func_call = FUNC_FASTCALLW;
3416 break;
3417 #endif
3418 case TOK_MODE:
3419 skip('(');
3420 switch(tok) {
3421 case TOK_MODE_DI:
3422 ad->attr_mode = VT_LLONG + 1;
3423 break;
3424 case TOK_MODE_QI:
3425 ad->attr_mode = VT_BYTE + 1;
3426 break;
3427 case TOK_MODE_HI:
3428 ad->attr_mode = VT_SHORT + 1;
3429 break;
3430 case TOK_MODE_SI:
3431 case TOK_MODE_word:
3432 ad->attr_mode = VT_INT + 1;
3433 break;
3434 default:
3435 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3436 break;
3438 next();
3439 skip(')');
3440 break;
3441 case TOK_DLLEXPORT:
3442 ad->a.dllexport = 1;
3443 break;
3444 case TOK_DLLIMPORT:
3445 ad->a.dllimport = 1;
3446 break;
3447 default:
3448 if (tcc_state->warn_unsupported)
3449 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3450 /* skip parameters */
3451 if (tok == '(') {
3452 int parenthesis = 0;
3453 do {
3454 if (tok == '(')
3455 parenthesis++;
3456 else if (tok == ')')
3457 parenthesis--;
3458 next();
3459 } while (parenthesis && tok != -1);
3461 break;
3463 if (tok != ',')
3464 break;
3465 next();
3467 skip(')');
3468 skip(')');
3469 goto redo;
3472 static Sym * find_field (CType *type, int v)
3474 Sym *s = type->ref;
3475 v |= SYM_FIELD;
3476 while ((s = s->next) != NULL) {
3477 if ((s->v & SYM_FIELD) &&
3478 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3479 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3480 Sym *ret = find_field (&s->type, v);
3481 if (ret)
3482 return ret;
3484 if (s->v == v)
3485 break;
3487 return s;
3490 static void struct_add_offset (Sym *s, int offset)
3492 while ((s = s->next) != NULL) {
3493 if ((s->v & SYM_FIELD) &&
3494 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3495 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3496 struct_add_offset(s->type.ref, offset);
3497 } else
3498 s->c += offset;
3502 static void struct_layout(CType *type, AttributeDef *ad)
3504 int size, align, maxalign, offset, c, bit_pos, bit_size;
3505 int packed, a, bt, prevbt, prev_bit_size;
3506 int pcc = !tcc_state->ms_bitfields;
3507 int pragma_pack = *tcc_state->pack_stack_ptr;
3508 Sym *f;
3510 maxalign = 1;
3511 offset = 0;
3512 c = 0;
3513 bit_pos = 0;
3514 prevbt = VT_STRUCT; /* make it never match */
3515 prev_bit_size = 0;
3517 //#define BF_DEBUG
3519 for (f = type->ref->next; f; f = f->next) {
3520 if (f->type.t & VT_BITFIELD)
3521 bit_size = BIT_SIZE(f->type.t);
3522 else
3523 bit_size = -1;
3524 size = type_size(&f->type, &align);
3525 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3526 packed = 0;
3528 if (pcc && bit_size == 0) {
3529 /* in pcc mode, packing does not affect zero-width bitfields */
3531 } else {
3532 /* in pcc mode, attribute packed overrides if set. */
3533 if (pcc && (f->a.packed || ad->a.packed))
3534 align = packed = 1;
3536 /* pragma pack overrides align if smaller, and always packs bitfields */
3537 if (pragma_pack) {
3538 packed = 1;
3539 if (pragma_pack < align)
3540 align = pragma_pack;
3541 /* in pcc mode pragma pack also overrides individual align */
3542 if (pcc && pragma_pack < a)
3543 a = 0;
3546 /* some individual align was specified */
3547 if (a)
3548 align = a;
3550 if (type->ref->type.t == VT_UNION) {
3551 if (pcc && bit_size >= 0)
3552 size = (bit_size + 7) >> 3;
3553 offset = 0;
3554 if (size > c)
3555 c = size;
3557 } else if (bit_size < 0) {
3558 if (pcc)
3559 c += (bit_pos + 7) >> 3;
3560 c = (c + align - 1) & -align;
3561 offset = c;
3562 if (size > 0)
3563 c += size;
3564 bit_pos = 0;
3565 prevbt = VT_STRUCT;
3566 prev_bit_size = 0;
3568 } else {
3569 /* A bit-field. Layout is more complicated. There are two
3570 options: PCC (GCC) compatible and MS compatible */
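/* Illustrative note (not part of the original source), assuming 32-bit
   int: in both modes "struct { int a:3, b:5; }" packs a and b into a
   single int (size 4).  A zero-width member such as "int :0;" forces
   the next bit-field onto a fresh int boundary in PCC mode; in MS mode
   a run of bit-fields always reserves at least the full width of its
   underlying type, and changing the underlying type starts a new run. */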
3571 if (pcc) {
3572 /* In PCC layout a bit-field is placed adjacent to the
3573 preceding bit-fields, except if:
3574 - it has zero-width
3575 - an individual alignment was given
3576 - it would overflow its base type container and
3577 there is no packing */
3578 if (bit_size == 0) {
3579 new_field:
3580 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3581 bit_pos = 0;
3582 } else if (f->a.aligned) {
3583 goto new_field;
3584 } else if (!packed) {
3585 int a8 = align * 8;
3586 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3587 if (ofs > size / align)
3588 goto new_field;
3591 /* in pcc mode, long long bitfields have type int if they fit */
3592 if (size == 8 && bit_size <= 32)
3593 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3595 while (bit_pos >= align * 8)
3596 c += align, bit_pos -= align * 8;
3597 offset = c;
3599 /* In PCC layout named bit-fields influence the alignment
3600 of the containing struct using the base type's alignment,
3601 except for packed fields (which here have correct align). */
3602 if (f->v & SYM_FIRST_ANOM
3603 // && bit_size // ??? gcc on ARM/rpi does that
3605 align = 1;
3607 } else {
3608 bt = f->type.t & VT_BTYPE;
3609 if ((bit_pos + bit_size > size * 8)
3610 || (bit_size > 0) == (bt != prevbt)
3612 c = (c + align - 1) & -align;
3613 offset = c;
3614 bit_pos = 0;
3615 /* In MS bitfield mode a bit-field run always uses
3616 at least as many bits as the underlying type.
3617 To start a new run it's also required that this
3618 or the last bit-field had non-zero width. */
3619 if (bit_size || prev_bit_size)
3620 c += size;
3622 /* In MS layout the record's alignment is normally
3623 influenced by the field, except for a zero-width
3624 field at the start of a run (but by further zero-width
3625 fields it is again). */
3626 if (bit_size == 0 && prevbt != bt)
3627 align = 1;
3628 prevbt = bt;
3629 prev_bit_size = bit_size;
3632 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3633 | (bit_pos << VT_STRUCT_SHIFT);
3634 bit_pos += bit_size;
3636 if (align > maxalign)
3637 maxalign = align;
3639 #ifdef BF_DEBUG
3640 printf("set field %s offset %-2d size %-2d align %-2d",
3641 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3642 if (f->type.t & VT_BITFIELD) {
3643 printf(" pos %-2d bits %-2d",
3644 BIT_POS(f->type.t),
3645 BIT_SIZE(f->type.t)
3648 printf("\n");
3649 #endif
3651 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3652 Sym *ass;
3653 /* An anonymous struct/union. Adjust member offsets
3654 to reflect the real offset of our containing struct.
3655 Also set the offset of this anon member inside
3656 the outer struct to be zero. Via this it
3657 works when accessing the field offset directly
3658 (from base object), as well as when recursing
3659 members in initializer handling. */
3660 int v2 = f->type.ref->v;
3661 if (!(v2 & SYM_FIELD) &&
3662 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3663 Sym **pps;
3664 /* This happens only with MS extensions. The
3665 anon member has a named struct type, so it
3666 potentially is shared with other references.
3667 We need to unshare members so we can modify
3668 them. */
3669 ass = f->type.ref;
3670 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3671 &f->type.ref->type, 0,
3672 f->type.ref->c);
3673 pps = &f->type.ref->next;
3674 while ((ass = ass->next) != NULL) {
3675 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3676 pps = &((*pps)->next);
3678 *pps = NULL;
3680 struct_add_offset(f->type.ref, offset);
3681 f->c = 0;
3682 } else {
3683 f->c = offset;
3686 f->r = 0;
3689 if (pcc)
3690 c += (bit_pos + 7) >> 3;
3692 /* store size and alignment */
3693 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3694 if (a < maxalign)
3695 a = maxalign;
3696 type->ref->r = a;
3697 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3698 /* can happen if individual align for some member was given. In
3699 this case MSVC ignores maxalign when aligning the size */
3700 a = pragma_pack;
3701 if (a < bt)
3702 a = bt;
3704 c = (c + a - 1) & -a;
3705 type->ref->c = c;
3707 #ifdef BF_DEBUG
3708 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3709 #endif
3711 /* check whether we can access bitfields by their type */
3712 for (f = type->ref->next; f; f = f->next) {
3713 int s, px, cx, c0;
3714 CType t;
3716 if (0 == (f->type.t & VT_BITFIELD))
3717 continue;
3718 f->type.ref = f;
3719 f->auxtype = -1;
3720 bit_size = BIT_SIZE(f->type.t);
3721 if (bit_size == 0)
3722 continue;
3723 bit_pos = BIT_POS(f->type.t);
3724 size = type_size(&f->type, &align);
3725 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3726 continue;
3728 /* try to access the field using a different type */
3729 c0 = -1, s = align = 1;
3730 for (;;) {
3731 px = f->c * 8 + bit_pos;
3732 cx = (px >> 3) & -align;
3733 px = px - (cx << 3);
3734 if (c0 == cx)
3735 break;
3736 s = (px + bit_size + 7) >> 3;
3737 if (s > 4) {
3738 t.t = VT_LLONG;
3739 } else if (s > 2) {
3740 t.t = VT_INT;
3741 } else if (s > 1) {
3742 t.t = VT_SHORT;
3743 } else {
3744 t.t = VT_BYTE;
3746 s = type_size(&t, &align);
3747 c0 = cx;
3750 if (px + bit_size <= s * 8 && cx + s <= c) {
3751 /* update offset and bit position */
3752 f->c = cx;
3753 bit_pos = px;
3754 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3755 | (bit_pos << VT_STRUCT_SHIFT);
3756 if (s != size)
3757 f->auxtype = t.t;
3758 #ifdef BF_DEBUG
3759 printf("FIX field %s offset %-2d size %-2d align %-2d "
3760 "pos %-2d bits %-2d\n",
3761 get_tok_str(f->v & ~SYM_FIELD, NULL),
3762 cx, s, align, px, bit_size);
3763 #endif
3764 } else {
3765 /* fall back to load/store single-byte wise */
3766 f->auxtype = VT_STRUCT;
3767 #ifdef BF_DEBUG
3768 printf("FIX field %s : load byte-wise\n",
3769 get_tok_str(f->v & ~SYM_FIELD, NULL));
3770 #endif
3775 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3776 static void struct_decl(CType *type, int u)
3778 int v, c, size, align, flexible;
3779 int bit_size, bsize, bt;
3780 Sym *s, *ss, **ps;
3781 AttributeDef ad, ad1;
3782 CType type1, btype;
3784 memset(&ad, 0, sizeof ad);
3785 next();
3786 parse_attribute(&ad);
3787 if (tok != '{') {
3788 v = tok;
3789 next();
3790 /* struct already defined ? return it */
3791 if (v < TOK_IDENT)
3792 expect("struct/union/enum name");
3793 s = struct_find(v);
3794 if (s && (s->sym_scope == local_scope || tok != '{')) {
3795 if (u == s->type.t)
3796 goto do_decl;
3797 if (u == VT_ENUM && IS_ENUM(s->type.t))
3798 goto do_decl;
3799 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3801 } else {
3802 v = anon_sym++;
3804 /* Record the original enum/struct/union token. */
3805 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3806 type1.ref = NULL;
3807 /* we put an undefined size for struct/union */
3808 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3809 s->r = 0; /* default alignment is zero as gcc */
3810 do_decl:
3811 type->t = s->type.t;
3812 type->ref = s;
3814 if (tok == '{') {
3815 next();
3816 if (s->c != -1)
3817 tcc_error("struct/union/enum already defined");
3818 /* cannot be empty */
3819 /* empty enums are not allowed */
3820 ps = &s->next;
3821 if (u == VT_ENUM) {
3822 long long ll = 0, pl = 0, nl = 0;
3823 CType t;
3824 t.ref = s;
3825 /* enum symbols have static storage */
3826 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3827 for(;;) {
3828 v = tok;
3829 if (v < TOK_UIDENT)
3830 expect("identifier");
3831 ss = sym_find(v);
3832 if (ss && !local_stack)
3833 tcc_error("redefinition of enumerator '%s'",
3834 get_tok_str(v, NULL));
3835 next();
3836 if (tok == '=') {
3837 next();
3838 ll = expr_const64();
3840 ss = sym_push(v, &t, VT_CONST, 0);
3841 ss->enum_val = ll;
3842 *ps = ss, ps = &ss->next;
3843 if (ll < nl)
3844 nl = ll;
3845 if (ll > pl)
3846 pl = ll;
3847 if (tok != ',')
3848 break;
3849 next();
3850 ll++;
3851 /* NOTE: we accept a trailing comma */
3852 if (tok == '}')
3853 break;
3855 skip('}');
3856 /* set integral type of the enum */
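/* Illustrative note (not part of the original source): an enum whose
   values all fit in int keeps type int; if every value is >= 0 and the
   largest does not fit in int, the enum becomes unsigned (and 64-bit
   when it exceeds 32 bits); if some value is negative and any value
   does not fit in int, a signed 64-bit type is used.  Individual
   enumerators whose value fits in int still keep type int (see the
   loop below). */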
3857 t.t = VT_INT;
3858 if (nl >= 0) {
3859 if (pl != (unsigned)pl)
3860 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3861 t.t |= VT_UNSIGNED;
3862 } else if (pl != (int)pl || nl != (int)nl)
3863 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3864 s->type.t = type->t = t.t | VT_ENUM;
3865 s->c = 0;
3866 /* set type for enum members */
3867 for (ss = s->next; ss; ss = ss->next) {
3868 ll = ss->enum_val;
3869 if (ll == (int)ll) /* default is int if it fits */
3870 continue;
3871 if (t.t & VT_UNSIGNED) {
3872 ss->type.t |= VT_UNSIGNED;
3873 if (ll == (unsigned)ll)
3874 continue;
3876 ss->type.t = (ss->type.t & ~VT_BTYPE)
3877 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3879 } else {
3880 c = 0;
3881 flexible = 0;
3882 while (tok != '}') {
3883 if (!parse_btype(&btype, &ad1)) {
3884 skip(';');
3885 continue;
3887 while (1) {
3888 if (flexible)
3889 tcc_error("flexible array member '%s' not at the end of struct",
3890 get_tok_str(v, NULL));
3891 bit_size = -1;
3892 v = 0;
3893 type1 = btype;
3894 if (tok != ':') {
3895 if (tok != ';')
3896 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3897 if (v == 0) {
3898 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3899 expect("identifier");
3900 else {
3901 int v = btype.ref->v;
3902 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3903 if (tcc_state->ms_extensions == 0)
3904 expect("identifier");
3908 if (type_size(&type1, &align) < 0) {
3909 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3910 flexible = 1;
3911 else
3912 tcc_error("field '%s' has incomplete type",
3913 get_tok_str(v, NULL));
3915 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3916 (type1.t & VT_STORAGE))
3917 tcc_error("invalid type for '%s'",
3918 get_tok_str(v, NULL));
3920 if (tok == ':') {
3921 next();
3922 bit_size = expr_const();
3923 /* XXX: handle v = 0 case for messages */
3924 if (bit_size < 0)
3925 tcc_error("negative width in bit-field '%s'",
3926 get_tok_str(v, NULL));
3927 if (v && bit_size == 0)
3928 tcc_error("zero width for bit-field '%s'",
3929 get_tok_str(v, NULL));
3930 parse_attribute(&ad1);
3932 size = type_size(&type1, &align);
3933 if (bit_size >= 0) {
3934 bt = type1.t & VT_BTYPE;
3935 if (bt != VT_INT &&
3936 bt != VT_BYTE &&
3937 bt != VT_SHORT &&
3938 bt != VT_BOOL &&
3939 bt != VT_LLONG)
3940 tcc_error("bitfields must have scalar type");
3941 bsize = size * 8;
3942 if (bit_size > bsize) {
3943 tcc_error("width of '%s' exceeds its type",
3944 get_tok_str(v, NULL));
3945 } else if (bit_size == bsize
3946 && !ad.a.packed && !ad1.a.packed) {
3947 /* no need for bit fields */
3949 } else if (bit_size == 64) {
3950 tcc_error("field width 64 not implemented");
3951 } else {
3952 type1.t = (type1.t & ~VT_STRUCT_MASK)
3953 | VT_BITFIELD
3954 | (bit_size << (VT_STRUCT_SHIFT + 6));
3957 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3958 /* Remember we've seen a real field to check
3959 for placement of flexible array member. */
3960 c = 1;
3962 /* If member is a struct or bit-field, enforce
3963 placing into the struct (as anonymous). */
3964 if (v == 0 &&
3965 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3966 bit_size >= 0)) {
3967 v = anon_sym++;
3969 if (v) {
3970 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3971 ss->a = ad1.a;
3972 *ps = ss;
3973 ps = &ss->next;
3975 if (tok == ';' || tok == TOK_EOF)
3976 break;
3977 skip(',');
3979 skip(';');
3981 skip('}');
3982 parse_attribute(&ad);
3983 struct_layout(type, &ad);
3988 static void sym_to_attr(AttributeDef *ad, Sym *s)
3990 if (s->a.aligned && 0 == ad->a.aligned)
3991 ad->a.aligned = s->a.aligned;
3992 if (s->f.func_call && 0 == ad->f.func_call)
3993 ad->f.func_call = s->f.func_call;
3994 if (s->f.func_type && 0 == ad->f.func_type)
3995 ad->f.func_type = s->f.func_type;
3996 if (s->a.packed)
3997 ad->a.packed = 1;
4000 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4001 are added to the element type, copied because it could be a typedef. */
4002 static void parse_btype_qualify(CType *type, int qualifiers)
4004 while (type->t & VT_ARRAY) {
4005 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4006 type = &type->ref->type;
4008 type->t |= qualifiers;
4011 /* return 0 if no type declaration. otherwise, return the basic type
4012 and skip it. */
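/* Illustrative note (not part of the original source): specifiers are
   accumulated one token at a time, so "unsigned long long int" ends up
   as VT_LLONG | VT_UNSIGNED | VT_DEFSIGN, "long double" becomes
   VT_LDOUBLE, and a lone "long" is kept as the VT_LONG modifier until
   the_end resolves it to int or long long depending on LONG_SIZE. */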
4014 static int parse_btype(CType *type, AttributeDef *ad)
4016 int t, u, bt, st, type_found, typespec_found, g;
4017 Sym *s;
4018 CType type1;
4020 memset(ad, 0, sizeof(AttributeDef));
4021 type_found = 0;
4022 typespec_found = 0;
4023 t = VT_INT;
4024 bt = st = -1;
4025 type->ref = NULL;
4027 while(1) {
4028 switch(tok) {
4029 case TOK_EXTENSION:
4030 /* currently, we really ignore extension */
4031 next();
4032 continue;
4034 /* basic types */
4035 case TOK_CHAR:
4036 u = VT_BYTE;
4037 basic_type:
4038 next();
4039 basic_type1:
4040 if (u == VT_SHORT || u == VT_LONG) {
4041 if (st != -1 || (bt != -1 && bt != VT_INT))
4042 tmbt: tcc_error("too many basic types");
4043 st = u;
4044 } else {
4045 if (bt != -1 || (st != -1 && u != VT_INT))
4046 goto tmbt;
4047 bt = u;
4049 if (u != VT_INT)
4050 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4051 typespec_found = 1;
4052 break;
4053 case TOK_VOID:
4054 u = VT_VOID;
4055 goto basic_type;
4056 case TOK_SHORT:
4057 u = VT_SHORT;
4058 goto basic_type;
4059 case TOK_INT:
4060 u = VT_INT;
4061 goto basic_type;
4062 case TOK_LONG:
4063 if ((t & VT_BTYPE) == VT_DOUBLE) {
4064 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4065 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4066 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4067 } else {
4068 u = VT_LONG;
4069 goto basic_type;
4071 next();
4072 break;
4073 #ifdef TCC_TARGET_ARM64
4074 case TOK_UINT128:
4075 /* GCC's __uint128_t appears in some Linux header files. Make it a
4076 synonym for long double to get the size and alignment right. */
4077 u = VT_LDOUBLE;
4078 goto basic_type;
4079 #endif
4080 case TOK_BOOL:
4081 u = VT_BOOL;
4082 goto basic_type;
4083 case TOK_FLOAT:
4084 u = VT_FLOAT;
4085 goto basic_type;
4086 case TOK_DOUBLE:
4087 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4088 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4089 } else {
4090 u = VT_DOUBLE;
4091 goto basic_type;
4093 next();
4094 break;
4095 case TOK_ENUM:
4096 struct_decl(&type1, VT_ENUM);
4097 basic_type2:
4098 u = type1.t;
4099 type->ref = type1.ref;
4100 goto basic_type1;
4101 case TOK_STRUCT:
4102 struct_decl(&type1, VT_STRUCT);
4103 goto basic_type2;
4104 case TOK_UNION:
4105 struct_decl(&type1, VT_UNION);
4106 goto basic_type2;
4108 /* type modifiers */
4109 case TOK_CONST1:
4110 case TOK_CONST2:
4111 case TOK_CONST3:
4112 type->t = t;
4113 parse_btype_qualify(type, VT_CONSTANT);
4114 t = type->t;
4115 next();
4116 break;
4117 case TOK_VOLATILE1:
4118 case TOK_VOLATILE2:
4119 case TOK_VOLATILE3:
4120 type->t = t;
4121 parse_btype_qualify(type, VT_VOLATILE);
4122 t = type->t;
4123 next();
4124 break;
4125 case TOK_SIGNED1:
4126 case TOK_SIGNED2:
4127 case TOK_SIGNED3:
4128 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4129 tcc_error("signed and unsigned modifier");
4130 t |= VT_DEFSIGN;
4131 next();
4132 typespec_found = 1;
4133 break;
4134 case TOK_REGISTER:
4135 case TOK_AUTO:
4136 case TOK_RESTRICT1:
4137 case TOK_RESTRICT2:
4138 case TOK_RESTRICT3:
4139 next();
4140 break;
4141 case TOK_UNSIGNED:
4142 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4143 tcc_error("signed and unsigned modifier");
4144 t |= VT_DEFSIGN | VT_UNSIGNED;
4145 next();
4146 typespec_found = 1;
4147 break;
4149 /* storage */
4150 case TOK_EXTERN:
4151 g = VT_EXTERN;
4152 goto storage;
4153 case TOK_STATIC:
4154 g = VT_STATIC;
4155 goto storage;
4156 case TOK_TYPEDEF:
4157 g = VT_TYPEDEF;
4158 goto storage;
4159 storage:
4160 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4161 tcc_error("multiple storage classes");
4162 t |= g;
4163 next();
4164 break;
4165 case TOK_INLINE1:
4166 case TOK_INLINE2:
4167 case TOK_INLINE3:
4168 t |= VT_INLINE;
4169 next();
4170 break;
4172 /* GNUC attribute */
4173 case TOK_ATTRIBUTE1:
4174 case TOK_ATTRIBUTE2:
4175 parse_attribute(ad);
4176 if (ad->attr_mode) {
4177 u = ad->attr_mode -1;
4178 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4180 break;
4181 /* GNUC typeof */
4182 case TOK_TYPEOF1:
4183 case TOK_TYPEOF2:
4184 case TOK_TYPEOF3:
4185 next();
4186 parse_expr_type(&type1);
4187 /* remove all storage modifiers except typedef */
4188 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4189 if (type1.ref)
4190 sym_to_attr(ad, type1.ref);
4191 goto basic_type2;
4192 default:
4193 if (typespec_found)
4194 goto the_end;
4195 s = sym_find(tok);
4196 if (!s || !(s->type.t & VT_TYPEDEF))
4197 goto the_end;
4198 t &= ~(VT_BTYPE|VT_LONG);
4199 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4200 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4201 type->ref = s->type.ref;
4202 if (t)
4203 parse_btype_qualify(type, t);
4204 t = type->t;
4205 /* get attributes from typedef */
4206 sym_to_attr(ad, s);
4207 next();
4208 typespec_found = 1;
4209 st = bt = -2;
4210 break;
4212 type_found = 1;
4214 the_end:
4215 if (tcc_state->char_is_unsigned) {
4216 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4217 t |= VT_UNSIGNED;
4219 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4220 bt = t & (VT_BTYPE|VT_LONG);
4221 if (bt == VT_LONG)
4222 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4223 #ifdef TCC_TARGET_PE
4224 if (bt == VT_LDOUBLE)
4225 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4226 #endif
4227 type->t = t;
4228 return type_found;
4231 /* convert a function parameter type (array to pointer and function to
4232 function pointer) */
4233 static inline void convert_parameter_type(CType *pt)
4235 /* remove const and volatile qualifiers (XXX: const could be used
4236 to indicate a const function parameter) */
4237 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4238 /* array must be transformed to pointer according to ANSI C */
4239 pt->t &= ~VT_ARRAY;
4240 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4241 mk_pointer(pt);
4245 ST_FUNC void parse_asm_str(CString *astr)
4247 skip('(');
4248 parse_mult_str(astr, "string constant");
4251 /* Parse an asm label and return the token */
4252 static int asm_label_instr(void)
4254 int v;
4255 CString astr;
4257 next();
4258 parse_asm_str(&astr);
4259 skip(')');
4260 #ifdef ASM_DEBUG
4261 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4262 #endif
4263 v = tok_alloc(astr.data, astr.size - 1)->tok;
4264 cstr_free(&astr);
4265 return v;
4268 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4270 int n, l, t1, arg_size, align;
4271 Sym **plast, *s, *first;
4272 AttributeDef ad1;
4273 CType pt;
4275 if (tok == '(') {
4276 /* function type, or recursive declarator (return if so) */
4277 next();
4278 if (td && !(td & TYPE_ABSTRACT))
4279 return 0;
4280 if (tok == ')')
4281 l = 0;
4282 else if (parse_btype(&pt, &ad1))
4283 l = FUNC_NEW;
4284 else if (td)
4285 return 0;
4286 else
4287 l = FUNC_OLD;
4288 first = NULL;
4289 plast = &first;
4290 arg_size = 0;
4291 if (l) {
4292 for(;;) {
4293 /* read param name and compute offset */
4294 if (l != FUNC_OLD) {
4295 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4296 break;
4297 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4298 if ((pt.t & VT_BTYPE) == VT_VOID)
4299 tcc_error("parameter declared as void");
4300 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4301 } else {
4302 n = tok;
4303 if (n < TOK_UIDENT)
4304 expect("identifier");
4305 pt.t = VT_VOID; /* invalid type */
4306 next();
4308 convert_parameter_type(&pt);
4309 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4310 *plast = s;
4311 plast = &s->next;
4312 if (tok == ')')
4313 break;
4314 skip(',');
4315 if (l == FUNC_NEW && tok == TOK_DOTS) {
4316 l = FUNC_ELLIPSIS;
4317 next();
4318 break;
4320 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4321 tcc_error("invalid type");
4323 } else
4324 /* if no parameters, this is an old-style prototype */
4325 l = FUNC_OLD;
4326 skip(')');
4327 /* NOTE: const is ignored in returned type as it has a special
4328 meaning in gcc / C++ */
4329 type->t &= ~VT_CONSTANT;
4330 /* some ancient pre-K&R C allows a function to return an array
4331 and the array brackets to be put after the arguments, such
4332 that "int c()[]" means something like "int[] c()" */
4333 if (tok == '[') {
4334 next();
4335 skip(']'); /* only handle simple "[]" */
4336 mk_pointer(type);
4338 /* we push an anonymous symbol which will contain the function prototype */
4339 ad->f.func_args = arg_size;
4340 ad->f.func_type = l;
4341 s = sym_push(SYM_FIELD, type, 0, 0);
4342 s->a = ad->a;
4343 s->f = ad->f;
4344 s->next = first;
4345 type->t = VT_FUNC;
4346 type->ref = s;
4347 } else if (tok == '[') {
4348 int saved_nocode_wanted = nocode_wanted;
4349 /* array definition */
4350 next();
4351 while (1) {
4352 /* XXX: The optional type qualifiers and 'static' should only be accepted
4353 in parameter declarations. The '*' as well, and then only
4354 in prototypes (not function definitions). */
4355 switch (tok) {
4356 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4357 case TOK_CONST1:
4358 case TOK_VOLATILE1:
4359 case TOK_STATIC:
4360 case '*':
4361 next();
4362 continue;
4363 default:
4364 break;
4366 break;
4368 n = -1;
4369 t1 = 0;
4370 if (tok != ']') {
4371 if (!local_stack || (storage & VT_STATIC))
4372 vpushi(expr_const());
4373 else {
4374 /* The length of a VLA (which can only occur with local_stack &&
4375 !VT_STATIC) must always be evaluated, even under nocode_wanted,
4376 so that its size slot is initialized (e.g. under sizeof
4377 or typeof). */
4378 nocode_wanted = 0;
4379 gexpr();
4381 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4382 n = vtop->c.i;
4383 if (n < 0)
4384 tcc_error("invalid array size");
4385 } else {
4386 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4387 tcc_error("size of variable length array should be an integer");
4388 t1 = VT_VLA;
4391 skip(']');
4392 /* parse next post type */
4393 post_type(type, ad, storage, 0);
4394 if (type->t == VT_FUNC)
4395 tcc_error("declaration of an array of functions");
4396 t1 |= type->t & VT_VLA;
4398 if (t1 & VT_VLA) {
4399 loc -= type_size(&int_type, &align);
4400 loc &= -align;
4401 n = loc;
4403 vla_runtime_type_size(type, &align);
4404 gen_op('*');
4405 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4406 vswap();
4407 vstore();
4409 if (n != -1)
4410 vpop();
4411 nocode_wanted = saved_nocode_wanted;
4413 /* we push an anonymous symbol which will contain the array
4414 element type */
4415 s = sym_push(SYM_FIELD, type, 0, n);
4416 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4417 type->ref = s;
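/* Editor's note (illustrative sketch, not part of the original source): for a
   VLA such as

       void g(int n) { char buf[2*n]; }

   the length expression is evaluated above, multiplied by the element size at
   run time and stored in a local stack slot, and the resulting type is marked
   VT_VLA | VT_PTR; a fixed-size array like 'char buf[16]' simply becomes
   VT_ARRAY | VT_PTR with the element count recorded in the pushed symbol. */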
4419 return 1;
4422 /* Parse a type declarator (except the basic type), and return the type
4423 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4424 expected. 'type' should contain the basic type. 'ad' is the
4425 attribute definition of the basic type; it can be modified by
4426 type_decl(). If this (possibly abstract) declarator is a pointer chain,
4427 it returns the innermost pointed-to type (which equals *type but is a
4428 different pointer); otherwise it returns 'type' itself, which is used for recursive calls. */
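/* Editor's note (illustrative example, not part of the original source): for
   the declarator in 'int **p', 'type' describes 'int **' on return while the
   returned pointer designates the innermost 'int'; for a plain 'int x' both
   are the same. */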
4429 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4431 CType *post, *ret;
4432 int qualifiers, storage;
4434 /* recursive type, remove storage bits first, apply them later again */
4435 storage = type->t & VT_STORAGE;
4436 type->t &= ~VT_STORAGE;
4437 post = ret = type;
4439 while (tok == '*') {
4440 qualifiers = 0;
4441 redo:
4442 next();
4443 switch(tok) {
4444 case TOK_CONST1:
4445 case TOK_CONST2:
4446 case TOK_CONST3:
4447 qualifiers |= VT_CONSTANT;
4448 goto redo;
4449 case TOK_VOLATILE1:
4450 case TOK_VOLATILE2:
4451 case TOK_VOLATILE3:
4452 qualifiers |= VT_VOLATILE;
4453 goto redo;
4454 case TOK_RESTRICT1:
4455 case TOK_RESTRICT2:
4456 case TOK_RESTRICT3:
4457 goto redo;
4458 /* XXX: clarify attribute handling */
4459 case TOK_ATTRIBUTE1:
4460 case TOK_ATTRIBUTE2:
4461 parse_attribute(ad);
4462 break;
4464 mk_pointer(type);
4465 type->t |= qualifiers;
4466 if (ret == type)
4467 /* innermost pointed to type is the one for the first derivation */
4468 ret = pointed_type(type);
4471 if (tok == '(') {
4472 /* This is possibly a parameter type list for abstract declarators
4473 ('int ()'); use post_type() to test for this. */
4474 if (!post_type(type, ad, 0, td)) {
4475 /* It's not, so it's a nested declarator, and the post operations
4476 apply to the innermost pointed to type (if any). */
4477 /* XXX: it is not correct to modify 'ad' at this point, but
4478 the syntax is not clear */
4479 parse_attribute(ad);
4480 post = type_decl(type, ad, v, td);
4481 skip(')');
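/* Editor's note (illustrative sketch, not part of the original source): this
   is where a nested declarator such as 'int (*fp)(void)' is told apart from a
   parameter type list such as 'int (void)': post_type() returns 0 for the
   former, so the inner '(*fp)' is parsed recursively by type_decl(). */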
4483 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4484 /* type identifier */
4485 *v = tok;
4486 next();
4487 } else {
4488 if (!(td & TYPE_ABSTRACT))
4489 expect("identifier");
4490 *v = 0;
4492 post_type(post, ad, storage, 0);
4493 parse_attribute(ad);
4494 type->t |= storage;
4495 return ret;
4498 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4499 ST_FUNC int lvalue_type(int t)
4501 int bt, r;
4502 r = VT_LVAL;
4503 bt = t & VT_BTYPE;
4504 if (bt == VT_BYTE || bt == VT_BOOL)
4505 r |= VT_LVAL_BYTE;
4506 else if (bt == VT_SHORT)
4507 r |= VT_LVAL_SHORT;
4508 else
4509 return r;
4510 if (t & VT_UNSIGNED)
4511 r |= VT_LVAL_UNSIGNED;
4512 return r;
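/* Editor's note (illustrative sketch, not part of the original source):
   lvalue_type() maps, for example,

       unsigned char   -> VT_LVAL | VT_LVAL_BYTE | VT_LVAL_UNSIGNED
       short           -> VT_LVAL | VT_LVAL_SHORT
       int, pointers   -> VT_LVAL

   so that later loads know which access width and sign extension to use. */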
4516 /* indirection with full error checking and bounds checking */
4516 ST_FUNC void indir(void)
4518 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4519 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4520 return;
4521 expect("pointer");
4523 if (vtop->r & VT_LVAL)
4524 gv(RC_INT);
4525 vtop->type = *pointed_type(&vtop->type);
4526 /* Arrays and functions are never lvalues */
4527 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4528 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4529 vtop->r |= lvalue_type(vtop->type.t);
4530 /* if bound checking, the referenced pointer must be checked */
4531 #ifdef CONFIG_TCC_BCHECK
4532 if (tcc_state->do_bounds_check)
4533 vtop->r |= VT_MUSTBOUND;
4534 #endif
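/* Editor's note (illustrative sketch, not part of the original source): for

       int *p;  ... *p ...

   indir() turns the pointer on top of the value stack into an 'int' lvalue
   (plus VT_MUSTBOUND when bounds checking is enabled); applying '*' to a
   function value is a no-op, and results of array or function type are not
   marked as lvalues. */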
4538 /* pass a parameter to a function and do type checking and casting */
4539 static void gfunc_param_typed(Sym *func, Sym *arg)
4541 int func_type;
4542 CType type;
4544 func_type = func->f.func_type;
4545 if (func_type == FUNC_OLD ||
4546 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4547 /* default casting: only need to convert float to double */
4548 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4549 gen_cast_s(VT_DOUBLE);
4550 } else if (vtop->type.t & VT_BITFIELD) {
4551 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4552 type.ref = vtop->type.ref;
4553 gen_cast(&type);
4555 } else if (arg == NULL) {
4556 tcc_error("too many arguments to function");
4557 } else {
4558 type = arg->type;
4559 type.t &= ~VT_CONSTANT; /* needed to avoid a false warning */
4560 gen_assign_cast(&type);
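/* Editor's note (illustrative example, not part of the original source): when
   calling

       int printf(const char *fmt, ...);
       printf("%f", 1.0f);

   the float argument falls under the '...' part (arg == NULL), so only the
   default conversion applies and 1.0f is promoted to double; a prototyped
   parameter is instead checked and cast to its declared type. */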
4564 /* parse an expression and return its type without any side effects. */
4565 static void expr_type(CType *type, void (*expr_fn)(void))
4567 nocode_wanted++;
4568 expr_fn();
4569 *type = vtop->type;
4570 vpop();
4571 nocode_wanted--;
4574 /* parse an expression of the form '(type)' or '(expr)' and return its
4575 type */
4576 static void parse_expr_type(CType *type)
4578 int n;
4579 AttributeDef ad;
4581 skip('(');
4582 if (parse_btype(type, &ad)) {
4583 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4584 } else {
4585 expr_type(type, gexpr);
4587 skip(')');
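/* Editor's note (illustrative sketch, not part of the original source):
   parse_expr_type() serves constructs like 'typeof(int *)' and
   'typeof(x + 1)': if the parenthesized text parses as a type it is used
   directly, otherwise the expression is evaluated under nocode_wanted so that
   only its type is kept. */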
4590 static void parse_type(CType *type)
4592 AttributeDef ad;
4593 int n;
4595 if (!parse_btype(type, &ad)) {
4596 expect("type");
4598 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4601 static void parse_builtin_params(int nc, const char *args)
4603 char c, sep = '(';
4604 CType t;
4605 if (nc)
4606 nocode_wanted++;
4607 next();
4608 while ((c = *args++)) {
4609 skip(sep);
4610 sep = ',';
4611 switch (c) {
4612 case 'e': expr_eq(); continue;
4613 case 't': parse_type(&t); vpush(&t); continue;
4614 default: tcc_error("internal error"); break;
4617 skip(')');
4618 if (nc)
4619 nocode_wanted--;
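/* Editor's note (illustrative sketch, not part of the original source): the
   'args' string drives the parsing, e.g. "e" for a single expression argument
   or "te" for a type followed by an expression; a non-zero 'nc' suppresses
   code generation, which builtins whose argument must not be evaluated (such
   as __builtin_constant_p) rely on. */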
4622 ST_FUNC void unary(void)
4624 int n, t, align, size, r, sizeof_caller;
4625 CType type;
4626 Sym *s;
4627 AttributeDef ad;
4629 sizeof_caller = in_sizeof;
4630 in_sizeof = 0;
4631 type.ref = NULL;
4632 /* XXX: GCC 2.95.3 does not generate a jump table here although it
4633 would be better */
4634 tok_next:
4635 switch(tok) {
4636 case TOK_EXTENSION:
4637 next();
4638 goto tok_next;
4639 case TOK_LCHAR:
4640 #ifdef TCC_TARGET_PE
4641 t = VT_SHORT|VT_UNSIGNED;
4642 goto push_tokc;
4643 #endif
4644 case TOK_CINT:
4645 case TOK_CCHAR: