Adjust testcase for PIE compilers
[tinycc.git] / tccgen.c
blob7d554b5b163297c90b5d93f7cddb6ff55f08718f
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
56 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
57 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
58 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
59 ST_DATA int func_vc;
60 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
61 ST_DATA const char *funcname;
62 ST_DATA int g_debug;
64 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
66 ST_DATA struct switch_t {
67 struct case_t {
68 int64_t v1, v2;
69 int sym;
70 } **p; int n; /* list of case ranges */
71 int def_sym; /* default symbol */
72 } *cur_switch; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType *type);
77 static void gen_cast_s(int t);
78 static inline CType *pointed_type(CType *type);
79 static int is_compatible_types(CType *type1, CType *type2);
80 static int parse_btype(CType *type, AttributeDef *ad);
81 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
82 static void parse_expr_type(CType *type);
83 static void init_putv(CType *type, Section *sec, unsigned long c);
84 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
85 static void block(int *bsym, int *csym, int is_expr);
86 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
87 static void decl(int l);
88 static int decl0(int l, int is_for_loop_init, Sym *);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType *type, int *a);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType *type1, CType *type2);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty, unsigned long long v);
96 static void vpush(CType *type);
97 static int gvtst(int inv, int t);
98 static void gen_inline_functions(TCCState *s);
99 static void skip_or_save_block(TokenString **str);
100 static void gv_dup(void);
102 ST_INLN int is_float(int t)
104 int bt;
105 bt = t & VT_BTYPE;
106 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
109 /* we use our own 'finite' function to avoid potential problems with
110 non standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC int ieee_finite(double d)
114 int p[4];
115 memcpy(p, &d, sizeof(double));
116 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
123 #endif
125 ST_FUNC void test_lvalue(void)
127 if (!(vtop->r & VT_LVAL))
128 expect("lvalue");
131 ST_FUNC void check_vstack(void)
133 if (pvtop != vtop)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */

#if 0
/* Dump 'b' vstack entries starting 'a' slots below the top, tagged with
   label 'lbl'.  Debug helper only; normally compiled out. */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC void tcc_debug_start(TCCState *s1)
156 if (s1->do_debug) {
157 char buf[512];
159 /* file info: full path + filename */
160 section_sym = put_elf_sym(symtab_section, 0, 0,
161 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
162 text_section->sh_num, NULL);
163 getcwd(buf, sizeof(buf));
164 #ifdef _WIN32
165 normalize_slashes(buf);
166 #endif
167 pstrcat(buf, sizeof(buf), "/");
168 put_stabs_r(buf, N_SO, 0, 0,
169 text_section->data_offset, text_section, section_sym);
170 put_stabs_r(file->filename, N_SO, 0, 0,
171 text_section->data_offset, text_section, section_sym);
172 last_ind = 0;
173 last_line_num = 0;
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section, 0, 0,
179 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
180 SHN_ABS, file->filename);
183 /* put end of translation unit info */
184 ST_FUNC void tcc_debug_end(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 put_stabs_r(NULL, N_SO, 0, 0,
189 text_section->data_offset, text_section, section_sym);
193 /* generate line number info */
194 ST_FUNC void tcc_debug_line(TCCState *s1)
196 if (!s1->do_debug)
197 return;
198 if ((last_line_num != file->line_num || last_ind != ind)) {
199 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
200 last_ind = ind;
201 last_line_num = file->line_num;
205 /* put function symbol */
206 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
208 char buf[512];
210 if (!s1->do_debug)
211 return;
213 /* stabs info */
214 /* XXX: we put here a dummy type */
215 snprintf(buf, sizeof(buf), "%s:%c1",
216 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
217 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
218 cur_text_section, sym->c);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE, 0, file->line_num, 0);
222 last_ind = 0;
223 last_line_num = 0;
226 /* put function size */
227 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
229 if (!s1->do_debug)
230 return;
231 put_stabn(N_FUN, 0, 0, size);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC int tccgen_compile(TCCState *s1)
237 cur_text_section = NULL;
238 funcname = "";
239 anon_sym = SYM_FIRST_ANOM;
240 section_sym = 0;
241 const_wanted = 0;
242 nocode_wanted = 0x80000000;
244 /* define some often used types */
245 int_type.t = VT_INT;
246 char_pointer_type.t = VT_BYTE;
247 mk_pointer(&char_pointer_type);
248 #if PTR_SIZE == 4
249 size_type.t = VT_INT | VT_UNSIGNED;
250 ptrdiff_type.t = VT_INT;
251 #elif LONG_SIZE == 4
252 size_type.t = VT_LLONG | VT_UNSIGNED;
253 ptrdiff_type.t = VT_LLONG;
254 #else
255 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
256 ptrdiff_type.t = VT_LONG | VT_LLONG;
257 #endif
258 func_old_type.t = VT_FUNC;
259 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
260 func_old_type.ref->f.func_call = FUNC_CDECL;
261 func_old_type.ref->f.func_type = FUNC_OLD;
263 tcc_debug_start(s1);
265 #ifdef TCC_TARGET_ARM
266 arm_init(s1);
267 #endif
269 #ifdef INC_DEBUG
270 printf("%s: **** new file\n", file->filename);
271 #endif
273 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
274 next();
275 decl(VT_CONST);
276 gen_inline_functions(s1);
277 check_vstack();
278 /* end of translation unit info */
279 tcc_debug_end(s1);
280 return 0;
283 /* ------------------------------------------------------------------------- */
284 /* apply storage attributes to Elf symbol */
286 static void update_storage(Sym *sym)
288 ElfW(Sym) *esym;
289 if (0 == sym->c)
290 return;
291 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
292 if (sym->a.visibility)
293 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
294 | sym->a.visibility;
295 if (sym->a.weak)
296 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
297 #ifdef TCC_TARGET_PE
298 if (sym->a.dllimport)
299 esym->st_other |= ST_PE_IMPORT;
300 if (sym->a.dllexport)
301 esym->st_other |= ST_PE_EXPORT;
302 #endif
303 #if 0
304 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
305 get_tok_str(sym->v, NULL),
306 sym->a.visibility,
307 sym->a.weak,
308 sym->a.dllexport,
309 sym->a.dllimport
311 #endif
314 /* ------------------------------------------------------------------------- */
315 /* update sym->c so that it points to an external symbol in section
316 'section' with value 'value' */
318 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
319 addr_t value, unsigned long size,
320 int can_add_underscore)
322 int sym_type, sym_bind, sh_num, info, other, t;
323 ElfW(Sym) *esym;
324 const char *name;
325 char buf1[256];
326 #ifdef CONFIG_TCC_BCHECK
327 char buf[32];
328 #endif
330 if (section == NULL)
331 sh_num = SHN_UNDEF;
332 else if (section == SECTION_ABS)
333 sh_num = SHN_ABS;
334 else
335 sh_num = section->sh_num;
337 if (!sym->c) {
338 name = get_tok_str(sym->v, NULL);
339 #ifdef CONFIG_TCC_BCHECK
340 if (tcc_state->do_bounds_check) {
341 /* XXX: avoid doing that for statics ? */
342 /* if bound checking is activated, we change some function
343 names by adding the "__bound" prefix */
344 switch(sym->v) {
345 #ifdef TCC_TARGET_PE
346 /* XXX: we rely only on malloc hooks */
347 case TOK_malloc:
348 case TOK_free:
349 case TOK_realloc:
350 case TOK_memalign:
351 case TOK_calloc:
352 #endif
353 case TOK_memcpy:
354 case TOK_memmove:
355 case TOK_memset:
356 case TOK_strlen:
357 case TOK_strcpy:
358 case TOK_alloca:
359 strcpy(buf, "__bound_");
360 strcat(buf, name);
361 name = buf;
362 break;
365 #endif
366 t = sym->type.t;
367 if ((t & VT_BTYPE) == VT_FUNC) {
368 sym_type = STT_FUNC;
369 } else if ((t & VT_BTYPE) == VT_VOID) {
370 sym_type = STT_NOTYPE;
371 } else {
372 sym_type = STT_OBJECT;
374 if (t & VT_STATIC)
375 sym_bind = STB_LOCAL;
376 else
377 sym_bind = STB_GLOBAL;
378 other = 0;
379 #ifdef TCC_TARGET_PE
380 if (sym_type == STT_FUNC && sym->type.ref) {
381 Sym *ref = sym->type.ref;
382 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
383 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
384 name = buf1;
385 other |= ST_PE_STDCALL;
386 can_add_underscore = 0;
389 #endif
390 if (tcc_state->leading_underscore && can_add_underscore) {
391 buf1[0] = '_';
392 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
393 name = buf1;
395 if (sym->asm_label)
396 name = get_tok_str(sym->asm_label, NULL);
397 info = ELFW(ST_INFO)(sym_bind, sym_type);
398 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
399 } else {
400 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
401 esym->st_value = value;
402 esym->st_size = size;
403 esym->st_shndx = sh_num;
405 update_storage(sym);
408 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
409 addr_t value, unsigned long size)
411 put_extern_sym2(sym, section, value, size, 1);
414 /* add a new relocation entry to symbol 'sym' in section 's' */
415 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
416 addr_t addend)
418 int c = 0;
420 if (nocode_wanted && s == cur_text_section)
421 return;
423 if (sym) {
424 if (0 == sym->c)
425 put_extern_sym(sym, NULL, 0, 0);
426 c = sym->c;
429 /* now we can add ELF relocation info */
430 put_elf_reloca(symtab_section, s, offset, type, c, addend);
433 #if PTR_SIZE == 4
434 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
436 greloca(s, sym, offset, type, 0);
438 #endif
440 /* ------------------------------------------------------------------------- */
441 /* symbol allocator */
442 static Sym *__sym_malloc(void)
444 Sym *sym_pool, *sym, *last_sym;
445 int i;
447 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
448 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
450 last_sym = sym_free_first;
451 sym = sym_pool;
452 for(i = 0; i < SYM_POOL_NB; i++) {
453 sym->next = last_sym;
454 last_sym = sym;
455 sym++;
457 sym_free_first = last_sym;
458 return last_sym;
461 static inline Sym *sym_malloc(void)
463 Sym *sym;
464 #ifndef SYM_DEBUG
465 sym = sym_free_first;
466 if (!sym)
467 sym = __sym_malloc();
468 sym_free_first = sym->next;
469 return sym;
470 #else
471 sym = tcc_malloc(sizeof(Sym));
472 return sym;
473 #endif
476 ST_INLN void sym_free(Sym *sym)
478 #ifndef SYM_DEBUG
479 sym->next = sym_free_first;
480 sym_free_first = sym;
481 #else
482 tcc_free(sym);
483 #endif
486 /* push, without hashing */
487 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
489 Sym *s;
491 s = sym_malloc();
492 memset(s, 0, sizeof *s);
493 s->v = v;
494 s->type.t = t;
495 s->c = c;
496 /* add in stack */
497 s->prev = *ps;
498 *ps = s;
499 return s;
502 /* find a symbol and return its associated structure. 's' is the top
503 of the symbol stack */
504 ST_FUNC Sym *sym_find2(Sym *s, int v)
506 while (s) {
507 if (s->v == v)
508 return s;
509 else if (s->v == -1)
510 return NULL;
511 s = s->prev;
513 return NULL;
516 /* structure lookup */
517 ST_INLN Sym *struct_find(int v)
519 v -= TOK_IDENT;
520 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
521 return NULL;
522 return table_ident[v]->sym_struct;
525 /* find an identifier */
526 ST_INLN Sym *sym_find(int v)
528 v -= TOK_IDENT;
529 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
530 return NULL;
531 return table_ident[v]->sym_identifier;
534 /* push a given symbol on the symbol stack */
535 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
537 Sym *s, **ps;
538 TokenSym *ts;
540 if (local_stack)
541 ps = &local_stack;
542 else
543 ps = &global_stack;
544 s = sym_push2(ps, v, type->t, c);
545 s->type.ref = type->ref;
546 s->r = r;
547 /* don't record fields or anonymous symbols */
548 /* XXX: simplify */
549 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
550 /* record symbol in token array */
551 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
552 if (v & SYM_STRUCT)
553 ps = &ts->sym_struct;
554 else
555 ps = &ts->sym_identifier;
556 s->prev_tok = *ps;
557 *ps = s;
558 s->sym_scope = local_scope;
559 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
560 tcc_error("redeclaration of '%s'",
561 get_tok_str(v & ~SYM_STRUCT, NULL));
563 return s;
566 /* push a global identifier */
567 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
569 Sym *s, **ps;
570 s = sym_push2(&global_stack, v, t, c);
571 /* don't record anonymous symbol */
572 if (v < SYM_FIRST_ANOM) {
573 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
574 /* modify the top most local identifier, so that
575 sym_identifier will point to 's' when popped */
576 while (*ps != NULL)
577 ps = &(*ps)->prev_tok;
578 s->prev_tok = NULL;
579 *ps = s;
581 return s;
584 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
585 pop them yet from the list, but do remove them from the token array. */
586 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
588 Sym *s, *ss, **ps;
589 TokenSym *ts;
590 int v;
592 s = *ptop;
593 while(s != b) {
594 ss = s->prev;
595 v = s->v;
596 /* remove symbol in token array */
597 /* XXX: simplify */
598 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
599 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
600 if (v & SYM_STRUCT)
601 ps = &ts->sym_struct;
602 else
603 ps = &ts->sym_identifier;
604 *ps = s->prev_tok;
606 if (!keep)
607 sym_free(s);
608 s = ss;
610 if (!keep)
611 *ptop = b;
614 /* ------------------------------------------------------------------------- */
616 static void vsetc(CType *type, int r, CValue *vc)
618 int v;
620 if (vtop >= vstack + (VSTACK_SIZE - 1))
621 tcc_error("memory full (vstack)");
622 /* cannot let cpu flags if other instruction are generated. Also
623 avoid leaving VT_JMP anywhere except on the top of the stack
624 because it would complicate the code generator.
626 Don't do this when nocode_wanted. vtop might come from
627 !nocode_wanted regions (see 88_codeopt.c) and transforming
628 it to a register without actually generating code is wrong
629 as their value might still be used for real. All values
630 we push under nocode_wanted will eventually be popped
631 again, so that the VT_CMP/VT_JMP value will be in vtop
632 when code is unsuppressed again.
634 Same logic below in vswap(); */
635 if (vtop >= vstack && !nocode_wanted) {
636 v = vtop->r & VT_VALMASK;
637 if (v == VT_CMP || (v & ~1) == VT_JMP)
638 gv(RC_INT);
641 vtop++;
642 vtop->type = *type;
643 vtop->r = r;
644 vtop->r2 = VT_CONST;
645 vtop->c = *vc;
646 vtop->sym = NULL;
649 ST_FUNC void vswap(void)
651 SValue tmp;
652 /* cannot vswap cpu flags. See comment at vsetc() above */
653 if (vtop >= vstack && !nocode_wanted) {
654 int v = vtop->r & VT_VALMASK;
655 if (v == VT_CMP || (v & ~1) == VT_JMP)
656 gv(RC_INT);
658 tmp = vtop[0];
659 vtop[0] = vtop[-1];
660 vtop[-1] = tmp;
663 /* pop stack value */
664 ST_FUNC void vpop(void)
666 int v;
667 v = vtop->r & VT_VALMASK;
668 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
669 /* for x86, we need to pop the FP stack */
670 if (v == TREG_ST0) {
671 o(0xd8dd); /* fstp %st(0) */
672 } else
673 #endif
674 if (v == VT_JMP || v == VT_JMPI) {
675 /* need to put correct jump if && or || without test */
676 gsym(vtop->c.i);
678 vtop--;
681 /* push constant of type "type" with useless value */
682 ST_FUNC void vpush(CType *type)
684 vset(type, VT_CONST, 0);
687 /* push integer constant */
688 ST_FUNC void vpushi(int v)
690 CValue cval;
691 cval.i = v;
692 vsetc(&int_type, VT_CONST, &cval);
695 /* push a pointer sized constant */
696 static void vpushs(addr_t v)
698 CValue cval;
699 cval.i = v;
700 vsetc(&size_type, VT_CONST, &cval);
703 /* push arbitrary 64bit constant */
704 ST_FUNC void vpush64(int ty, unsigned long long v)
706 CValue cval;
707 CType ctype;
708 ctype.t = ty;
709 ctype.ref = NULL;
710 cval.i = v;
711 vsetc(&ctype, VT_CONST, &cval);
714 /* push long long constant */
715 static inline void vpushll(long long v)
717 vpush64(VT_LLONG, v);
720 ST_FUNC void vset(CType *type, int r, int v)
722 CValue cval;
724 cval.i = v;
725 vsetc(type, r, &cval);
728 static void vseti(int r, int v)
730 CType type;
731 type.t = VT_INT;
732 type.ref = NULL;
733 vset(&type, r, v);
736 ST_FUNC void vpushv(SValue *v)
738 if (vtop >= vstack + (VSTACK_SIZE - 1))
739 tcc_error("memory full (vstack)");
740 vtop++;
741 *vtop = *v;
744 static void vdup(void)
746 vpushv(vtop);
749 /* rotate n first stack elements to the bottom
750 I1 ... In -> I2 ... In I1 [top is right]
752 ST_FUNC void vrotb(int n)
754 int i;
755 SValue tmp;
757 tmp = vtop[-n + 1];
758 for(i=-n+1;i!=0;i++)
759 vtop[i] = vtop[i+1];
760 vtop[0] = tmp;
763 /* rotate the n elements before entry e towards the top
764 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
766 ST_FUNC void vrote(SValue *e, int n)
768 int i;
769 SValue tmp;
771 tmp = *e;
772 for(i = 0;i < n - 1; i++)
773 e[-i] = e[-i - 1];
774 e[-n + 1] = tmp;
777 /* rotate n first stack elements to the top
778 I1 ... In -> In I1 ... I(n-1) [top is right]
780 ST_FUNC void vrott(int n)
782 vrote(vtop, n);
785 /* push a symbol value of TYPE */
786 static inline void vpushsym(CType *type, Sym *sym)
788 CValue cval;
789 cval.i = 0;
790 vsetc(type, VT_CONST | VT_SYM, &cval);
791 vtop->sym = sym;
794 /* Return a static symbol pointing to a section */
795 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
797 int v;
798 Sym *sym;
800 v = anon_sym++;
801 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
802 sym->type.ref = type->ref;
803 sym->r = VT_CONST | VT_SYM;
804 put_extern_sym(sym, sec, offset, size);
805 return sym;
808 /* push a reference to a section offset by adding a dummy symbol */
809 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
811 vpushsym(type, get_sym_ref(type, sec, offset, size));
814 /* define a new external reference to a symbol 'v' of type 'u' */
815 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
817 Sym *s;
819 s = sym_find(v);
820 if (!s) {
821 /* push forward reference */
822 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
823 s->type.ref = type->ref;
824 s->r = r | VT_CONST | VT_SYM;
826 return s;
829 /* Merge some storage attributes. */
830 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
832 if (type && !is_compatible_types(&sym->type, type))
833 tcc_error("incompatible types for redefinition of '%s'",
834 get_tok_str(sym->v, NULL));
835 #ifdef TCC_TARGET_PE
836 if (sym->a.dllimport != ad->a.dllimport)
837 tcc_error("incompatible dll linkage for redefinition of '%s'",
838 get_tok_str(sym->v, NULL));
839 #endif
840 sym->a.dllexport |= ad->a.dllexport;
841 sym->a.weak |= ad->a.weak;
842 if (ad->a.visibility) {
843 int vis = sym->a.visibility;
844 int vis2 = ad->a.visibility;
845 if (vis == STV_DEFAULT)
846 vis = vis2;
847 else if (vis2 != STV_DEFAULT)
848 vis = (vis < vis2) ? vis : vis2;
849 sym->a.visibility = vis;
851 if (ad->a.aligned)
852 sym->a.aligned = ad->a.aligned;
853 if (ad->asm_label)
854 sym->asm_label = ad->asm_label;
855 update_storage(sym);
858 /* define a new external reference to a symbol 'v' */
859 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
861 Sym *s;
862 s = sym_find(v);
863 if (!s) {
864 /* push forward reference */
865 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
866 s->type.t |= VT_EXTERN;
867 s->a = ad->a;
868 s->sym_scope = 0;
869 } else {
870 if (s->type.ref == func_old_type.ref) {
871 s->type.ref = type->ref;
872 s->r = r | VT_CONST | VT_SYM;
873 s->type.t |= VT_EXTERN;
875 patch_storage(s, ad, type);
877 return s;
880 /* push a reference to global symbol v */
881 ST_FUNC void vpush_global_sym(CType *type, int v)
883 vpushsym(type, external_global_sym(v, type, 0));
886 /* save registers up to (vtop - n) stack entry */
887 ST_FUNC void save_regs(int n)
889 SValue *p, *p1;
890 for(p = vstack, p1 = vtop - n; p <= p1; p++)
891 save_reg(p->r);
894 /* save r to the memory stack, and mark it as being free */
895 ST_FUNC void save_reg(int r)
897 save_reg_upstack(r, 0);
900 /* save r to the memory stack, and mark it as being free,
901 if seen up to (vtop - n) stack entry */
902 ST_FUNC void save_reg_upstack(int r, int n)
904 int l, saved, size, align;
905 SValue *p, *p1, sv;
906 CType *type;
908 if ((r &= VT_VALMASK) >= VT_CONST)
909 return;
910 if (nocode_wanted)
911 return;
913 /* modify all stack values */
914 saved = 0;
915 l = 0;
916 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
917 if ((p->r & VT_VALMASK) == r ||
918 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
919 /* must save value on stack if not already done */
920 if (!saved) {
921 /* NOTE: must reload 'r' because r might be equal to r2 */
922 r = p->r & VT_VALMASK;
923 /* store register in the stack */
924 type = &p->type;
925 if ((p->r & VT_LVAL) ||
926 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
927 #if PTR_SIZE == 8
928 type = &char_pointer_type;
929 #else
930 type = &int_type;
931 #endif
932 size = type_size(type, &align);
933 loc = (loc - size) & -align;
934 sv.type.t = type->t;
935 sv.r = VT_LOCAL | VT_LVAL;
936 sv.c.i = loc;
937 store(r, &sv);
938 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
939 /* x86 specific: need to pop fp register ST0 if saved */
940 if (r == TREG_ST0) {
941 o(0xd8dd); /* fstp %st(0) */
943 #endif
944 #if PTR_SIZE == 4
945 /* special long long case */
946 if ((type->t & VT_BTYPE) == VT_LLONG) {
947 sv.c.i += 4;
948 store(p->r2, &sv);
950 #endif
951 l = loc;
952 saved = 1;
954 /* mark that stack entry as being saved on the stack */
955 if (p->r & VT_LVAL) {
956 /* also clear the bounded flag because the
957 relocation address of the function was stored in
958 p->c.i */
959 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
960 } else {
961 p->r = lvalue_type(p->type.t) | VT_LOCAL;
963 p->r2 = VT_CONST;
964 p->c.i = l;
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for (r = 0; r < NB_REGS; r++) {
        if (reg_classes[r] & rc2) {
            int refs = 0;
            for (p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    refs++;
            }
            if (refs <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
994 /* find a free register of class 'rc'. If none, save one register */
995 ST_FUNC int get_reg(int rc)
997 int r;
998 SValue *p;
1000 /* find a free register */
1001 for(r=0;r<NB_REGS;r++) {
1002 if (reg_classes[r] & rc) {
1003 if (nocode_wanted)
1004 return r;
1005 for(p=vstack;p<=vtop;p++) {
1006 if ((p->r & VT_VALMASK) == r ||
1007 (p->r2 & VT_VALMASK) == r)
1008 goto notfound;
1010 return r;
1012 notfound: ;
1015 /* no register left : free the first one on the stack (VERY
1016 IMPORTANT to start from the bottom to ensure that we don't
1017 spill registers used in gen_opi()) */
1018 for(p=vstack;p<=vtop;p++) {
1019 /* look at second register (if long long) */
1020 r = p->r2 & VT_VALMASK;
1021 if (r < VT_CONST && (reg_classes[r] & rc))
1022 goto save_found;
1023 r = p->r & VT_VALMASK;
1024 if (r < VT_CONST && (reg_classes[r] & rc)) {
1025 save_found:
1026 save_reg(r);
1027 return r;
1030 /* Should never comes here */
1031 return -1;
1034 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1035 if needed */
1036 static void move_reg(int r, int s, int t)
1038 SValue sv;
1040 if (r != s) {
1041 save_reg(r);
1042 sv.type.t = t;
1043 sv.type.ref = NULL;
1044 sv.r = s;
1045 sv.c.i = 0;
1046 load(r, &sv);
1050 /* get address of vtop (vtop MUST BE an lvalue) */
1051 ST_FUNC void gaddrof(void)
1053 vtop->r &= ~VT_LVAL;
1054 /* tricky: if saved lvalue, then we can go back to lvalue */
1055 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1056 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
1089 static void incr_bf_adr(int o)
1091 vtop->type = char_pointer_type;
1092 gaddrof();
1093 vpushi(o);
1094 gen_op('+');
1095 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1096 | (VT_BYTE|VT_UNSIGNED);
1097 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1098 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1101 /* single-byte load mode for packed or otherwise unaligned bitfields */
1102 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1104 int n, o, bits;
1105 save_reg_upstack(vtop->r, 1);
1106 vpush64(type->t & VT_BTYPE, 0); // B X
1107 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1108 do {
1109 vswap(); // X B
1110 incr_bf_adr(o);
1111 vdup(); // X B B
1112 n = 8 - bit_pos;
1113 if (n > bit_size)
1114 n = bit_size;
1115 if (bit_pos)
1116 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1117 if (n < 8)
1118 vpushi((1 << n) - 1), gen_op('&');
1119 gen_cast(type);
1120 if (bits)
1121 vpushi(bits), gen_op(TOK_SHL);
1122 vrotb(3); // B Y X
1123 gen_op('|'); // B X
1124 bits += n, bit_size -= n, o = 1;
1125 } while (bit_size);
1126 vswap(), vpop();
1127 if (!(type->t & VT_UNSIGNED)) {
1128 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1129 vpushi(n), gen_op(TOK_SHL);
1130 vpushi(n), gen_op(TOK_SAR);
1134 /* single-byte store mode for packed or otherwise unaligned bitfields */
1135 static void store_packed_bf(int bit_pos, int bit_size)
1137 int bits, n, o, m, c;
1139 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1140 vswap(); // X B
1141 save_reg_upstack(vtop->r, 1);
1142 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1143 do {
1144 incr_bf_adr(o); // X B
1145 vswap(); //B X
1146 c ? vdup() : gv_dup(); // B V X
1147 vrott(3); // X B V
1148 if (bits)
1149 vpushi(bits), gen_op(TOK_SHR);
1150 if (bit_pos)
1151 vpushi(bit_pos), gen_op(TOK_SHL);
1152 n = 8 - bit_pos;
1153 if (n > bit_size)
1154 n = bit_size;
1155 if (n < 8) {
1156 m = ((1 << n) - 1) << bit_pos;
1157 vpushi(m), gen_op('&'); // X B V1
1158 vpushv(vtop-1); // X B V1 B
1159 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1160 gen_op('&'); // X B V1 B1
1161 gen_op('|'); // X B V2
1163 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1164 vstore(), vpop(); // X B
1165 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1166 } while (bit_size);
1167 vpop(), vpop();
1170 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1172 int t;
1173 if (0 == sv->type.ref)
1174 return 0;
1175 t = sv->type.ref->auxtype;
1176 if (t != -1 && t != VT_STRUCT) {
1177 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1178 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1180 return t;
/* store vtop in a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures).
   Returns the register actually chosen. */
ST_FUNC int gv(int rc)
    int r, bit_pos, bit_size, size, align, rc2;
    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        /* bit-field read: load the storage unit, then shift/mask the
           field into place (or delegate to load_packed_bf) */
        CType type;
        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;
        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;
        r = adjust_bf(vtop, bit_pos, bit_size);
        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;
        if (r == VT_STRUCT) {
            /* packed bit-field: must be assembled byte by byte */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            unsigned long offset;
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(data_section, size, align);
            vpush_ref(&vtop->type, data_section, offset, size);
            vswap();
            init_putv(&vtop->type, data_section, offset);
            vtop->r |= VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        r = vtop->r & VT_VALMASK;
        /* rc2 is the class for the second register of a two-register
           value (long long on 32-bit, qlong/qfloat on 64-bit) */
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif
        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
            || (vtop->r & VT_LVAL)
            || !(reg_classes[r] & rc)
#if PTR_SIZE == 8
            || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
            || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
            || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            r = get_reg(rc);
#if PTR_SIZE == 8
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if PTR_SIZE == 4
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (vtop->r & VT_LVAL) {
                    /* We do not want to modify the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
#if 0
                    save_regs(1);
#else
                    /* lvalue_save: save only if used further down the stack */
                    save_reg_upstack(vtop->r, 1);
#endif
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                /* one register type load */
                load(r, vtop);
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    return r;
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2,
   re-loading the first one if generating the second spilled it */
ST_FUNC void gv2(int rc1, int rc2)
    int v;
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    v = vtop[0].r & VT_VALMASK;
    if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
#ifndef TCC_TARGET_ARM64
/* wrapper around RC_FRET to return a register class by type:
   on x86-64 long double results come back in ST0, not SSE */
static int rc_fret(int t)
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return RC_ST0;
#endif
    return RC_FRET;
#endif
/* wrapper around REG_FRET to return a register by type:
   on x86-64 long double results come back in ST0, not SSE */
static int reg_fret(int t)
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return TREG_ST0;
#endif
    return REG_FRET;
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints: after this, vtop[-1] is the low
   word and vtop[0] the high word, both typed VT_INT (sign preserved
   in 'u') */
static void lexpand(void)
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift to get the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: second word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* value in a register pair: split r / r2 */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
#endif
#ifdef TCC_TARGET_ARM
/* expand long long on stack (ARM variant that avoids forcing the
   value into registers when it is a constant or a memory lvalue) */
ST_FUNC void lexpand_nr(void)
    int u,v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    vdup();
    vtop->r2 = VT_CONST;
    vtop->type.t = VT_INT | u;
    v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: low word below, high word on top */
        vtop[-1].c.i = vtop->c.i;
        vtop->c.i = vtop->c.i >> 32;
        vtop->r = VT_CONST;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: high word is 4 bytes further */
        vtop->c.i += 4;
        vtop->r = vtop[-1].r;
    } else if (v > VT_CONST) {
        /* anything else: fall back to the register-based expansion */
        vtop--;
        lexpand();
    } else
        vtop->r = vtop[-1].r2;
    vtop[-1].r2 = VT_CONST;
    vtop[-1].type.t = VT_INT | u;
#endif
#if PTR_SIZE == 4
/* build a long long from two ints: vtop[-1] = low word, vtop[0] =
   high word; result is a single SValue of type 't' using r/r2 */
static void lbuild(int t)
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
    int rc, t, r, r1;
    SValue sv;
    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* materialize the bit-field first so the expansion below
               sees a plain long long */
            gv(RC_INT);
            t = vtop->type.t;
        /* duplicate word by word, then rebuild both long longs */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
    } else
#endif
    /* duplicate value */
    rc = RC_INT;
    sv.type.t = VT_INT;
    if (is_float(t)) {
        rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
        if ((t & VT_BTYPE) == VT_LDOUBLE) {
            rc = RC_ST0;
#endif
        sv.type.t = t;
    r = gv(rc);
    r1 = get_reg(rc);
    sv.r = r;
    sv.c.i = 0;
    load(r1, &sv); /* move r to r1 */
    vdup();
    /* duplicates value */
    if (r != r1)
        vtop->r = r1;
/* Generate value test
 * Generate a test for any value (jump, comparison and integers).
 * 'inv' inverts the condition, 't' is the chained jump target list;
 * returns the (possibly extended) jump list. */
ST_FUNC int gvtst(int inv, int t)
    int v = vtop->r & VT_VALMASK;
    if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
        /* plain value: compare against zero to get a VT_CMP */
        vpushi(0);
        gen_op(TOK_NE);
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
        /* constant jmp optimization */
        if ((vtop->c.i != 0) != inv)
            t = gjmp(t);
        vtop--;
        return t;
    return gtst(inv, t);
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations on a
   32-bit target by splitting each operand into two 32-bit words
   (via lexpand) and recombining (via lbuild), or by calling the
   libgcc-style runtime helpers for div/mod/variable shifts */
static void gen_opl(int op)
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_LRET;
    SValue tmp;
    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI __aeabi_* mod helpers return the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_global_sym(&func_old_type, func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: low = umull(L1,L2).lo,
               high = umull.hi + H1*L2 + H2*L1 */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops act on the two halves independently */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic right by >=32: high word becomes
                       the sign extension of the old high word */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(TOK_NE);
                vtop->r = VT_CMP;
                b = gvtst(0, 0);
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
        a = gvtst(1, a);
        gsym(b);
        vseti(VT_JMPI, a);
        break;
#endif
/* Signed 64-bit division performed entirely in unsigned arithmetic:
   divide the magnitudes, then restore the quotient's sign.  Avoids
   relying on the host compiler's signed division (overflow on
   INT64_MIN / -1 would be undefined behavior with plain int64_t). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    /* magnitude of each operand (two's-complement negate via 0 - x) */
    uint64_t ua = (a >> 63) ? (uint64_t)0 - a : a;
    uint64_t ub = (b >> 63) ? (uint64_t)0 - b : b;
    uint64_t q = ua / ub;
    /* result is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? (uint64_t)0 - q : q;
}
/* Signed 64-bit '<' evaluated on uint64_t operands: flipping the sign
   bit maps the signed ordering onto the unsigned ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    uint64_t ba = a ^ sign_bit;
    uint64_t bb = b ^ sign_bit;
    return ba < bb;
}
/* handle integer constant optimizations and various machine
   independent opt: folds const-const operations, normalizes operand
   order for commutative ops, strength-reduces mul/div by powers of
   two, folds symbol+constant, and otherwise falls through to the
   target code generators gen_opi()/gen_opl() */
static void gen_opic(int op)
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;
    /* sign- or zero-extend 32-bit operands so the 64-bit folding
       below behaves like 32-bit arithmetic */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
    if (c1 && c2) {
        /* both constants: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;
        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift without UB on negative values */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        /* re-normalize a 32-bit result */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*')) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
/* generate a floating point operation with constant propagation;
   falls back to the target generator gen_opf() for non-constant
   operands, non-finite values and unsupported operators */
static void gen_opif(int op)
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined _AMD64_
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;
    v1 = vtop - 1;
    v2 = vtop;
    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        /* promote both operands to long double for the folding */
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        if (!ieee_finite(f1) || !ieee_finite(f2))
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            f1 /= f2;
            break;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        /* XXX: overflow test ? */
        /* store the result back in the operand's own precision */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        vtop--;
    } else {
    general_case:
        gen_opf(op);
/* size in bytes of the type 'type' points to (alignment discarded);
   negative for incomplete types, per type_size() */
static int pointed_size(CType *type)
    int align;
    return type_size(pointed_type(type), &align);
/* push code computing, at run time, the size of the VLA element type
   that 'type' points to (alignment discarded) */
static void vla_runtime_pointed_size(CType *type)
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
/* true if 'p' is a compile-time null pointer constant: a constant
   int/long long/pointer with value 0 (32-bit compare for int and for
   pointers when PTR_SIZE == 4) */
static inline int is_null_pointer(SValue *p)
    if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        return 0;
    return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_PTR &&
         (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2032 static inline int is_integer_btype(int bt)
2034 return (bt == VT_BYTE || bt == VT_SHORT ||
2035 bt == VT_INT || bt == VT_LLONG);
/* check types for comparison or subtraction of pointers: warns on
   pointer/integer mixes and on distinct pointer types, errors on
   invalid operand combinations ('op' is the operator being applied
   to p1 and p2) */
static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
    CType *type1, *type2, tmp_type1, tmp_type2;
    int bt1, bt2;
    /* null pointers are accepted for all comparisons as gcc */
    if (is_null_pointer(p1) || is_null_pointer(p2))
        return;
    type1 = &p1->type;
    type2 = &p2->type;
    bt1 = type1->t & VT_BTYPE;
    bt2 = type2->t & VT_BTYPE;
    /* accept comparison between pointer and integer with a warning */
    if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
        if (op != TOK_LOR && op != TOK_LAND )
            tcc_warning("comparison between pointer and integer");
        return;
    /* both must be pointers or implicit function pointers */
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
    } else if (bt1 != VT_FUNC)
        goto invalid_operands;
    if (bt2 == VT_PTR) {
        type2 = pointed_type(type2);
    } else if (bt2 != VT_FUNC) {
    invalid_operands:
        tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
    /* void* compares with any pointer type */
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID)
        return;
    tmp_type1 = *type1;
    tmp_type2 = *type2;
    /* ignore sign and qualifiers when comparing the pointed types */
    tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        /* gcc-like error if '-' is used */
        if (op == '-')
            goto invalid_operands;
        else
            tcc_warning("comparison of distinct pointer types lacks a cast");
/* generic gen_op: handles types problems — applies the usual
   arithmetic conversions, pointer arithmetic scaling, pointer
   difference division, then dispatches to gen_opif()/gen_opic() */
ST_FUNC void gen_op(int op)
    int u, t1, t2, bt1, bt2, t;
    CType type1;
redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;
    if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        tcc_error("operation on a struct");
    } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function designators decay to pointers, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        goto redo;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        if (op >= TOK_ULT && op <= TOK_LOR) {
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* pointers are handled as unsigned */
#if PTR_SIZE == 8
            t = VT_LLONG | VT_UNSIGNED;
#else
            t = VT_INT | VT_UNSIGNED;
#endif
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* XXX: check that types are compatible */
            if (vtop[-1].type.t & VT_VLA) {
                vla_runtime_pointed_size(&vtop[-1].type);
            } else {
                vpushi(pointed_size(&vtop[-1].type));
            /* byte difference divided by element size */
            vrott(3);
            gen_opic(op);
            vtop->type.t = ptrdiff_type.t;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            type1.t &= ~VT_ARRAY;
            if (vtop[-1].type.t & VT_VLA)
                vla_runtime_pointed_size(&vtop[-1].type);
            else {
                u = pointed_size(&vtop[-1].type);
                if (u < 0)
                    tcc_error("unknown array element size");
#if PTR_SIZE == 8
                vpushll(u);
#else
                /* XXX: cast to int ? (long long case) */
                vpushi(u);
#endif
            /* scale the integer operand by the element size */
            gen_op('*');
#if 0
/* #ifdef CONFIG_TCC_BCHECK
    The main reason to removing this code:
	#include <stdio.h>
	int main ()
	{
	    int v[10];
	    int i = 10;
	    int j = 9;
	    fprintf(stderr, "v+i-j  = %p\n", v+i-j);
	    fprintf(stderr, "v+(i-j)  = %p\n", v+(i-j));
	}
    When this code is on. then the output looks like
	v+i-j = 0xfffffffe
	v+(i-j) = 0xbff84000
*/
            /* if evaluating constant expression, no code should be
               generated, so no bound check */
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                gen_bounded_ptr_add();
            } else
#endif
            gen_opic(op);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* compute bigger type and do implicit casts */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            t = VT_DOUBLE;
        } else {
            t = VT_FLOAT;
        /* floats can only be used for a few operations */
        if (op != '+' && op != '-' && op != '*' && op != '/' &&
            (op < TOK_ULT || op > TOK_GT))
            tcc_error("invalid operands for binary operation");
        goto std_op;
    } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
        /* shift result type follows the left operand only */
        t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        t |= (VT_LONG & t1);
        goto std_op;
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            t &= t1;
        if (bt2 == VT_LLONG)
            t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        goto std_op;
    } else {
        /* integer operations */
        t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            t |= VT_UNSIGNED;
    std_op:
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        vswap();
        type1.t = t;
        type1.ref = NULL;
        gen_cast(&type1);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            type1.t = VT_INT;
        gen_cast(&type1);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (op >= TOK_ULT && op <= TOK_GT) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
#ifndef TCC_TARGET_ARM
/* generic itof for unsigned long long case: most targets cannot
   convert u64->float natively, so call the libgcc helpers; every
   other case goes straight to the target's gen_cvt_itof() */
static void gen_cvt_itof1(int t)
#ifdef TCC_TARGET_ARM64
    gen_cvt_itof(t);
#else
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {
        if (t == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___floatundixf);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* result comes back in the FP return register for type t */
        vtop->r = reg_fret(t);
    } else {
        gen_cvt_itof(t);
#endif
#endif
/* generic ftoi for unsigned long long case: float->u64 is not
   handled natively on most targets, so call the libgcc helpers;
   every other case goes to the target's gen_cvt_ftoi() */
static void gen_cvt_ftoi1(int t)
#ifdef TCC_TARGET_ARM64
    gen_cvt_ftoi(t);
#else
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* 64-bit result in the integer return register pair */
        vtop->r = REG_IRET;
        vtop->r2 = REG_LRET;
    } else {
        gen_cvt_ftoi(t);
#endif
/* force char or short cast: truncate vtop to 8 or 16 bits by masking
   (unsigned) or by a shift-left/shift-right pair (signed) */
static void force_charshort_cast(int t)
    int bits, dbt;
    /* cannot cast static initializers */
    if (STATIC_DATA_WANTED)
        return;
    dbt = t & VT_BTYPE;
    /* XXX: add optimization if lvalue : just change type and offset */
    if (dbt == VT_BYTE)
        bits = 8;
    else
        bits = 16;
    if (t & VT_UNSIGNED) {
        /* zero-extend: mask off the high bits */
        vpushi((1 << bits) - 1);
        gen_op('&');
    } else {
        /* sign-extend: shift up then arithmetic shift back down */
        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            bits = 64 - bits;
        else
            bits = 32 - bits;
        vpushi(bits);
        gen_op(TOK_SHL);
        /* result must be signed or the SAR is converted to an SHL
           This was not the case when "t" was a signed short
           and the last value on the stack was an unsigned int */
        vtop->type.t &= ~VT_UNSIGNED;
        vpushi(bits);
        gen_op(TOK_SAR);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden.
   Convenience wrapper: builds a ref-less CType from the plain type
   bits 't' and forwards to gen_cast(). */
static void gen_cast_s(int t)
    CType type;
    type.t = t;
    type.ref = NULL;
    gen_cast(&type);
/* generic cast of 'vtop' to 'type': folds the conversion at compile time
   when the source is a constant, otherwise emits conversion code */
static void gen_cast(CType *type)
    int sbt, dbt, sf, df, c, p;
    /* special delayed cast for char/short */
    /* XXX: in some cases (multiple cascaded casts), it may still
       be incorrect */
    if (vtop->r & VT_MUSTCAST) {
        vtop->r &= ~VT_MUSTCAST;
        force_charshort_cast(vtop->type.t);
    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD) {
        gv(RC_INT);
    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        /* c: source is a plain numeric constant (foldable right now) */
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
        /* p: source is a symbol-address constant (value fixed at link
           time; treated as non-null in the VT_BOOL case below) */
        p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* when cross-compiling, the host cannot fold to the target's
           long double representation */
        c &= dbt != VT_LDOUBLE;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;
            if (df) {
                if ((sbt & VT_BTYPE) == VT_LLONG) {
                    /* double negation avoids signed-overflow issues when
                       converting large/negative 64-bit values */
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
                vtop->c.i = vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer <-> integer: first normalize the source ... */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt == (VT_LLONG|VT_UNSIGNED))
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
#if PTR_SIZE == 8
                else if (sbt == VT_PTR)
#endif
                else if (sbt != VT_LLONG)
                    /* sign-extend a 32-bit signed source to 64 bits */
                    vtop->c.i = ((uint32_t)vtop->c.i |
                                  -(vtop->c.i & 0x80000000));
                /* ... then truncate/extend to the destination type */
                if (dbt == (VT_LLONG|VT_UNSIGNED))
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
#if PTR_SIZE == 8
                else if (dbt == VT_PTR)
#endif
                else if (dbt != VT_LLONG) {
                    /* mask to the destination width, then sign-extend
                       unless the destination is unsigned */
                    uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
                                  (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
                                  0xffffffff);
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
        } else if (p && dbt == VT_BOOL) {
            /* address of a symbol: assumed non-null, so (bool) is 1 */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
        } else {
            /* non constant case: generate code */
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else if (sf) {
                /* convert fp to int */
                if (dbt == VT_BOOL) {
                    vpushi(0);
                    gen_op(TOK_NE);
                } else {
                    /* we handle char/short/etc... with generic code */
                    if (dbt != (VT_INT | VT_UNSIGNED) &&
                        dbt != (VT_LLONG | VT_UNSIGNED) &&
                        dbt != VT_LLONG)
                        dbt = VT_INT;
                    gen_cvt_ftoi1(dbt);
                    if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
                        /* additional cast for char/short... */
                        vtop->type.t = dbt;
                        gen_cast(type);
#if PTR_SIZE == 4
            } else if ((dbt & VT_BTYPE) == VT_LLONG) {
                if ((sbt & VT_BTYPE) != VT_LLONG) {
                    /* scalar to long long */
                    /* machine independent conversion */
                    gv(RC_INT);
                    /* generate high word */
                    if (sbt == (VT_INT | VT_UNSIGNED)) {
                        vpushi(0);
                        gv(RC_INT);
                    } else {
                        if (sbt == VT_PTR) {
                            /* cast from pointer to int before we apply
                               shift operation, which pointers don't support*/
                            gen_cast_s(VT_INT);
                        gv_dup();
                        vpushi(31);
                        gen_op(TOK_SAR);
                    /* patch second register */
                    vtop[-1].r2 = vtop->r;
                    vpop();
#else
            } else if ((dbt & VT_BTYPE) == VT_LLONG ||
                       (dbt & VT_BTYPE) == VT_PTR ||
                       (dbt & VT_BTYPE) == VT_FUNC) {
                if ((sbt & VT_BTYPE) != VT_LLONG &&
                    (sbt & VT_BTYPE) != VT_PTR &&
                    (sbt & VT_BTYPE) != VT_FUNC) {
                    /* need to convert from 32bit to 64bit */
                    gv(RC_INT);
                    if (sbt != (VT_INT | VT_UNSIGNED)) {
#if defined(TCC_TARGET_ARM64)
                        gen_cvt_sxtw();
#elif defined(TCC_TARGET_X86_64)
                        int r = gv(RC_INT);
                        /* x86_64 specific: movslq */
                        o(0x6348);
                        o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
#else
#error
#endif
#endif
            } else if (dbt == VT_BOOL) {
                /* scalar to bool */
                vpushi(0);
                gen_op(TOK_NE);
            } else if ((dbt & VT_BTYPE) == VT_BYTE ||
                       (dbt & VT_BTYPE) == VT_SHORT) {
                if (sbt == VT_PTR) {
                    vtop->type.t = VT_INT;
                    tcc_warning("nonportable conversion from pointer to char/short");
                force_charshort_cast(dbt);
#if PTR_SIZE == 4
            } else if ((dbt & VT_BTYPE) == VT_INT) {
                /* scalar to int */
                if ((sbt & VT_BTYPE) == VT_LLONG) {
                    /* from long long: just take low order word */
                    lexpand();
                    vpop();
                /* if lvalue and single word type, nothing to do because
                   the lvalue already contains the real type size (see
                   VT_LVAL_xxx constants) */
#endif
    } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
        /* if we are casting between pointer types,
           we must update the VT_LVAL_xxx size */
        vtop->r = (vtop->r & ~VT_LVAL_TYPE)
                 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
    vtop->type = *type;
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
    Sym *s;
    int bt;
    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union: size/alignment were computed by struct_layout()
           and stored on the struct symbol (c = size, r = align) */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;
            s = type->ref;
            ts = type_size(&s->type, a);
            /* NOTE(review): ts < 0 presumably flags a size not known at
               compile time; when the element count s->c is also negative
               the sign is flipped so the product keeps that flag -- confirm */
            if (ts < 0 && s->c < 0)
                ts = -ts;
            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
    } else if (IS_ENUM(type->t) && type->ref->c == -1) {
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: their alignment is target-ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        /* 16-byte quantities aligned to 8 */
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
2656 /* push type size as known at runtime time on top of value stack. Put
2657 alignment at 'a' */
2658 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2660 if (type->t & VT_VLA) {
2661 type_size(&type->ref->type, a);
2662 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2663 } else {
2664 vpushi(type_size(type, a));
2668 static void vla_sp_restore(void) {
2669 if (vlas_in_scope) {
2670 gen_vla_sp_restore(vla_sp_loc);
2674 static void vla_sp_restore_root(void) {
2675 if (vlas_in_scope) {
2676 gen_vla_sp_restore(vla_sp_root_loc);
2680 /* return the pointed type of t */
2681 static inline CType *pointed_type(CType *type)
2683 return &type->ref->type;
2686 /* modify type so that its it is a pointer to type. */
2687 ST_FUNC void mk_pointer(CType *type)
2689 Sym *s;
2690 s = sym_push(SYM_FIELD, type, 0, -1);
2691 type->t = VT_PTR | (type->t & VT_STORAGE);
2692 type->ref = s;
2695 /* compare function types. OLD functions match any new functions */
2696 static int is_compatible_func(CType *type1, CType *type2)
2698 Sym *s1, *s2;
2700 s1 = type1->ref;
2701 s2 = type2->ref;
2702 if (!is_compatible_types(&s1->type, &s2->type))
2703 return 0;
2704 /* check func_call */
2705 if (s1->f.func_call != s2->f.func_call)
2706 return 0;
2707 /* XXX: not complete */
2708 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2709 return 1;
2710 if (s1->f.func_type != s2->f.func_type)
2711 return 0;
2712 while (s1 != NULL) {
2713 if (s2 == NULL)
2714 return 0;
2715 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2716 return 0;
2717 s1 = s1->next;
2718 s2 = s2->next;
2720 if (s2)
2721 return 0;
2722 return 1;
2725 /* return true if type1 and type2 are the same. If unqualified is
2726 true, qualifiers on the types are ignored.
2728 - enums are not checked as gcc __builtin_types_compatible_p ()
2730 static int compare_types(CType *type1, CType *type2, int unqualified)
2732 int bt1, t1, t2;
2734 t1 = type1->t & VT_TYPE;
2735 t2 = type2->t & VT_TYPE;
2736 if (unqualified) {
2737 /* strip qualifiers before comparing */
2738 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2739 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2742 /* Default Vs explicit signedness only matters for char */
2743 if ((t1 & VT_BTYPE) != VT_BYTE) {
2744 t1 &= ~VT_DEFSIGN;
2745 t2 &= ~VT_DEFSIGN;
2747 /* XXX: bitfields ? */
2748 if (t1 != t2)
2749 return 0;
2750 /* test more complicated cases */
2751 bt1 = t1 & VT_BTYPE;
2752 if (bt1 == VT_PTR) {
2753 type1 = pointed_type(type1);
2754 type2 = pointed_type(type2);
2755 return is_compatible_types(type1, type2);
2756 } else if (bt1 == VT_STRUCT) {
2757 return (type1->ref == type2->ref);
2758 } else if (bt1 == VT_FUNC) {
2759 return is_compatible_func(type1, type2);
2760 } else {
2761 return 1;
2765 /* return true if type1 and type2 are exactly the same (including
2766 qualifiers).
2768 static int is_compatible_types(CType *type1, CType *type2)
2770 return compare_types(type1,type2,0);
2773 /* return true if type1 and type2 are the same (ignoring qualifiers).
2775 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2777 return compare_types(type1,type2,1);
2780 /* print a type. If 'varstr' is not NULL, then the variable is also
2781 printed in the type */
2782 /* XXX: union */
2783 /* XXX: add array and function pointers */
2784 static void type_to_str(char *buf, int buf_size,
2785 CType *type, const char *varstr)
2787 int bt, v, t;
2788 Sym *s, *sa;
2789 char buf1[256];
2790 const char *tstr;
2792 t = type->t;
2793 bt = t & VT_BTYPE;
2794 buf[0] = '\0';
2796 if (t & VT_EXTERN)
2797 pstrcat(buf, buf_size, "extern ");
2798 if (t & VT_STATIC)
2799 pstrcat(buf, buf_size, "static ");
2800 if (t & VT_TYPEDEF)
2801 pstrcat(buf, buf_size, "typedef ");
2802 if (t & VT_INLINE)
2803 pstrcat(buf, buf_size, "inline ");
2804 if (t & VT_VOLATILE)
2805 pstrcat(buf, buf_size, "volatile ");
2806 if (t & VT_CONSTANT)
2807 pstrcat(buf, buf_size, "const ");
2809 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2810 || ((t & VT_UNSIGNED)
2811 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2812 && !IS_ENUM(t)
2814 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2816 buf_size -= strlen(buf);
2817 buf += strlen(buf);
2819 switch(bt) {
2820 case VT_VOID:
2821 tstr = "void";
2822 goto add_tstr;
2823 case VT_BOOL:
2824 tstr = "_Bool";
2825 goto add_tstr;
2826 case VT_BYTE:
2827 tstr = "char";
2828 goto add_tstr;
2829 case VT_SHORT:
2830 tstr = "short";
2831 goto add_tstr;
2832 case VT_INT:
2833 tstr = "int";
2834 goto maybe_long;
2835 case VT_LLONG:
2836 tstr = "long long";
2837 maybe_long:
2838 if (t & VT_LONG)
2839 tstr = "long";
2840 if (!IS_ENUM(t))
2841 goto add_tstr;
2842 tstr = "enum ";
2843 goto tstruct;
2844 case VT_FLOAT:
2845 tstr = "float";
2846 goto add_tstr;
2847 case VT_DOUBLE:
2848 tstr = "double";
2849 goto add_tstr;
2850 case VT_LDOUBLE:
2851 tstr = "long double";
2852 add_tstr:
2853 pstrcat(buf, buf_size, tstr);
2854 break;
2855 case VT_STRUCT:
2856 tstr = "struct ";
2857 if (IS_UNION(t))
2858 tstr = "union ";
2859 tstruct:
2860 pstrcat(buf, buf_size, tstr);
2861 v = type->ref->v & ~SYM_STRUCT;
2862 if (v >= SYM_FIRST_ANOM)
2863 pstrcat(buf, buf_size, "<anonymous>");
2864 else
2865 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2866 break;
2867 case VT_FUNC:
2868 s = type->ref;
2869 type_to_str(buf, buf_size, &s->type, varstr);
2870 pstrcat(buf, buf_size, "(");
2871 sa = s->next;
2872 while (sa != NULL) {
2873 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2874 pstrcat(buf, buf_size, buf1);
2875 sa = sa->next;
2876 if (sa)
2877 pstrcat(buf, buf_size, ", ");
2879 pstrcat(buf, buf_size, ")");
2880 goto no_var;
2881 case VT_PTR:
2882 s = type->ref;
2883 if (t & VT_ARRAY) {
2884 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2885 type_to_str(buf, buf_size, &s->type, buf1);
2886 goto no_var;
2888 pstrcpy(buf1, sizeof(buf1), "*");
2889 if (t & VT_CONSTANT)
2890 pstrcat(buf1, buf_size, "const ");
2891 if (t & VT_VOLATILE)
2892 pstrcat(buf1, buf_size, "volatile ");
2893 if (varstr)
2894 pstrcat(buf1, sizeof(buf1), varstr);
2895 type_to_str(buf, buf_size, &s->type, buf1);
2896 goto no_var;
2898 if (varstr) {
2899 pstrcat(buf, buf_size, " ");
2900 pstrcat(buf, buf_size, varstr);
2902 no_var: ;
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed. */
static void gen_assign_cast(CType *dt)
    CType *st, *type1, *type2;
    char buf1[256], buf2[256];
    int dbt, sbt;
    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            ; /*
              It is Ok if both are void
              A test program:
                void func1() {}
                void func2() {
                  return func1();
                }
              gcc accepts this program
              */
        else
            tcc_error("cannot cast from/to void");
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            goto type_ok;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            goto type_ok;
        type1 = pointed_type(dt);
        /* a function is implicitly a function pointer */
        if (sbt == VT_FUNC) {
            if ((type1->t & VT_BTYPE) != VT_VOID &&
                !is_compatible_types(pointed_type(dt), st))
                tcc_warning("assignment from incompatible pointer type");
            goto type_ok;
        if (sbt != VT_PTR)
            goto error;
        type2 = pointed_type(st);
        if ((type1->t & VT_BTYPE) == VT_VOID ||
            (type2->t & VT_BTYPE) == VT_VOID) {
            /* void * can match anything */
        } else {
            //printf("types %08x %08x\n", type1->t, type2->t);
            /* exact type match, except for qualifiers */
            if (!is_compatible_unqualified_types(type1, type2)) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
                if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
                    || IS_ENUM(type1->t) || IS_ENUM(type2->t)
                    tcc_warning("assignment from incompatible pointer type");
        /* check const and volatile */
        if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
            (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
            tcc_warning("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        /* struct/union assignment requires identical unqualified types */
        if (!is_compatible_unqualified_types(dt, st)) {
    error:
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
        break;
 type_ok:
    gen_cast(dt);
/* store vtop in lvalue pushed on stack */
/* on entry: vtop[-1] is the destination lvalue, vtop is the value;
   on exit the stored value remains on top of the vstack */
ST_FUNC void vstore(void)
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & VT_TYPE;
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    } else {
        delayed_cast = 0;
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);
    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
            size = type_size(&vtop->type, &align);
            /* destination */
            vswap();
            vtop->type.t = VT_PTR;
            gaddrof();
            /* address of memcpy() */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_global_sym(&func_old_type, TOK_memcpy8);
            else if(!(align & 3))
                vpush_global_sym(&func_old_type, TOK_memcpy4);
            else
#endif
            /* Use memmove, rather than memcpy, as dest and src may be same: */
            vpush_global_sym(&func_old_type, TOK_memmove);
            vswap();
            /* source */
            vpushv(vtop - 2);
            vtop->type.t = VT_PTR;
            gaddrof();
            /* type size */
            vpushi(size);
            gfunc_call(3);
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */
        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];
        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
        if ((ft & VT_BTYPE) == VT_BOOL) {
            /* normalize to 0/1 first, then store as unsigned byte */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (r == VT_STRUCT) {
            /* field crosses container boundaries: byte-wise access */
            gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if ((ft & VT_BTYPE) != VT_BOOL) {
                /* mask source */
                if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
#ifdef CONFIG_TCC_BCHECK
            /* bound check case */
            if (vtop[-1].r & VT_MUSTBOUND) {
                vswap();
                gbound();
                vswap();
#endif
            rc = RC_INT;
            if (is_float(ft)) {
                rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
                if ((ft & VT_BTYPE) == VT_LDOUBLE) {
                    rc = RC_ST0;
                } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
                    rc = RC_FRET;
#endif
            r = gv(rc); /* generate value */
            /* if lvalue was saved on stack, must read it */
            if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
                SValue sv;
                t = get_reg(RC_INT);
#if PTR_SIZE == 8
                sv.type.t = VT_PTR;
#else
                sv.type.t = VT_INT;
#endif
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = vtop[-1].c.i;
                load(t, &sv);
                vtop[-1].r = t | VT_LVAL;
            /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
#if PTR_SIZE == 8
            if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((ft & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
#endif
                vtop[-1].type.t = load_type;
                store(r, vtop - 1);
                vswap();
                /* convert to int to increment easily */
                vtop->type.t = addr_type;
                gaddrof();
                vpushi(load_size);
                gen_op('+');
                vtop->r |= VT_LVAL;
                vswap();
                vtop[-1].type.t = load_type;
                /* XXX: it works because r2 is spilled last ! */
                store(vtop->r2, vtop - 1);
            } else {
                store(r, vtop - 1);
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
    /* the operand must be an lvalue; it stays on the vstack */
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    /* add constant */
    /* NOTE(review): c - TOK_MID presumably yields +1 for '++' and -1 for
       '--' (TOK_MID sits between the two token values) -- confirm in tcc.h */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
3191 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3193 /* read the string */
3194 if (tok != TOK_STR)
3195 expect(msg);
3196 cstr_new(astr);
3197 while (tok == TOK_STR) {
3198 /* XXX: add \0 handling too ? */
3199 cstr_cat(astr, tokc.str.data, -1);
3200 next();
3202 cstr_ccat(astr, '\0');
/* If i is >= 1 and a power of two, returns log2(i)+1.
   If i is 0 returns 0. */
static int exact_log2p1(int i)
{
    int bits, ret = 1;

    if (i == 0)
        return 0;
    /* consume whole bytes first ... */
    while (i >= 1 << 8) {
        i >>= 8;
        ret += 8;
    }
    /* ... then binary-search the remaining byte */
    for (bits = 4; bits >= 1; bits >>= 1) {
        if (i >= 1 << bits) {
            i >>= bits;
            ret += bits;
        }
    }
    return ret;
}
/* Parse __attribute__((...)) GNUC extension. */
/* recognized attributes are stored into 'ad'; unknown ones are skipped
   (with a warning if enabled) */
static void parse_attribute(AttributeDef *ad)
    int t, n;
    CString astr;
/* several __attribute__((...)) groups may follow each other */
redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* bare __attribute__((aligned)) means maximum alignment */
                n = MAX_ALIGN;
            /* stored as log2(n)+1 so that 0 means "not specified" */
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            /* clamp the register count to the 0..3 supported range */
            n = expr_const();
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            break;
        if (tok != ',')
            break;
        next();
    skip(')');
    skip(')');
    goto redo;
3392 static Sym * find_field (CType *type, int v)
3394 Sym *s = type->ref;
3395 v |= SYM_FIELD;
3396 while ((s = s->next) != NULL) {
3397 if ((s->v & SYM_FIELD) &&
3398 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3399 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3400 Sym *ret = find_field (&s->type, v);
3401 if (ret)
3402 return ret;
3404 if (s->v == v)
3405 break;
3407 return s;
3410 static void struct_add_offset (Sym *s, int offset)
3412 while ((s = s->next) != NULL) {
3413 if ((s->v & SYM_FIELD) &&
3414 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3415 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3416 struct_add_offset(s->type.ref, offset);
3417 } else
3418 s->c += offset;
/* compute the layout of all fields of the struct/union described by
   type->ref, honoring bitfields, #pragma pack and the aligned/packed
   attributes.  Results are written back into each field symbol
   (f->c = byte offset, bit pos/size re-encoded in f->type.t) and into
   the struct symbol itself (type->ref->r = alignment, ->c = size). */
static void struct_layout(CType *type, AttributeDef *ad)
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    /* pcc: GCC-compatible bitfield layout; otherwise MS-compatible */
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;
    maxalign = 1;
    offset = 0;
    c = 0;                  /* running byte offset */
    bit_pos = 0;            /* bit offset within the current byte run */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;
//#define BF_DEBUG
    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1;
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;
        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */
        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;
            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            /* some individual align was specified */
            if (a)
                align = a;
        if (type->ref->type.t == VT_UNION) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;
        } else if (bit_size < 0) {
            /* plain (non-bitfield) member */
            if (pcc)
                c += (bit_pos + 7) >> 3;
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;
        } else {
            /* A bit-field.  Layout is more complicated.  There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;
                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align).  */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    align = 1;
            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width.  */
                    if (bit_size || prev_bit_size)
                        c += size;
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again).  */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            /* re-encode the bit position into the field's type word */
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        if (align > maxalign)
            maxalign = align;
#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                   BIT_POS(f->type.t),
                   BIT_SIZE(f->type.t)
        printf("\n");
#endif
        if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
            Sym *ass;
            /* An anonymous struct/union.  Adjust member offsets
               to reflect the real offset of our containing struct.
               Also set the offset of this anon member inside
               the outer struct to be zero.  Via this it
               works when accessing the field offset directly
               (from base object), as well as when recursing
               members in initializer handling.  */
            int v2 = f->type.ref->v;
            if (!(v2 & SYM_FIELD) &&
                (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                Sym **pps;
                /* This happens only with MS extensions.  The
                   anon member has a named struct type, so it
                   potentially is shared with other references.
                   We need to unshare members so we can modify
                   them.  */
                ass = f->type.ref;
                f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
                                       &f->type.ref->type, 0,
                                       f->type.ref->c);
                pps = &f->type.ref->next;
                while ((ass = ass->next) != NULL) {
                    *pps = sym_push(ass->v, &ass->type, 0, ass->c);
                    pps = &((*pps)->next);
                *pps = NULL;
            struct_add_offset(f->type.ref, offset);
            f->c = 0;
        } else {
            f->c = offset;
        f->r = 0;
    if (pcc)
        c += (bit_pos + 7) >> 3;
    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    c = (c + a - 1) & -a;
    type->ref->c = c;
#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif
    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;
        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
            continue;
        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            if (c0 == cx)
                break;
            /* pick the smallest integer type wide enough */
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            s = type_size(&t, &align);
            c0 = cx;
        if (px + bit_size <= s * 8 && cx + s <= c) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
3695 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses a tag (optional), then, if a '{' follows, the member/enumerator
   list. On return 'type' holds the (possibly still incomplete) aggregate
   type whose ref points at the tag symbol. */
3696 static void struct_decl(CType *type, int u)
3698 int v, c, size, align, flexible;
3699 int bit_size, bsize, bt;
3700 Sym *s, *ss, **ps;
3701 AttributeDef ad, ad1;
3702 CType type1, btype;
3704 memset(&ad, 0, sizeof ad);
3705 next();
3706 parse_attribute(&ad);
/* A tag name is present: look it up; reuse an existing definition unless
   this is a redefinition in the same scope with a different kind. */
3707 if (tok != '{') {
3708 v = tok;
3709 next();
3710 /* struct already defined ? return it */
3711 if (v < TOK_IDENT)
3712 expect("struct/union/enum name");
3713 s = struct_find(v);
3714 if (s && (s->sym_scope == local_scope || tok != '{')) {
3715 if (u == s->type.t)
3716 goto do_decl;
3717 if (u == VT_ENUM && IS_ENUM(s->type.t))
3718 goto do_decl;
3719 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3721 } else {
/* no tag: allocate an anonymous symbol for this aggregate */
3722 v = anon_sym++;
3724 /* Record the original enum/struct/union token. */
3725 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3726 type1.ref = NULL;
3727 /* we put an undefined size for struct/union */
3728 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3729 s->r = 0; /* default alignment is zero as gcc */
3730 do_decl:
3731 type->t = s->type.t;
3732 type->ref = s;
/* definition body follows: s->c == -1 marks "not yet defined" */
3734 if (tok == '{') {
3735 next();
3736 if (s->c != -1)
3737 tcc_error("struct/union/enum already defined");
3738 /* cannot be empty */
3739 /* non empty enums are not allowed */
3740 ps = &s->next;
3741 if (u == VT_ENUM) {
/* ll: current enumerator value; pl/nl: max positive / min negative
   seen so far, used below to choose the enum's integral type */
3742 long long ll = 0, pl = 0, nl = 0;
3743 CType t;
3744 t.ref = s;
3745 /* enum symbols have static storage */
3746 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3747 for(;;) {
3748 v = tok;
3749 if (v < TOK_UIDENT)
3750 expect("identifier");
3751 ss = sym_find(v);
3752 if (ss && !local_stack)
3753 tcc_error("redefinition of enumerator '%s'",
3754 get_tok_str(v, NULL));
3755 next();
3756 if (tok == '=') {
3757 next();
3758 ll = expr_const64();
3760 ss = sym_push(v, &t, VT_CONST, 0);
3761 ss->enum_val = ll;
3762 *ps = ss, ps = &ss->next;
3763 if (ll < nl)
3764 nl = ll;
3765 if (ll > pl)
3766 pl = ll;
3767 if (tok != ',')
3768 break;
3769 next();
3770 ll++;
3771 /* NOTE: we accept a trailing comma */
3772 if (tok == '}')
3773 break;
3775 skip('}');
3776 /* set integral type of the enum */
3777 t.t = VT_INT;
3778 if (nl >= 0) {
/* all values non-negative: unsigned; widen if max doesn't fit 32 bits */
3779 if (pl != (unsigned)pl)
3780 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3781 t.t |= VT_UNSIGNED;
3782 } else if (pl != (int)pl || nl != (int)nl)
3783 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3784 s->type.t = type->t = t.t | VT_ENUM;
3785 s->c = 0;
3786 /* set type for enum members */
3787 for (ss = s->next; ss; ss = ss->next) {
3788 ll = ss->enum_val;
3789 if (ll == (int)ll) /* default is int if it fits */
3790 continue;
3791 if (t.t & VT_UNSIGNED) {
3792 ss->type.t |= VT_UNSIGNED;
3793 if (ll == (unsigned)ll)
3794 continue;
3796 ss->type.t = (ss->type.t & ~VT_BTYPE)
3797 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3799 } else {
/* struct/union member list; c flags "a real member was seen" (needed to
   validate flexible array member placement) */
3800 c = 0;
3801 flexible = 0;
3802 while (tok != '}') {
3803 if (!parse_btype(&btype, &ad1)) {
3804 skip(';');
3805 continue;
3807 while (1) {
3808 if (flexible)
3809 tcc_error("flexible array member '%s' not at the end of struct",
3810 get_tok_str(v, NULL));
3811 bit_size = -1;
3812 v = 0;
3813 type1 = btype;
3814 if (tok != ':') {
3815 if (tok != ';')
3816 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3817 if (v == 0) {
/* unnamed member: only allowed for anonymous struct/union
   (MS extension for named struct types) */
3818 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3819 expect("identifier");
3820 else {
3821 int v = btype.ref->v;
3822 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3823 if (tcc_state->ms_extensions == 0)
3824 expect("identifier");
3828 if (type_size(&type1, &align) < 0) {
3829 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3830 flexible = 1;
3831 else
3832 tcc_error("field '%s' has incomplete type",
3833 get_tok_str(v, NULL));
3835 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3836 (type1.t & VT_STORAGE))
3837 tcc_error("invalid type for '%s'",
3838 get_tok_str(v, NULL));
3840 if (tok == ':') {
3841 next();
3842 bit_size = expr_const();
3843 /* XXX: handle v = 0 case for messages */
3844 if (bit_size < 0)
3845 tcc_error("negative width in bit-field '%s'",
3846 get_tok_str(v, NULL));
3847 if (v && bit_size == 0)
3848 tcc_error("zero width for bit-field '%s'",
3849 get_tok_str(v, NULL));
3850 parse_attribute(&ad1);
3852 size = type_size(&type1, &align);
3853 if (bit_size >= 0) {
3854 bt = type1.t & VT_BTYPE;
3855 if (bt != VT_INT &&
3856 bt != VT_BYTE &&
3857 bt != VT_SHORT &&
3858 bt != VT_BOOL &&
3859 bt != VT_LLONG)
3860 tcc_error("bitfields must have scalar type");
3861 bsize = size * 8;
3862 if (bit_size > bsize) {
3863 tcc_error("width of '%s' exceeds its type",
3864 get_tok_str(v, NULL));
3865 } else if (bit_size == bsize
3866 && !ad.a.packed && !ad1.a.packed) {
3867 /* no need for bit fields */
3869 } else if (bit_size == 64) {
3870 tcc_error("field width 64 not implemented");
3871 } else {
/* encode the bit width into the member type's struct-shift bits */
3872 type1.t = (type1.t & ~VT_STRUCT_MASK)
3873 | VT_BITFIELD
3874 | (bit_size << (VT_STRUCT_SHIFT + 6));
3877 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3878 /* Remember we've seen a real field to check
3879 for placement of flexible array member. */
3880 c = 1;
3882 /* If member is a struct or bit-field, enforce
3883 placing into the struct (as anonymous). */
3884 if (v == 0 &&
3885 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3886 bit_size >= 0)) {
3887 v = anon_sym++;
3889 if (v) {
3890 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3891 ss->a = ad1.a;
3892 *ps = ss;
3893 ps = &ss->next;
3895 if (tok == ';' || tok == TOK_EOF)
3896 break;
3897 skip(',');
3899 skip(';');
3901 skip('}');
3902 parse_attribute(&ad);
/* compute final member offsets, size and alignment */
3903 struct_layout(type, &ad);
/* Merge attributes recorded on symbol 's' (e.g. from a typedef or typeof)
   into 'ad', without overriding values 'ad' already carries. */
3908 static void sym_to_attr(AttributeDef *ad, Sym *s)
3910 if (s->a.aligned && 0 == ad->a.aligned)
3911 ad->a.aligned = s->a.aligned;
3912 if (s->f.func_call && 0 == ad->f.func_call)
3913 ad->f.func_call = s->f.func_call;
3914 if (s->f.func_type && 0 == ad->f.func_type)
3915 ad->f.func_type = s->f.func_type;
/* packed is sticky: set it whenever the symbol has it */
3916 if (s->a.packed)
3917 ad->a.packed = 1;
3920 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3921 are added to the element type, copied because it could be a typedef. */
3922 static void parse_btype_qualify(CType *type, int qualifiers)
/* walk down through array dimensions, cloning each level so a shared
   typedef'd type is not mutated in place */
3924 while (type->t & VT_ARRAY) {
3925 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3926 type = &type->ref->type;
3928 type->t |= qualifiers;
3931 /* return 0 if no type declaration. otherwise, return the basic type
3932 and skip it.
/* Accumulates basic-type keywords, qualifiers, storage classes, signedness,
   attributes and typedef names into *type / *ad. 'bt' tracks the base type
   token seen so far, 'st' the short/long size modifier (-1 = none,
   -2 = came from a typedef so further specifiers are rejected). */
3934 static int parse_btype(CType *type, AttributeDef *ad)
3936 int t, u, bt, st, type_found, typespec_found, g;
3937 Sym *s;
3938 CType type1;
3940 memset(ad, 0, sizeof(AttributeDef));
3941 type_found = 0;
3942 typespec_found = 0;
3943 t = VT_INT;
3944 bt = st = -1;
3945 type->ref = NULL;
3947 while(1) {
3948 switch(tok) {
3949 case TOK_EXTENSION:
3950 /* currently, we really ignore extension */
3951 next();
3952 continue;
3954 /* basic types */
3955 case TOK_CHAR:
3956 u = VT_BYTE;
3957 basic_type:
3958 next();
3959 basic_type1:
/* size modifiers (short/long) and base types may each appear only
   once; "long long" is handled in the TOK_LONG case below */
3960 if (u == VT_SHORT || u == VT_LONG) {
3961 if (st != -1 || (bt != -1 && bt != VT_INT))
3962 tmbt: tcc_error("too many basic types");
3963 st = u;
3964 } else {
3965 if (bt != -1 || (st != -1 && u != VT_INT))
3966 goto tmbt;
3967 bt = u;
3969 if (u != VT_INT)
3970 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
3971 typespec_found = 1;
3972 break;
3973 case TOK_VOID:
3974 u = VT_VOID;
3975 goto basic_type;
3976 case TOK_SHORT:
3977 u = VT_SHORT;
3978 goto basic_type;
3979 case TOK_INT:
3980 u = VT_INT;
3981 goto basic_type;
3982 case TOK_LONG:
/* "long double" and "long long" combine with an earlier token */
3983 if ((t & VT_BTYPE) == VT_DOUBLE) {
3984 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
3985 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
3986 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
3987 } else {
3988 u = VT_LONG;
3989 goto basic_type;
3991 next();
3992 break;
3993 #ifdef TCC_TARGET_ARM64
3994 case TOK_UINT128:
3995 /* GCC's __uint128_t appears in some Linux header files. Make it a
3996 synonym for long double to get the size and alignment right. */
3997 u = VT_LDOUBLE;
3998 goto basic_type;
3999 #endif
4000 case TOK_BOOL:
4001 u = VT_BOOL;
4002 goto basic_type;
4003 case TOK_FLOAT:
4004 u = VT_FLOAT;
4005 goto basic_type;
4006 case TOK_DOUBLE:
4007 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4008 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4009 } else {
4010 u = VT_DOUBLE;
4011 goto basic_type;
4013 next();
4014 break;
4015 case TOK_ENUM:
4016 struct_decl(&type1, VT_ENUM);
4017 basic_type2:
4018 u = type1.t;
4019 type->ref = type1.ref;
4020 goto basic_type1;
4021 case TOK_STRUCT:
4022 struct_decl(&type1, VT_STRUCT);
4023 goto basic_type2;
4024 case TOK_UNION:
4025 struct_decl(&type1, VT_UNION);
4026 goto basic_type2;
4028 /* type modifiers */
4029 case TOK_CONST1:
4030 case TOK_CONST2:
4031 case TOK_CONST3:
/* qualify via parse_btype_qualify so array element types are
   copied before being modified */
4032 type->t = t;
4033 parse_btype_qualify(type, VT_CONSTANT);
4034 t = type->t;
4035 next();
4036 break;
4037 case TOK_VOLATILE1:
4038 case TOK_VOLATILE2:
4039 case TOK_VOLATILE3:
4040 type->t = t;
4041 parse_btype_qualify(type, VT_VOLATILE);
4042 t = type->t;
4043 next();
4044 break;
4045 case TOK_SIGNED1:
4046 case TOK_SIGNED2:
4047 case TOK_SIGNED3:
4048 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4049 tcc_error("signed and unsigned modifier");
4050 t |= VT_DEFSIGN;
4051 next();
4052 typespec_found = 1;
4053 break;
4054 case TOK_REGISTER:
4055 case TOK_AUTO:
4056 case TOK_RESTRICT1:
4057 case TOK_RESTRICT2:
4058 case TOK_RESTRICT3:
/* register/auto/restrict are accepted but have no effect here */
4059 next();
4060 break;
4061 case TOK_UNSIGNED:
4062 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4063 tcc_error("signed and unsigned modifier");
4064 t |= VT_DEFSIGN | VT_UNSIGNED;
4065 next();
4066 typespec_found = 1;
4067 break;
4069 /* storage */
4070 case TOK_EXTERN:
4071 g = VT_EXTERN;
4072 goto storage;
4073 case TOK_STATIC:
4074 g = VT_STATIC;
4075 goto storage;
4076 case TOK_TYPEDEF:
4077 g = VT_TYPEDEF;
4078 goto storage;
4079 storage:
4080 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4081 tcc_error("multiple storage classes");
4082 t |= g;
4083 next();
4084 break;
4085 case TOK_INLINE1:
4086 case TOK_INLINE2:
4087 case TOK_INLINE3:
4088 t |= VT_INLINE;
4089 next();
4090 break;
4092 /* GNUC attribute */
4093 case TOK_ATTRIBUTE1:
4094 case TOK_ATTRIBUTE2:
4095 parse_attribute(ad);
4096 if (ad->attr_mode) {
/* __attribute__((mode(...))) overrides the base type */
4097 u = ad->attr_mode -1;
4098 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4100 break;
4101 /* GNUC typeof */
4102 case TOK_TYPEOF1:
4103 case TOK_TYPEOF2:
4104 case TOK_TYPEOF3:
4105 next();
4106 parse_expr_type(&type1);
4107 /* remove all storage modifiers except typedef */
4108 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4109 if (type1.ref)
4110 sym_to_attr(ad, type1.ref);
4111 goto basic_type2;
4112 default:
/* possibly a typedef name; only honored if no type specifier
   was already seen */
4113 if (typespec_found)
4114 goto the_end;
4115 s = sym_find(tok);
4116 if (!s || !(s->type.t & VT_TYPEDEF))
4117 goto the_end;
/* combine the typedef's type with qualifiers collected so far */
4118 t &= ~(VT_BTYPE|VT_LONG);
4119 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4120 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4121 type->ref = s->type.ref;
4122 if (t)
4123 parse_btype_qualify(type, t);
4124 t = type->t;
4125 /* get attributes from typedef */
4126 sym_to_attr(ad, s);
4127 next();
4128 typespec_found = 1;
4129 st = bt = -2;
4130 break;
4132 type_found = 1;
4134 the_end:
4135 if (tcc_state->char_is_unsigned) {
4136 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4137 t |= VT_UNSIGNED;
4139 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4140 bt = t & (VT_BTYPE|VT_LONG);
4141 if (bt == VT_LONG)
4142 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4143 #ifdef TCC_TARGET_PE
/* on PE targets long double is plain double */
4144 if (bt == VT_LDOUBLE)
4145 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4146 #endif
4147 type->t = t;
4148 return type_found;
4151 /* convert a function parameter type (array to pointer and function to
4152 function pointer) */
4153 static inline void convert_parameter_type(CType *pt)
4155 /* remove const and volatile qualifiers (XXX: const could be used
4156 to indicate a const function parameter */
4157 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4158 /* array must be transformed to pointer according to ANSI C */
4159 pt->t &= ~VT_ARRAY;
/* function parameter decays to pointer-to-function */
4160 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4161 mk_pointer(pt);
/* Parse '(' followed by one or more concatenated string literals into
   'astr' (used for asm statements and asm labels). */
4165 ST_FUNC void parse_asm_str(CString *astr)
4167 skip('(');
4168 parse_mult_str(astr, "string constant");
4171 /* Parse an asm label and return the token */
4172 static int asm_label_instr(void)
4174 int v;
4175 CString astr;
4177 next();
4178 parse_asm_str(&astr);
4179 skip(')');
4180 #ifdef ASM_DEBUG
4181 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4182 #endif
/* intern the label string as a token (size - 1 drops the NUL) */
4183 v = tok_alloc(astr.data, astr.size - 1)->tok;
4184 cstr_free(&astr);
4185 return v;
/* Parse the postfix part of a declarator: a function parameter list
   '(...)' or array brackets '[...]', recursively. 'storage' carries the
   declaration's storage bits (VLA decision); 'td' is the TYPE_* mode.
   Returns 0 only when td indicates a direct declarator and a nested
   declarator was found instead (caller then recurses). */
4188 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4190 int n, l, t1, arg_size, align;
4191 Sym **plast, *s, *first;
4192 AttributeDef ad1;
4193 CType pt;
4195 if (tok == '(') {
4196 /* function type, or recursive declarator (return if so) */
4197 next();
4198 if (td && !(td & TYPE_ABSTRACT))
4199 return 0;
4200 if (tok == ')')
4201 l = 0;
4202 else if (parse_btype(&pt, &ad1))
4203 l = FUNC_NEW;
4204 else if (td)
4205 return 0;
4206 else
4207 l = FUNC_OLD;
4208 first = NULL;
4209 plast = &first;
4210 arg_size = 0;
4211 if (l) {
4212 for(;;) {
4213 /* read param name and compute offset */
4214 if (l != FUNC_OLD) {
/* "(void)" means an explicit empty prototype */
4215 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4216 break;
4217 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4218 if ((pt.t & VT_BTYPE) == VT_VOID)
4219 tcc_error("parameter declared as void");
4220 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4221 } else {
/* K&R-style list: names only, types filled in later */
4222 n = tok;
4223 if (n < TOK_UIDENT)
4224 expect("identifier");
4225 pt.t = VT_VOID; /* invalid type */
4226 next();
4228 convert_parameter_type(&pt);
4229 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4230 *plast = s;
4231 plast = &s->next;
4232 if (tok == ')')
4233 break;
4234 skip(',');
4235 if (l == FUNC_NEW && tok == TOK_DOTS) {
4236 l = FUNC_ELLIPSIS;
4237 next();
4238 break;
4240 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4241 tcc_error("invalid type");
4243 } else
4244 /* if no parameters, then old type prototype */
4245 l = FUNC_OLD;
4246 skip(')');
4247 /* NOTE: const is ignored in returned type as it has a special
4248 meaning in gcc / C++ */
4249 type->t &= ~VT_CONSTANT;
4250 /* some ancient pre-K&R C allows a function to return an array
4251 and the array brackets to be put after the arguments, such
4252 that "int c()[]" means something like "int[] c()" */
4253 if (tok == '[') {
4254 next();
4255 skip(']'); /* only handle simple "[]" */
4256 mk_pointer(type);
4258 /* we push a anonymous symbol which will contain the function prototype */
4259 ad->f.func_args = arg_size;
4260 ad->f.func_type = l;
4261 s = sym_push(SYM_FIELD, type, 0, 0);
4262 s->a = ad->a;
4263 s->f = ad->f;
4264 s->next = first;
4265 type->t = VT_FUNC;
4266 type->ref = s;
4267 } else if (tok == '[') {
4268 int saved_nocode_wanted = nocode_wanted;
4269 /* array definition */
4270 next();
4271 if (tok == TOK_RESTRICT1)
4272 next();
/* n == -1 means "no size expression"; t1 gets VT_VLA for runtime sizes */
4273 n = -1;
4274 t1 = 0;
4275 if (tok != ']') {
4276 if (!local_stack || (storage & VT_STATIC))
4277 vpushi(expr_const());
4278 else {
4279 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4280 length must always be evaluated, even under nocode_wanted,
4281 so that its size slot is initialized (e.g. under sizeof
4282 or typeof). */
4283 nocode_wanted = 0;
4284 gexpr();
4286 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4287 n = vtop->c.i;
4288 if (n < 0)
4289 tcc_error("invalid array size");
4290 } else {
4291 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4292 tcc_error("size of variable length array should be an integer");
4293 t1 = VT_VLA;
4296 skip(']');
4297 /* parse next post type */
4298 post_type(type, ad, storage, 0);
4299 if (type->t == VT_FUNC)
4300 tcc_error("declaration of an array of functions");
4301 t1 |= type->t & VT_VLA;
4303 if (t1 & VT_VLA) {
/* reserve a stack slot and store the computed byte size there */
4304 loc -= type_size(&int_type, &align);
4305 loc &= -align;
4306 n = loc;
4308 vla_runtime_type_size(type, &align);
4309 gen_op('*');
4310 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4311 vswap();
4312 vstore();
4314 if (n != -1)
4315 vpop();
4316 nocode_wanted = saved_nocode_wanted;
4318 /* we push an anonymous symbol which will contain the array
4319 element type */
4320 s = sym_push(SYM_FIELD, type, 0, n);
4321 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4322 type->ref = s;
4324 return 1;
4327 /* Parse a type declarator (except basic type), and return the type
4328 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4329 expected. 'type' should contain the basic type. 'ad' is the
4330 attribute definition of the basic type. It can be modified by
4331 type_decl(). If this (possibly abstract) declarator is a pointer chain
4332 it returns the innermost pointed to type (equals *type, but is a different
4333 pointer), otherwise returns type itself, that's used for recursive calls. */
4334 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4336 CType *post, *ret;
4337 int qualifiers, storage;
4339 /* recursive type, remove storage bits first, apply them later again */
4340 storage = type->t & VT_STORAGE;
4341 type->t &= ~VT_STORAGE;
4342 post = ret = type;
/* consume leading '*' pointer derivations with their qualifiers */
4344 while (tok == '*') {
4345 qualifiers = 0;
4346 redo:
4347 next();
4348 switch(tok) {
4349 case TOK_CONST1:
4350 case TOK_CONST2:
4351 case TOK_CONST3:
4352 qualifiers |= VT_CONSTANT;
4353 goto redo;
4354 case TOK_VOLATILE1:
4355 case TOK_VOLATILE2:
4356 case TOK_VOLATILE3:
4357 qualifiers |= VT_VOLATILE;
4358 goto redo;
4359 case TOK_RESTRICT1:
4360 case TOK_RESTRICT2:
4361 case TOK_RESTRICT3:
/* restrict is accepted and ignored */
4362 goto redo;
4363 /* XXX: clarify attribute handling */
4364 case TOK_ATTRIBUTE1:
4365 case TOK_ATTRIBUTE2:
4366 parse_attribute(ad);
4367 break;
4369 mk_pointer(type);
4370 type->t |= qualifiers;
4371 if (ret == type)
4372 /* innermost pointed to type is the one for the first derivation */
4373 ret = pointed_type(type);
4376 if (tok == '(') {
4377 /* This is possibly a parameter type list for abstract declarators
4378 ('int ()'), use post_type for testing this. */
4379 if (!post_type(type, ad, 0, td)) {
4380 /* It's not, so it's a nested declarator, and the post operations
4381 apply to the innermost pointed to type (if any). */
4382 /* XXX: this is not correct to modify 'ad' at this point, but
4383 the syntax is not clear */
4384 parse_attribute(ad);
4385 post = type_decl(type, ad, v, td);
4386 skip(')');
4388 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4389 /* type identifier */
4390 *v = tok;
4391 next();
4392 } else {
/* abstract declarator: no name; *v = 0 signals "unnamed" */
4393 if (!(td & TYPE_ABSTRACT))
4394 expect("identifier");
4395 *v = 0;
4397 post_type(post, ad, storage, 0);
4398 parse_attribute(ad);
/* re-apply the storage bits stripped at entry */
4399 type->t |= storage;
4400 return ret;
4403 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
/* Returns VT_LVAL plus width flags (byte/short) and, for sub-int types,
   VT_LVAL_UNSIGNED when the type is unsigned. */
4404 ST_FUNC int lvalue_type(int t)
4406 int bt, r;
4407 r = VT_LVAL;
4408 bt = t & VT_BTYPE;
4409 if (bt == VT_BYTE || bt == VT_BOOL)
4410 r |= VT_LVAL_BYTE;
4411 else if (bt == VT_SHORT)
4412 r |= VT_LVAL_SHORT;
4413 else
/* int-sized and larger need no width flag and no sign extension info */
4414 return r;
4415 if (t & VT_UNSIGNED)
4416 r |= VT_LVAL_UNSIGNED;
4417 return r;
4420 /* indirection with full error checking and bound check */
/* Dereference vtop: requires a pointer (function values pass through
   unchanged since dereferencing a function yields the function). */
4421 ST_FUNC void indir(void)
4423 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4424 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4425 return;
4426 expect("pointer");
/* materialize the pointer value in a register if it's an lvalue */
4428 if (vtop->r & VT_LVAL)
4429 gv(RC_INT);
4430 vtop->type = *pointed_type(&vtop->type);
4431 /* Arrays and functions are never lvalues */
4432 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4433 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4434 vtop->r |= lvalue_type(vtop->type.t);
4435 /* if bound checking, the referenced pointer must be checked */
4436 #ifdef CONFIG_TCC_BCHECK
4437 if (tcc_state->do_bounds_check)
4438 vtop->r |= VT_MUSTBOUND;
4439 #endif
4443 /* pass a parameter to a function and do type checking and casting */
/* 'func' is the function symbol, 'arg' the declared parameter symbol or
   NULL past the prototype (variadic part / old-style). */
4444 static void gfunc_param_typed(Sym *func, Sym *arg)
4446 int func_type;
4447 CType type;
4449 func_type = func->f.func_type;
4450 if (func_type == FUNC_OLD ||
4451 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4452 /* default casting : only need to convert float to double */
4453 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4454 gen_cast_s(VT_DOUBLE);
4455 } else if (vtop->type.t & VT_BITFIELD) {
/* a bit-field value is promoted to its underlying base type */
4456 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4457 type.ref = vtop->type.ref;
4458 gen_cast(&type);
4460 } else if (arg == NULL) {
4461 tcc_error("too many arguments to function");
4462 } else {
4463 type = arg->type;
4464 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4465 gen_assign_cast(&type);
4469 /* parse an expression and return its type without any side effect. */
/* Temporarily raises nocode_wanted so the expression is only type-checked,
   then pops the value left on the stack. */
4470 static void expr_type(CType *type, void (*expr_fn)(void))
4472 nocode_wanted++;
4473 expr_fn();
4474 *type = vtop->type;
4475 vpop();
4476 nocode_wanted--;
4479 /* parse an expression of the form '(type)' or '(expr)' and return its
4480 type */
/* Used by GNU typeof: accepts either a type name or a full expression. */
4481 static void parse_expr_type(CType *type)
4483 int n;
4484 AttributeDef ad;
4486 skip('(');
4487 if (parse_btype(type, &ad)) {
4488 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4489 } else {
4490 expr_type(type, gexpr);
4492 skip(')');
/* Parse a mandatory abstract type name (basic type + abstract declarator);
   errors out if no type is present. */
4495 static void parse_type(CType *type)
4497 AttributeDef ad;
4498 int n;
4500 if (!parse_btype(type, &ad)) {
4501 expect("type");
4503 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse a builtin's parenthesized argument list. 'args' is a spec string:
   'e' = expression, 't' = type name. If 'nc' is set, no code is generated
   for the arguments (compile-time-only builtins). */
4506 static void parse_builtin_params(int nc, const char *args)
4508 char c, sep = '(';
4509 CType t;
4510 if (nc)
4511 nocode_wanted++;
4512 next();
4513 while ((c = *args++)) {
/* first iteration expects '(', later ones ',' */
4514 skip(sep);
4515 sep = ',';
4516 switch (c) {
4517 case 'e': expr_eq(); continue;
4518 case 't': parse_type(&t); vpush(&t); continue;
4519 default: tcc_error("internal error"); break;
4522 skip(')');
4523 if (nc)
4524 nocode_wanted--;
4527 ST_FUNC void unary(void)
4529 int n, t, align, size, r, sizeof_caller;
4530 CType type;
4531 Sym *s;
4532 AttributeDef ad;
4534 sizeof_caller = in_sizeof;
4535 in_sizeof = 0;
4536 type.ref = NULL;
4537 /* XXX: GCC 2.95.3 does not generate a table although it should be
4538 better here */
4539 tok_next:
4540 switch(tok) {
4541 case TOK_EXTENSION:
4542 next();
4543 goto tok_next;
4544 case TOK_LCHAR:
4545 #ifdef TCC_TARGET_PE
4546 t = VT_SHORT|VT_UNSIGNED;
4547 goto push_tokc;
4548 #endif
4549 case TOK_CINT:
4550 case TOK_CCHAR:
4551 t = VT_INT;
4552 push_tokc:
4553 type.t = t;
4554 vsetc(&type, VT_CONST, &tokc);
4555 next();
4556 break;
4557 case TOK_CUINT:
4558 t = VT_INT | VT_UNSIGNED;
4559 goto push_tokc;
4560 case TOK_CLLONG:
4561 t = VT_LLONG;
4562 goto push_tokc;
4563 case TOK_CULLONG:
4564 t = VT_LLONG | VT_UNSIGNED;
4565 goto push_tokc;
4566 case TOK_CFLOAT:
4567 t = VT_FLOAT;
4568 goto push_tokc;
4569 case TOK_CDOUBLE:
4570 t = VT_DOUBLE;
4571 goto push_tokc;
4572 case TOK_CLDOUBLE:
4573 t = VT_LDOUBLE;
4574 goto push_tokc;
4575 case TOK_CLONG:
4576 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4577 goto push_tokc;
4578 case TOK_CULONG:
4579 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4580 goto push_tokc;
4581 case TOK___FUNCTION__:
4582 if (!gnu_ext)
4583 goto tok_identifier;
4584 /* fall thru */
4585 case TOK___FUNC__:
4587 void *ptr;
4588 int len;
4589 /* special function name identifier */
4590 len = strlen(funcname) + 1;
4591 /* generate char[len] type */
4592 type.t = VT_BYTE;
4593 mk_pointer(&type);
4594 type.t |= VT_ARRAY;
4595 type.ref->c = len;
4596 vpush_ref(&type, data_section, data_section->data_offset, len);
4597 if (!NODATA_WANTED) {
4598 ptr = section_ptr_add(data_section, len);
4599 memcpy(ptr, funcname, len);
4601 next();
4603 break;
4604 case TOK_LSTR:
4605 #ifdef TCC_TARGET_PE
4606 t = VT_SHORT | VT_UNSIGNED;
4607 #else
4608 t = VT_INT;
4609 #endif
4610 goto str_init;
4611 case TOK_STR:
4612 /* string parsing */
4613 t = VT_BYTE;
4614 if (tcc_state->char_is_unsigned)
4615 t = VT_BYTE | VT_UNSIGNED;
4616 str_init:
4617 if (tcc_state->warn_write_strings)
4618 t |= VT_CONSTANT;
4619 type.t = t;
4620 mk_pointer(&type);
4621 type.t |= VT_ARRAY;
4622 memset(&ad, 0, sizeof(AttributeDef));
4623 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4624 break;
4625 case '(':
4626 next();
4627 /* cast ? */
4628 if (parse_btype(&type, &ad)) {
4629 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4630 skip(')');
4631 /* check ISOC99 compound literal */
4632 if (tok == '{') {
4633 /* data is allocated locally by default */
4634 if (global_expr)
4635 r = VT_CONST;
4636 else
4637 r = VT_LOCAL;
4638 /* all except arrays are lvalues */
4639 if (!(type.t & VT_ARRAY))
4640 r |= lvalue_type(type.t);
4641 memset(&ad, 0, sizeof(AttributeDef));
4642 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4643 } else {
4644 if (sizeof_caller) {
4645 vpush(&type);
4646 return;
4648 unary();
4649 gen_cast(&type);
4651 } else if (tok == '{') {
4652 int saved_nocode_wanted = nocode_wanted;
4653 if (const_wanted)
4654 tcc_error("expected constant");
4655 /* save all registers */
4656 save_regs(0);
4657 /* statement expression : we do not accept break/continue
4658 inside