1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack offset of the variable that saves the stack pointer whenever it is modified for VLAs */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void init_putv(CType *type, Section *sec, unsigned long c);
80 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
81 static void block(int *bsym, int *csym, int is_expr);
82 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
83 static int decl0(int l, int is_for_loop_init, Sym *);
84 static void expr_eq(void);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static inline int64_t expr_const64(void);
90 ST_FUNC void vpush64(int ty, unsigned long long v);
91 ST_FUNC void vpush(CType *type);
92 ST_FUNC int gvtst(int inv, int t);
93 ST_FUNC int is_btype_size(int bt);
94 static void gen_inline_functions(TCCState *s);
96 ST_INLN int is_float(int t)
98 int bt;
99 bt = t & VT_BTYPE;
100 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
103 /* we use our own 'finite' function to avoid potential problems with
104 non-standard math libs */
105 /* XXX: endianness dependent */
106 ST_FUNC int ieee_finite(double d)
108 int p[4];
109 memcpy(p, &d, sizeof(double));
110 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
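/* Illustrative sketch (ours, not part of TCC), guarded out like the pv()
   debugging aid below: on a little-endian machine with IEEE-754 doubles the
   bit trick above is equivalent to checking that the 11 exponent bits are
   not all ones. */
#if 0
#include <stdint.h>
#include <string.h>
static int finite_sketch(double d)
{
    uint64_t bits;
    memcpy(&bits, &d, sizeof bits);
    /* exponent field all ones means Inf or NaN, i.e. not finite */
    return ((bits >> 52) & 0x7ff) != 0x7ff;
}
#endif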
113 ST_FUNC void test_lvalue(void)
115 if (!(vtop->r & VT_LVAL))
116 expect("lvalue");
119 ST_FUNC void check_vstack(void)
121 if (pvtop != vtop)
122 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
125 /* ------------------------------------------------------------------------- */
126 /* vstack debugging aid */
128 #if 0
129 void pv (const char *lbl, int a, int b)
131 int i;
132 for (i = a; i < a + b; ++i) {
133 SValue *p = &vtop[-i];
134 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
135 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
138 #endif
140 /* ------------------------------------------------------------------------- */
141 /* start of translation unit info */
142 ST_FUNC void tcc_debug_start(TCCState *s1)
144 if (s1->do_debug) {
145 char buf[512];
147 /* file info: full path + filename */
148 section_sym = put_elf_sym(symtab_section, 0, 0,
149 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
150 text_section->sh_num, NULL);
151 getcwd(buf, sizeof(buf));
152 #ifdef _WIN32
153 normalize_slashes(buf);
154 #endif
155 pstrcat(buf, sizeof(buf), "/");
156 put_stabs_r(buf, N_SO, 0, 0,
157 text_section->data_offset, text_section, section_sym);
158 put_stabs_r(file->filename, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 last_ind = 0;
161 last_line_num = 0;
164 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
165 symbols can be safely used */
166 put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
168 SHN_ABS, file->filename);
171 /* put end of translation unit info */
172 ST_FUNC void tcc_debug_end(TCCState *s1)
174 if (!s1->do_debug)
175 return;
176 put_stabs_r(NULL, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
181 /* generate line number info */
182 ST_FUNC void tcc_debug_line(TCCState *s1)
184 if (!s1->do_debug)
185 return;
186 if ((last_line_num != file->line_num || last_ind != ind)) {
187 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
188 last_ind = ind;
189 last_line_num = file->line_num;
193 /* put function symbol */
194 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
196 char buf[512];
198 if (!s1->do_debug)
199 return;
201 /* stabs info */
202 /* XXX: we put here a dummy type */
203 snprintf(buf, sizeof(buf), "%s:%c1",
204 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
205 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
206 cur_text_section, sym->c);
207 /* //gr gdb wants a line at the function */
208 put_stabn(N_SLINE, 0, file->line_num, 0);
210 last_ind = 0;
211 last_line_num = 0;
214 /* put function size */
215 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
217 if (!s1->do_debug)
218 return;
219 put_stabn(N_FUN, 0, 0, size);
222 /* ------------------------------------------------------------------------- */
223 ST_FUNC void tccgen_start(TCCState *s1)
225 cur_text_section = NULL;
226 funcname = "";
227 anon_sym = SYM_FIRST_ANOM;
228 section_sym = 0;
229 const_wanted = 0;
230 nocode_wanted = 1;
232 /* define some often used types */
233 int_type.t = VT_INT;
234 char_pointer_type.t = VT_BYTE;
235 mk_pointer(&char_pointer_type);
236 #if PTR_SIZE == 4
237 size_type.t = VT_INT;
238 #else
239 size_type.t = VT_LLONG;
240 #endif
241 func_old_type.t = VT_FUNC;
242 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
244 tcc_debug_start(s1);
246 #ifdef TCC_TARGET_ARM
247 arm_init(s1);
248 #endif
251 ST_FUNC void tccgen_end(TCCState *s1)
253 gen_inline_functions(s1);
254 check_vstack();
255 /* end of translation unit info */
256 tcc_debug_end(s1);
259 /* ------------------------------------------------------------------------- */
260 /* apply storage attributes to Elf symbol */
262 static void update_storage(Sym *sym)
264 int t;
265 ElfW(Sym) *esym;
267 if (0 == sym->c)
268 return;
270 t = sym->type.t;
271 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
273 if (t & VT_VIS_MASK)
274 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
275 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
277 if (t & VT_WEAK)
278 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
280 #ifdef TCC_TARGET_PE
281 if (t & VT_EXPORT)
282 esym->st_other |= ST_PE_EXPORT;
283 #endif
286 /* ------------------------------------------------------------------------- */
287 /* update sym->c so that it points to an external symbol in section
288 'section' with value 'value' */
290 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
291 addr_t value, unsigned long size,
292 int can_add_underscore)
294 int sym_type, sym_bind, sh_num, info, other, t;
295 ElfW(Sym) *esym;
296 const char *name;
297 char buf1[256];
298 #ifdef CONFIG_TCC_BCHECK
299 char buf[32];
300 #endif
302 if (section == NULL)
303 sh_num = SHN_UNDEF;
304 else if (section == SECTION_ABS)
305 sh_num = SHN_ABS;
306 else
307 sh_num = section->sh_num;
309 if (!sym->c) {
310 name = get_tok_str(sym->v, NULL);
311 #ifdef CONFIG_TCC_BCHECK
312 if (tcc_state->do_bounds_check) {
313 /* XXX: avoid doing that for statics ? */
314 /* if bounds checking is activated, we change some function
315 names by adding the "__bound" prefix */
316 switch(sym->v) {
317 #ifdef TCC_TARGET_PE
318 /* XXX: we rely only on malloc hooks */
319 case TOK_malloc:
320 case TOK_free:
321 case TOK_realloc:
322 case TOK_memalign:
323 case TOK_calloc:
324 #endif
325 case TOK_memcpy:
326 case TOK_memmove:
327 case TOK_memset:
328 case TOK_strlen:
329 case TOK_strcpy:
330 case TOK_alloca:
331 strcpy(buf, "__bound_");
332 strcat(buf, name);
333 name = buf;
334 break;
337 #endif
338 t = sym->type.t;
339 if ((t & VT_BTYPE) == VT_FUNC) {
340 sym_type = STT_FUNC;
341 } else if ((t & VT_BTYPE) == VT_VOID) {
342 sym_type = STT_NOTYPE;
343 } else {
344 sym_type = STT_OBJECT;
346 if (t & VT_STATIC)
347 sym_bind = STB_LOCAL;
348 else
349 sym_bind = STB_GLOBAL;
350 other = 0;
351 #ifdef TCC_TARGET_PE
352 if (sym_type == STT_FUNC && sym->type.ref) {
353 Sym *ref = sym->type.ref;
354 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
355 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
356 name = buf1;
357 other |= ST_PE_STDCALL;
358 can_add_underscore = 0;
361 if (t & VT_IMPORT)
362 other |= ST_PE_IMPORT;
363 #endif
364 if (tcc_state->leading_underscore && can_add_underscore) {
365 buf1[0] = '_';
366 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
367 name = buf1;
369 if (sym->asm_label)
370 name = get_tok_str(sym->asm_label, NULL);
371 info = ELFW(ST_INFO)(sym_bind, sym_type);
372 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
373 } else {
374 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
375 esym->st_value = value;
376 esym->st_size = size;
377 esym->st_shndx = sh_num;
379 update_storage(sym);
382 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
383 addr_t value, unsigned long size)
385 put_extern_sym2(sym, section, value, size, 1);
388 /* add a new relocation entry to symbol 'sym' in section 's' */
389 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
390 addr_t addend)
392 int c = 0;
394 if (nocode_wanted && s == cur_text_section)
395 return;
397 if (sym) {
398 if (0 == sym->c)
399 put_extern_sym(sym, NULL, 0, 0);
400 c = sym->c;
403 /* now we can add ELF relocation info */
404 put_elf_reloca(symtab_section, s, offset, type, c, addend);
407 #if PTR_SIZE == 4
408 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
410 greloca(s, sym, offset, type, 0);
412 #endif
414 /* ------------------------------------------------------------------------- */
415 /* symbol allocator */
416 static Sym *__sym_malloc(void)
418 Sym *sym_pool, *sym, *last_sym;
419 int i;
421 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
422 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
424 last_sym = sym_free_first;
425 sym = sym_pool;
426 for(i = 0; i < SYM_POOL_NB; i++) {
427 sym->next = last_sym;
428 last_sym = sym;
429 sym++;
431 sym_free_first = last_sym;
432 return last_sym;
435 static inline Sym *sym_malloc(void)
437 Sym *sym;
438 #ifndef SYM_DEBUG
439 sym = sym_free_first;
440 if (!sym)
441 sym = __sym_malloc();
442 sym_free_first = sym->next;
443 return sym;
444 #else
445 sym = tcc_malloc(sizeof(Sym));
446 return sym;
447 #endif
450 ST_INLN void sym_free(Sym *sym)
452 #ifndef SYM_DEBUG
453 sym->next = sym_free_first;
454 sym_free_first = sym;
455 #else
456 tcc_free(sym);
457 #endif
460 /* push, without hashing */
461 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
463 Sym *s;
465 s = sym_malloc();
466 s->scope = 0;
467 s->v = v;
468 s->type.t = t;
469 s->type.ref = NULL;
470 #ifdef _WIN64
471 s->d = NULL;
472 #endif
473 s->c = c;
474 s->next = NULL;
475 /* add in stack */
476 s->prev = *ps;
477 *ps = s;
478 return s;
481 /* find a symbol and return its associated structure. 's' is the top
482 of the symbol stack */
483 ST_FUNC Sym *sym_find2(Sym *s, int v)
485 while (s) {
486 if (s->v == v)
487 return s;
488 else if (s->v == -1)
489 return NULL;
490 s = s->prev;
492 return NULL;
495 /* structure lookup */
496 ST_INLN Sym *struct_find(int v)
498 v -= TOK_IDENT;
499 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
500 return NULL;
501 return table_ident[v]->sym_struct;
504 /* find an identifier */
505 ST_INLN Sym *sym_find(int v)
507 v -= TOK_IDENT;
508 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
509 return NULL;
510 return table_ident[v]->sym_identifier;
513 /* push a given symbol on the symbol stack */
514 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
516 Sym *s, **ps;
517 TokenSym *ts;
519 if (local_stack)
520 ps = &local_stack;
521 else
522 ps = &global_stack;
523 s = sym_push2(ps, v, type->t, c);
524 s->type.ref = type->ref;
525 s->r = r;
526 /* don't record fields or anonymous symbols */
527 /* XXX: simplify */
528 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
529 /* record symbol in token array */
530 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
531 if (v & SYM_STRUCT)
532 ps = &ts->sym_struct;
533 else
534 ps = &ts->sym_identifier;
535 s->prev_tok = *ps;
536 *ps = s;
537 s->scope = local_scope;
538 if (s->prev_tok && s->prev_tok->scope == s->scope)
539 tcc_error("redeclaration of '%s'",
540 get_tok_str(v & ~SYM_STRUCT, NULL));
542 return s;
545 /* push a global identifier */
546 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
548 Sym *s, **ps;
549 s = sym_push2(&global_stack, v, t, c);
550 /* don't record anonymous symbol */
551 if (v < SYM_FIRST_ANOM) {
552 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
553 /* modify the top most local identifier, so that
554 sym_identifier will point to 's' when popped */
555 while (*ps != NULL)
556 ps = &(*ps)->prev_tok;
557 s->prev_tok = NULL;
558 *ps = s;
560 return s;
563 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
564 pop them yet from the list, but do remove them from the token array. */
565 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
567 Sym *s, *ss, **ps;
568 TokenSym *ts;
569 int v;
571 s = *ptop;
572 while(s != b) {
573 ss = s->prev;
574 v = s->v;
575 /* remove symbol in token array */
576 /* XXX: simplify */
577 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
578 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
579 if (v & SYM_STRUCT)
580 ps = &ts->sym_struct;
581 else
582 ps = &ts->sym_identifier;
583 *ps = s->prev_tok;
585 if (!keep)
586 sym_free(s);
587 s = ss;
589 if (!keep)
590 *ptop = b;
593 /* ------------------------------------------------------------------------- */
595 static void vsetc(CType *type, int r, CValue *vc)
597 int v;
599 if (vtop >= vstack + (VSTACK_SIZE - 1))
600 tcc_error("memory full (vstack)");
601 /* cannot leave cpu flags live if other instructions are generated. Also
602 avoid leaving VT_JMP anywhere except on the top of the stack
603 because it would complicate the code generator.
605 Don't do this when nocode_wanted. vtop might come from
606 !nocode_wanted regions (see 88_codeopt.c) and transforming
607 it to a register without actually generating code is wrong
608 as their value might still be used for real. All values
609 we push under nocode_wanted will eventually be popped
610 again, so that the VT_CMP/VT_JMP value will be in vtop
611 when code is unsuppressed again.
613 Same logic below in vswap(); */
614 if (vtop >= vstack && !nocode_wanted) {
615 v = vtop->r & VT_VALMASK;
616 if (v == VT_CMP || (v & ~1) == VT_JMP)
617 gv(RC_INT);
620 vtop++;
621 vtop->type = *type;
622 vtop->r = r;
623 vtop->r2 = VT_CONST;
624 vtop->c = *vc;
625 vtop->sym = NULL;
628 ST_FUNC void vswap(void)
630 SValue tmp;
631 /* cannot vswap cpu flags. See comment at vsetc() above */
632 if (vtop >= vstack && !nocode_wanted) {
633 int v = vtop->r & VT_VALMASK;
634 if (v == VT_CMP || (v & ~1) == VT_JMP)
635 gv(RC_INT);
637 tmp = vtop[0];
638 vtop[0] = vtop[-1];
639 vtop[-1] = tmp;
642 /* pop stack value */
643 ST_FUNC void vpop(void)
645 int v;
646 v = vtop->r & VT_VALMASK;
647 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
648 /* for x86, we need to pop the FP stack */
649 if (v == TREG_ST0) {
650 o(0xd8dd); /* fstp %st(0) */
651 } else
652 #endif
653 if (v == VT_JMP || v == VT_JMPI) {
654 /* need to resolve the pending jump if && or || was used without a test */
655 gsym(vtop->c.i);
657 vtop--;
660 /* push a constant of type "type" with an unspecified value */
661 ST_FUNC void vpush(CType *type)
663 CValue cval;
664 vsetc(type, VT_CONST, &cval);
667 /* push integer constant */
668 ST_FUNC void vpushi(int v)
670 CValue cval;
671 cval.i = v;
672 vsetc(&int_type, VT_CONST, &cval);
675 /* push a pointer sized constant */
676 static void vpushs(addr_t v)
678 CValue cval;
679 cval.i = v;
680 vsetc(&size_type, VT_CONST, &cval);
683 /* push arbitrary 64bit constant */
684 ST_FUNC void vpush64(int ty, unsigned long long v)
686 CValue cval;
687 CType ctype;
688 ctype.t = ty;
689 ctype.ref = NULL;
690 cval.i = v;
691 vsetc(&ctype, VT_CONST, &cval);
694 /* push long long constant */
695 static inline void vpushll(long long v)
697 vpush64(VT_LLONG, v);
700 ST_FUNC void vset(CType *type, int r, long v)
702 CValue cval;
704 cval.i = v;
705 vsetc(type, r, &cval);
708 static void vseti(int r, int v)
710 CType type;
711 type.t = VT_INT;
712 type.ref = 0;
713 vset(&type, r, v);
716 ST_FUNC void vpushv(SValue *v)
718 if (vtop >= vstack + (VSTACK_SIZE - 1))
719 tcc_error("memory full (vstack)");
720 vtop++;
721 *vtop = *v;
724 static void vdup(void)
726 vpushv(vtop);
729 /* rotate n first stack elements to the bottom
730 I1 ... In -> I2 ... In I1 [top is right]
732 ST_FUNC void vrotb(int n)
734 int i;
735 SValue tmp;
737 tmp = vtop[-n + 1];
738 for(i=-n+1;i!=0;i++)
739 vtop[i] = vtop[i+1];
740 vtop[0] = tmp;
743 /* rotate the n elements before entry e towards the top
744 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
746 ST_FUNC void vrote(SValue *e, int n)
748 int i;
749 SValue tmp;
751 tmp = *e;
752 for(i = 0;i < n - 1; i++)
753 e[-i] = e[-i - 1];
754 e[-n + 1] = tmp;
757 /* rotate n first stack elements to the top
758 I1 ... In -> In I1 ... I(n-1) [top is right]
760 ST_FUNC void vrott(int n)
762 vrote(vtop, n);
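/* Standalone sketch (ours): the same rotation vrotb() performs, applied to a
   plain int stack where stk[top] is the top element; e.g. with A B C D on the
   stack (D on top), rotating the top 3 gives A C D B. */
#if 0
static void rotb_sketch(int *stk, int top, int n)
{
    int i, tmp = stk[top - n + 1];
    for (i = top - n + 1; i < top; i++)
        stk[i] = stk[i + 1];
    stk[top] = tmp;
}
#endif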
765 /* push a symbol value of TYPE */
766 static inline void vpushsym(CType *type, Sym *sym)
768 CValue cval;
769 cval.i = 0;
770 vsetc(type, VT_CONST | VT_SYM, &cval);
771 vtop->sym = sym;
774 /* Return a static symbol pointing to a section */
775 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
777 int v;
778 Sym *sym;
780 v = anon_sym++;
781 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
782 sym->type.ref = type->ref;
783 sym->r = VT_CONST | VT_SYM;
784 put_extern_sym(sym, sec, offset, size);
785 return sym;
788 /* push a reference to a section offset by adding a dummy symbol */
789 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
791 vpushsym(type, get_sym_ref(type, sec, offset, size));
794 /* define a new external reference to a symbol 'v' of type 'u' */
795 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
797 Sym *s;
799 s = sym_find(v);
800 if (!s) {
801 /* push forward reference */
802 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
803 s->type.ref = type->ref;
804 s->r = r | VT_CONST | VT_SYM;
806 return s;
809 /* Merge some storage attributes. */
810 static void patch_storage(Sym *sym, CType *type)
812 int t;
813 if (!is_compatible_types(&sym->type, type))
814 tcc_error("incompatible types for redefinition of '%s'",
815 get_tok_str(sym->v, NULL));
816 t = type->t;
817 #ifdef TCC_TARGET_PE
818 if ((sym->type.t ^ t) & VT_IMPORT)
819 tcc_error("incompatible dll linkage for redefinition of '%s'",
820 get_tok_str(sym->v, NULL));
821 #endif
822 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
823 if (t & VT_VIS_MASK) {
824 int vis = sym->type.t & VT_VIS_MASK;
825 int vis2 = t & VT_VIS_MASK;
826 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
827 vis = vis2;
828 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
829 vis = (vis < vis2) ? vis : vis2;
830 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
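/* Sketch (ours): as I read the merge above, a non-default ELF visibility
   always wins over STV_DEFAULT, and between two non-default visibilities the
   numerically smaller (more restrictive) one is kept; the VT_VIS_SHIFT
   scaling is omitted here for clarity. */
#if 0
static int merge_visibility_sketch(int vis, int vis2)
{
    if (vis == STV_DEFAULT)
        return vis2;
    if (vis2 == STV_DEFAULT)
        return vis;
    return vis < vis2 ? vis : vis2;
}
#endif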
834 /* define a new external reference to a symbol 'v' */
835 static Sym *external_sym(int v, CType *type, int r)
837 Sym *s;
838 s = sym_find(v);
839 if (!s) {
840 /* push forward reference */
841 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
842 s->type.t |= VT_EXTERN;
843 } else {
844 if (s->type.ref == func_old_type.ref) {
845 s->type.ref = type->ref;
846 s->r = r | VT_CONST | VT_SYM;
847 s->type.t |= VT_EXTERN;
849 patch_storage(s, type);
850 update_storage(s);
852 return s;
855 /* push a reference to global symbol v */
856 ST_FUNC void vpush_global_sym(CType *type, int v)
858 vpushsym(type, external_global_sym(v, type, 0));
861 /* save registers up to (vtop - n) stack entry */
862 ST_FUNC void save_regs(int n)
864 SValue *p, *p1;
865 for(p = vstack, p1 = vtop - n; p <= p1; p++)
866 save_reg(p->r);
869 /* save r to the memory stack, and mark it as being free */
870 ST_FUNC void save_reg(int r)
872 save_reg_upstack(r, 0);
875 /* save r to the memory stack, and mark it as being free,
876 if seen up to (vtop - n) stack entry */
877 ST_FUNC void save_reg_upstack(int r, int n)
879 int l, saved, size, align;
880 SValue *p, *p1, sv;
881 CType *type;
883 if ((r &= VT_VALMASK) >= VT_CONST)
884 return;
885 if (nocode_wanted)
886 return;
888 /* modify all stack values */
889 saved = 0;
890 l = 0;
891 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
892 if ((p->r & VT_VALMASK) == r ||
893 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
894 /* must save value on stack if not already done */
895 if (!saved) {
896 /* NOTE: must reload 'r' because r might be equal to r2 */
897 r = p->r & VT_VALMASK;
898 /* store register in the stack */
899 type = &p->type;
900 if ((p->r & VT_LVAL) ||
901 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
902 #if PTR_SIZE == 8
903 type = &char_pointer_type;
904 #else
905 type = &int_type;
906 #endif
907 size = type_size(type, &align);
908 loc = (loc - size) & -align;
909 sv.type.t = type->t;
910 sv.r = VT_LOCAL | VT_LVAL;
911 sv.c.i = loc;
912 store(r, &sv);
913 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
914 /* x86 specific: need to pop fp register ST0 if saved */
915 if (r == TREG_ST0) {
916 o(0xd8dd); /* fstp %st(0) */
918 #endif
919 #if PTR_SIZE == 4
920 /* special long long case */
921 if ((type->t & VT_BTYPE) == VT_LLONG) {
922 sv.c.i += 4;
923 store(p->r2, &sv);
925 #endif
926 l = loc;
927 saved = 1;
929 /* mark that stack entry as being saved on the stack */
930 if (p->r & VT_LVAL) {
931 /* also clear the bounded flag because the
932 relocation address of the function was stored in
933 p->c.i */
934 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
935 } else {
936 p->r = lvalue_type(p->type.t) | VT_LOCAL;
938 p->r2 = VT_CONST;
939 p->c.i = l;
944 #ifdef TCC_TARGET_ARM
945 /* find a register of class 'rc2' with at most one reference on stack.
946 * If none, call get_reg(rc) */
947 ST_FUNC int get_reg_ex(int rc, int rc2)
949 int r;
950 SValue *p;
952 for(r=0;r<NB_REGS;r++) {
953 if (reg_classes[r] & rc2) {
954 int n;
955 n=0;
956 for(p = vstack; p <= vtop; p++) {
957 if ((p->r & VT_VALMASK) == r ||
958 (p->r2 & VT_VALMASK) == r)
959 n++;
961 if (n <= 1)
962 return r;
965 return get_reg(rc);
967 #endif
969 /* find a free register of class 'rc'. If none, save one register */
970 ST_FUNC int get_reg(int rc)
972 int r;
973 SValue *p;
975 /* find a free register */
976 for(r=0;r<NB_REGS;r++) {
977 if (reg_classes[r] & rc) {
978 if (nocode_wanted)
979 return r;
980 for(p=vstack;p<=vtop;p++) {
981 if ((p->r & VT_VALMASK) == r ||
982 (p->r2 & VT_VALMASK) == r)
983 goto notfound;
985 return r;
987 notfound: ;
990 /* no register left : free the first one on the stack (VERY
991 IMPORTANT to start from the bottom to ensure that we don't
992 spill registers used in gen_opi()) */
993 for(p=vstack;p<=vtop;p++) {
994 /* look at second register (if long long) */
995 r = p->r2 & VT_VALMASK;
996 if (r < VT_CONST && (reg_classes[r] & rc))
997 goto save_found;
998 r = p->r & VT_VALMASK;
999 if (r < VT_CONST && (reg_classes[r] & rc)) {
1000 save_found:
1001 save_reg(r);
1002 return r;
1005 /* Should never come here */
1006 return -1;
1009 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1010 if needed */
1011 static void move_reg(int r, int s, int t)
1013 SValue sv;
1015 if (r != s) {
1016 save_reg(r);
1017 sv.type.t = t;
1018 sv.type.ref = NULL;
1019 sv.r = s;
1020 sv.c.i = 0;
1021 load(r, &sv);
1025 /* get address of vtop (vtop MUST BE an lvalue) */
1026 ST_FUNC void gaddrof(void)
1028 vtop->r &= ~VT_LVAL;
1029 /* tricky: if saved lvalue, then we can go back to lvalue */
1030 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1031 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1036 #ifdef CONFIG_TCC_BCHECK
1037 /* generate lvalue bound code */
1038 static void gbound(void)
1040 int lval_type;
1041 CType type1;
1043 vtop->r &= ~VT_MUSTBOUND;
1044 /* if lvalue, then use checking code before dereferencing */
1045 if (vtop->r & VT_LVAL) {
1046 /* if not VT_BOUNDED value, then make one */
1047 if (!(vtop->r & VT_BOUNDED)) {
1048 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1049 /* must save type because we must set it to int to get pointer */
1050 type1 = vtop->type;
1051 vtop->type.t = VT_PTR;
1052 gaddrof();
1053 vpushi(0);
1054 gen_bounded_ptr_add();
1055 vtop->r |= lval_type;
1056 vtop->type = type1;
1058 /* then check for dereferencing */
1059 gen_bounded_ptr_deref();
1062 #endif
1064 /* store vtop in a register belonging to class 'rc'. lvalues are
1065 converted to values. Cannot be used if the value cannot be
1066 converted to a register value (such as structures). */
1067 ST_FUNC int gv(int rc)
1069 int r, bit_pos, bit_size, size, align;
1070 int rc2;
1072 /* NOTE: get_reg can modify vstack[] */
1073 if (vtop->type.t & VT_BITFIELD) {
1074 CType type;
1075 int bits = 32;
1076 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1077 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1078 /* remove bit field info to avoid loops */
1079 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1080 /* cast to int to propagate signedness in following ops */
1081 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1082 type.t = VT_LLONG;
1083 bits = 64;
1084 } else
1085 type.t = VT_INT;
1086 if((vtop->type.t & VT_UNSIGNED) ||
1087 (vtop->type.t & VT_BTYPE) == VT_BOOL ||
1088 (((vtop->type.t & VT_BTYPE) == VT_ENUM) &&
1089 vtop->type.ref->a.unsigned_enum))
1090 type.t |= VT_UNSIGNED;
1091 gen_cast(&type);
1092 /* generate shifts */
1093 vpushi(bits - (bit_pos + bit_size));
1094 gen_op(TOK_SHL);
1095 vpushi(bits - bit_size);
1096 /* NOTE: transformed to SHR if unsigned */
1097 gen_op(TOK_SAR);
1098 r = gv(rc);
1099 } else {
1100 if (is_float(vtop->type.t) &&
1101 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1102 unsigned long offset;
1103 /* CPUs usually cannot use float constants, so we store them
1104 generically in the data segment */
1105 size = type_size(&vtop->type, &align);
1106 offset = section_add(data_section, size, align);
1107 vpush_ref(&vtop->type, data_section, offset, size);
1108 vswap();
1109 init_putv(&vtop->type, data_section, offset);
1110 vtop->r |= VT_LVAL;
1112 #ifdef CONFIG_TCC_BCHECK
1113 if (vtop->r & VT_MUSTBOUND)
1114 gbound();
1115 #endif
1117 r = vtop->r & VT_VALMASK;
1118 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1119 #ifndef TCC_TARGET_ARM64
1120 if (rc == RC_IRET)
1121 rc2 = RC_LRET;
1122 #ifdef TCC_TARGET_X86_64
1123 else if (rc == RC_FRET)
1124 rc2 = RC_QRET;
1125 #endif
1126 #endif
1127 /* need to reload if:
1128 - constant
1129 - lvalue (need to dereference pointer)
1130 - already a register, but not in the right class */
1131 if (r >= VT_CONST
1132 || (vtop->r & VT_LVAL)
1133 || !(reg_classes[r] & rc)
1134 #if PTR_SIZE == 8
1135 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1136 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1137 #else
1138 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1139 #endif
1142 r = get_reg(rc);
1143 #if PTR_SIZE == 8
1144 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1145 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1146 #else
1147 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1148 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1149 unsigned long long ll;
1150 #endif
1151 int r2, original_type;
1152 original_type = vtop->type.t;
1153 /* two register type load : expand to two words
1154 temporarily */
1155 #if PTR_SIZE == 4
1156 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1157 /* load constant */
1158 ll = vtop->c.i;
1159 vtop->c.i = ll; /* first word */
1160 load(r, vtop);
1161 vtop->r = r; /* save register value */
1162 vpushi(ll >> 32); /* second word */
1163 } else
1164 #endif
1165 if (vtop->r & VT_LVAL) {
1166 /* We do not want to modify the long long
1167 pointer here, so the safest (and least
1168 efficient) approach is to save all the other
1169 registers on the stack. XXX: totally inefficient. */
1170 #if 0
1171 save_regs(1);
1172 #else
1173 /* lvalue_save: save only if used further down the stack */
1174 save_reg_upstack(vtop->r, 1);
1175 #endif
1176 /* load from memory */
1177 vtop->type.t = load_type;
1178 load(r, vtop);
1179 vdup();
1180 vtop[-1].r = r; /* save register value */
1181 /* increment pointer to get second word */
1182 vtop->type.t = addr_type;
1183 gaddrof();
1184 vpushi(load_size);
1185 gen_op('+');
1186 vtop->r |= VT_LVAL;
1187 vtop->type.t = load_type;
1188 } else {
1189 /* move registers */
1190 load(r, vtop);
1191 vdup();
1192 vtop[-1].r = r; /* save register value */
1193 vtop->r = vtop[-1].r2;
1195 /* Allocate second register. Here we rely on the fact that
1196 get_reg() tries first to free r2 of an SValue. */
1197 r2 = get_reg(rc2);
1198 load(r2, vtop);
1199 vpop();
1200 /* write second register */
1201 vtop->r2 = r2;
1202 vtop->type.t = original_type;
1203 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1204 int t1, t;
1205 /* lvalue of scalar type : need to use lvalue type
1206 because of possible cast */
1207 t = vtop->type.t;
1208 t1 = t;
1209 /* compute memory access type */
1210 if (vtop->r & VT_LVAL_BYTE)
1211 t = VT_BYTE;
1212 else if (vtop->r & VT_LVAL_SHORT)
1213 t = VT_SHORT;
1214 if (vtop->r & VT_LVAL_UNSIGNED)
1215 t |= VT_UNSIGNED;
1216 vtop->type.t = t;
1217 load(r, vtop);
1218 /* restore wanted type */
1219 vtop->type.t = t1;
1220 } else {
1221 /* one register type load */
1222 load(r, vtop);
1225 vtop->r = r;
1226 #ifdef TCC_TARGET_C67
1227 /* uses register pairs for doubles */
1228 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1229 vtop->r2 = r+1;
1230 #endif
1232 return r;
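/* Sketch (ours): the shift pair emitted for bitfield loads at the top of gv()
   extracts a field of bit_size bits starting at bit_pos, preserving the sign
   when the field is signed (SAR) and zero-filling when unsigned (SHR); this
   relies on the usual two's-complement shift behaviour, as the generated
   code does. */
#if 0
static int extract_signed_field(int word, int bit_pos, int bit_size)
{
    /* shift the field up to the top bits, then arithmetic-shift it back */
    return (word << (32 - (bit_pos + bit_size))) >> (32 - bit_size);
}
static unsigned extract_unsigned_field(unsigned word, int bit_pos, int bit_size)
{
    /* same idea, but the right shift is logical */
    return (word << (32 - (bit_pos + bit_size))) >> (32 - bit_size);
}
#endif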
1235 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1236 ST_FUNC void gv2(int rc1, int rc2)
1238 int v;
1240 /* generate more generic register first. But VT_JMP or VT_CMP
1241 values must be generated first in all cases to avoid possible
1242 reload errors */
1243 v = vtop[0].r & VT_VALMASK;
1244 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1245 vswap();
1246 gv(rc1);
1247 vswap();
1248 gv(rc2);
1249 /* test if reload is needed for first register */
1250 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1251 vswap();
1252 gv(rc1);
1253 vswap();
1255 } else {
1256 gv(rc2);
1257 vswap();
1258 gv(rc1);
1259 vswap();
1260 /* test if reload is needed for first register */
1261 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1262 gv(rc2);
1267 #ifndef TCC_TARGET_ARM64
1268 /* wrapper around RC_FRET to return a register by type */
1269 static int rc_fret(int t)
1271 #ifdef TCC_TARGET_X86_64
1272 if (t == VT_LDOUBLE) {
1273 return RC_ST0;
1275 #endif
1276 return RC_FRET;
1278 #endif
1280 /* wrapper around REG_FRET to return a register by type */
1281 static int reg_fret(int t)
1283 #ifdef TCC_TARGET_X86_64
1284 if (t == VT_LDOUBLE) {
1285 return TREG_ST0;
1287 #endif
1288 return REG_FRET;
1291 #if PTR_SIZE == 4
1292 /* expand a 64bit value on the stack into two ints */
1293 static void lexpand(void)
1295 int u, v;
1296 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1297 v = vtop->r & (VT_VALMASK | VT_LVAL);
1298 if (v == VT_CONST) {
1299 vdup();
1300 vtop[0].c.i >>= 32;
1301 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1302 vdup();
1303 vtop[0].c.i += 4;
1304 } else {
1305 gv(RC_INT);
1306 vdup();
1307 vtop[0].r = vtop[-1].r2;
1308 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1310 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1312 #endif
1314 #ifdef TCC_TARGET_ARM
1315 /* expand long long on stack */
1316 ST_FUNC void lexpand_nr(void)
1318 int u,v;
1320 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1321 vdup();
1322 vtop->r2 = VT_CONST;
1323 vtop->type.t = VT_INT | u;
1324 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1325 if (v == VT_CONST) {
1326 vtop[-1].c.i = vtop->c.i;
1327 vtop->c.i = vtop->c.i >> 32;
1328 vtop->r = VT_CONST;
1329 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1330 vtop->c.i += 4;
1331 vtop->r = vtop[-1].r;
1332 } else if (v > VT_CONST) {
1333 vtop--;
1334 lexpand();
1335 } else
1336 vtop->r = vtop[-1].r2;
1337 vtop[-1].r2 = VT_CONST;
1338 vtop[-1].type.t = VT_INT | u;
1340 #endif
1342 #if PTR_SIZE == 4
1343 /* build a long long from two ints */
1344 static void lbuild(int t)
1346 gv2(RC_INT, RC_INT);
1347 vtop[-1].r2 = vtop[0].r;
1348 vtop[-1].type.t = t;
1349 vpop();
1351 #endif
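/* Sketch (ours): on 32-bit targets lexpand() leaves the low 32-bit word in
   place and pushes the high word, and lbuild() re-pairs them; numerically
   that is just the following split and join. */
#if 0
#include <stdint.h>
static void split64_sketch(uint64_t v, uint32_t *lo, uint32_t *hi)
{
    *lo = (uint32_t)v;           /* lower word */
    *hi = (uint32_t)(v >> 32);   /* upper word */
}
static uint64_t join64_sketch(uint32_t lo, uint32_t hi)
{
    return (uint64_t)hi << 32 | lo;
}
#endif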
1353 /* convert stack entry to register and duplicate its value in another
1354 register */
1355 static void gv_dup(void)
1357 int rc, t, r, r1;
1358 SValue sv;
1360 t = vtop->type.t;
1361 #if PTR_SIZE == 4
1362 if ((t & VT_BTYPE) == VT_LLONG) {
1363 lexpand();
1364 gv_dup();
1365 vswap();
1366 vrotb(3);
1367 gv_dup();
1368 vrotb(4);
1369 /* stack: H L L1 H1 */
1370 lbuild(t);
1371 vrotb(3);
1372 vrotb(3);
1373 vswap();
1374 lbuild(t);
1375 vswap();
1376 } else
1377 #endif
1379 /* duplicate value */
1380 rc = RC_INT;
1381 sv.type.t = VT_INT;
1382 if (is_float(t)) {
1383 rc = RC_FLOAT;
1384 #ifdef TCC_TARGET_X86_64
1385 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1386 rc = RC_ST0;
1388 #endif
1389 sv.type.t = t;
1391 r = gv(rc);
1392 r1 = get_reg(rc);
1393 sv.r = r;
1394 sv.c.i = 0;
1395 load(r1, &sv); /* move r to r1 */
1396 vdup();
1397 /* duplicates value */
1398 if (r != r1)
1399 vtop->r = r1;
1403 /* Generate value test
1405 * Generate a test for any value (jump, comparison and integers) */
1406 ST_FUNC int gvtst(int inv, int t)
1408 int v = vtop->r & VT_VALMASK;
1409 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1410 vpushi(0);
1411 gen_op(TOK_NE);
1413 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1414 /* constant jmp optimization */
1415 if ((vtop->c.i != 0) != inv)
1416 t = gjmp(t);
1417 vtop--;
1418 return t;
1420 return gtst(inv, t);
1423 #if PTR_SIZE == 4
1424 /* generate CPU independent (unsigned) long long operations */
1425 static void gen_opl(int op)
1427 int t, a, b, op1, c, i;
1428 int func;
1429 unsigned short reg_iret = REG_IRET;
1430 unsigned short reg_lret = REG_LRET;
1431 SValue tmp;
1433 switch(op) {
1434 case '/':
1435 case TOK_PDIV:
1436 func = TOK___divdi3;
1437 goto gen_func;
1438 case TOK_UDIV:
1439 func = TOK___udivdi3;
1440 goto gen_func;
1441 case '%':
1442 func = TOK___moddi3;
1443 goto gen_mod_func;
1444 case TOK_UMOD:
1445 func = TOK___umoddi3;
1446 gen_mod_func:
1447 #ifdef TCC_ARM_EABI
1448 reg_iret = TREG_R2;
1449 reg_lret = TREG_R3;
1450 #endif
1451 gen_func:
1452 /* call generic long long function */
1453 vpush_global_sym(&func_old_type, func);
1454 vrott(3);
1455 gfunc_call(2);
1456 vpushi(0);
1457 vtop->r = reg_iret;
1458 vtop->r2 = reg_lret;
1459 break;
1460 case '^':
1461 case '&':
1462 case '|':
1463 case '*':
1464 case '+':
1465 case '-':
1466 //pv("gen_opl A",0,2);
1467 t = vtop->type.t;
1468 vswap();
1469 lexpand();
1470 vrotb(3);
1471 lexpand();
1472 /* stack: L1 H1 L2 H2 */
1473 tmp = vtop[0];
1474 vtop[0] = vtop[-3];
1475 vtop[-3] = tmp;
1476 tmp = vtop[-2];
1477 vtop[-2] = vtop[-3];
1478 vtop[-3] = tmp;
1479 vswap();
1480 /* stack: H1 H2 L1 L2 */
1481 //pv("gen_opl B",0,4);
1482 if (op == '*') {
1483 vpushv(vtop - 1);
1484 vpushv(vtop - 1);
1485 gen_op(TOK_UMULL);
1486 lexpand();
1487 /* stack: H1 H2 L1 L2 ML MH */
1488 for(i=0;i<4;i++)
1489 vrotb(6);
1490 /* stack: ML MH H1 H2 L1 L2 */
1491 tmp = vtop[0];
1492 vtop[0] = vtop[-2];
1493 vtop[-2] = tmp;
1494 /* stack: ML MH H1 L2 H2 L1 */
1495 gen_op('*');
1496 vrotb(3);
1497 vrotb(3);
1498 gen_op('*');
1499 /* stack: ML MH M1 M2 */
1500 gen_op('+');
1501 gen_op('+');
1502 } else if (op == '+' || op == '-') {
1503 /* XXX: add non carry method too (for MIPS or alpha) */
1504 if (op == '+')
1505 op1 = TOK_ADDC1;
1506 else
1507 op1 = TOK_SUBC1;
1508 gen_op(op1);
1509 /* stack: H1 H2 (L1 op L2) */
1510 vrotb(3);
1511 vrotb(3);
1512 gen_op(op1 + 1); /* TOK_xxxC2 */
1513 } else {
1514 gen_op(op);
1515 /* stack: H1 H2 (L1 op L2) */
1516 vrotb(3);
1517 vrotb(3);
1518 /* stack: (L1 op L2) H1 H2 */
1519 gen_op(op);
1520 /* stack: (L1 op L2) (H1 op H2) */
1522 /* stack: L H */
1523 lbuild(t);
1524 break;
1525 case TOK_SAR:
1526 case TOK_SHR:
1527 case TOK_SHL:
1528 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1529 t = vtop[-1].type.t;
1530 vswap();
1531 lexpand();
1532 vrotb(3);
1533 /* stack: L H shift */
1534 c = (int)vtop->c.i;
1535 /* constant: simpler */
1536 /* NOTE: all comments are for SHL. The other cases are
1537 done by swapping words */
1538 vpop();
1539 if (op != TOK_SHL)
1540 vswap();
1541 if (c >= 32) {
1542 /* stack: L H */
1543 vpop();
1544 if (c > 32) {
1545 vpushi(c - 32);
1546 gen_op(op);
1548 if (op != TOK_SAR) {
1549 vpushi(0);
1550 } else {
1551 gv_dup();
1552 vpushi(31);
1553 gen_op(TOK_SAR);
1555 vswap();
1556 } else {
1557 vswap();
1558 gv_dup();
1559 /* stack: H L L */
1560 vpushi(c);
1561 gen_op(op);
1562 vswap();
1563 vpushi(32 - c);
1564 if (op == TOK_SHL)
1565 gen_op(TOK_SHR);
1566 else
1567 gen_op(TOK_SHL);
1568 vrotb(3);
1569 /* stack: L L H */
1570 vpushi(c);
1571 if (op == TOK_SHL)
1572 gen_op(TOK_SHL);
1573 else
1574 gen_op(TOK_SHR);
1575 gen_op('|');
1577 if (op != TOK_SHL)
1578 vswap();
1579 lbuild(t);
1580 } else {
1581 /* XXX: should provide a faster fallback on x86 ? */
1582 switch(op) {
1583 case TOK_SAR:
1584 func = TOK___ashrdi3;
1585 goto gen_func;
1586 case TOK_SHR:
1587 func = TOK___lshrdi3;
1588 goto gen_func;
1589 case TOK_SHL:
1590 func = TOK___ashldi3;
1591 goto gen_func;
1594 break;
1595 default:
1596 /* compare operations */
1597 t = vtop->type.t;
1598 vswap();
1599 lexpand();
1600 vrotb(3);
1601 lexpand();
1602 /* stack: L1 H1 L2 H2 */
1603 tmp = vtop[-1];
1604 vtop[-1] = vtop[-2];
1605 vtop[-2] = tmp;
1606 /* stack: L1 L2 H1 H2 */
1607 /* compare high */
1608 op1 = op;
1609 /* when values are equal, we need to compare low words. Since
1610 the jump is inverted, we invert the test too. */
1611 if (op1 == TOK_LT)
1612 op1 = TOK_LE;
1613 else if (op1 == TOK_GT)
1614 op1 = TOK_GE;
1615 else if (op1 == TOK_ULT)
1616 op1 = TOK_ULE;
1617 else if (op1 == TOK_UGT)
1618 op1 = TOK_UGE;
1619 a = 0;
1620 b = 0;
1621 gen_op(op1);
1622 if (op == TOK_NE) {
1623 b = gvtst(0, 0);
1624 } else {
1625 a = gvtst(1, 0);
1626 if (op != TOK_EQ) {
1627 /* generate non equal test */
1628 vpushi(TOK_NE);
1629 vtop->r = VT_CMP;
1630 b = gvtst(0, 0);
1633 /* compare low. Always unsigned */
1634 op1 = op;
1635 if (op1 == TOK_LT)
1636 op1 = TOK_ULT;
1637 else if (op1 == TOK_LE)
1638 op1 = TOK_ULE;
1639 else if (op1 == TOK_GT)
1640 op1 = TOK_UGT;
1641 else if (op1 == TOK_GE)
1642 op1 = TOK_UGE;
1643 gen_op(op1);
1644 a = gvtst(1, a);
1645 gsym(b);
1646 vseti(VT_JMPI, a);
1647 break;
1650 #endif
1652 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1654 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1655 return (a ^ b) >> 63 ? -x : x;
1658 static int gen_opic_lt(uint64_t a, uint64_t b)
1660 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
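/* Sanity sketch (ours): the two helpers above reproduce signed 64-bit
   division and signed '<' on operands that gen_opic() carries around as
   uint64_t. */
#if 0
#include <assert.h>
#include <stdint.h>
static void gen_opic_helpers_demo(void)
{
    int64_t a = -7, b = 2;
    assert((int64_t)gen_opic_sdiv((uint64_t)a, (uint64_t)b) == a / b); /* -3 */
    assert(gen_opic_lt((uint64_t)a, (uint64_t)b) == (a < b));          /* 1 */
}
#endif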
1663 /* handle integer constant folding and various
1664 machine-independent optimizations */
1665 static void gen_opic(int op)
1667 SValue *v1 = vtop - 1;
1668 SValue *v2 = vtop;
1669 int t1 = v1->type.t & VT_BTYPE;
1670 int t2 = v2->type.t & VT_BTYPE;
1671 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1672 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1673 uint64_t l1 = c1 ? v1->c.i : 0;
1674 uint64_t l2 = c2 ? v2->c.i : 0;
1675 int shm = (t1 == VT_LLONG) ? 63 : 31;
1677 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1678 l1 = ((uint32_t)l1 |
1679 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1680 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1681 l2 = ((uint32_t)l2 |
1682 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1684 if (c1 && c2) {
1685 switch(op) {
1686 case '+': l1 += l2; break;
1687 case '-': l1 -= l2; break;
1688 case '&': l1 &= l2; break;
1689 case '^': l1 ^= l2; break;
1690 case '|': l1 |= l2; break;
1691 case '*': l1 *= l2; break;
1693 case TOK_PDIV:
1694 case '/':
1695 case '%':
1696 case TOK_UDIV:
1697 case TOK_UMOD:
1698 /* if division by zero, generate explicit division */
1699 if (l2 == 0) {
1700 if (const_wanted)
1701 tcc_error("division by zero in constant");
1702 goto general_case;
1704 switch(op) {
1705 default: l1 = gen_opic_sdiv(l1, l2); break;
1706 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1707 case TOK_UDIV: l1 = l1 / l2; break;
1708 case TOK_UMOD: l1 = l1 % l2; break;
1710 break;
1711 case TOK_SHL: l1 <<= (l2 & shm); break;
1712 case TOK_SHR: l1 >>= (l2 & shm); break;
1713 case TOK_SAR:
1714 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1715 break;
1716 /* tests */
1717 case TOK_ULT: l1 = l1 < l2; break;
1718 case TOK_UGE: l1 = l1 >= l2; break;
1719 case TOK_EQ: l1 = l1 == l2; break;
1720 case TOK_NE: l1 = l1 != l2; break;
1721 case TOK_ULE: l1 = l1 <= l2; break;
1722 case TOK_UGT: l1 = l1 > l2; break;
1723 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1724 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1725 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1726 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1727 /* logical */
1728 case TOK_LAND: l1 = l1 && l2; break;
1729 case TOK_LOR: l1 = l1 || l2; break;
1730 default:
1731 goto general_case;
1733 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1734 l1 = ((uint32_t)l1 |
1735 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1736 v1->c.i = l1;
1737 vtop--;
1738 } else {
1739 /* if commutative ops, put c2 as constant */
1740 if (c1 && (op == '+' || op == '&' || op == '^' ||
1741 op == '|' || op == '*')) {
1742 vswap();
1743 c2 = c1; //c = c1, c1 = c2, c2 = c;
1744 l2 = l1; //l = l1, l1 = l2, l2 = l;
1746 if (!const_wanted &&
1747 c1 && ((l1 == 0 &&
1748 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1749 (l1 == -1 && op == TOK_SAR))) {
1750 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1751 vtop--;
1752 } else if (!const_wanted &&
1753 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1754 (l2 == -1 && op == '|') ||
1755 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1756 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1757 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1758 if (l2 == 1)
1759 vtop->c.i = 0;
1760 vswap();
1761 vtop--;
1762 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1763 op == TOK_PDIV) &&
1764 l2 == 1) ||
1765 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1766 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1767 l2 == 0) ||
1768 (op == '&' &&
1769 l2 == -1))) {
1770 /* filter out NOP operations like x*1, x-0, x&-1... */
1771 vtop--;
1772 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1773 /* try to use shifts instead of muls or divs */
1774 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1775 int n = -1;
1776 while (l2) {
1777 l2 >>= 1;
1778 n++;
1780 vtop->c.i = n;
1781 if (op == '*')
1782 op = TOK_SHL;
1783 else if (op == TOK_PDIV)
1784 op = TOK_SAR;
1785 else
1786 op = TOK_SHR;
1788 goto general_case;
1789 } else if (c2 && (op == '+' || op == '-') &&
1790 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1791 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1792 /* symbol + constant case */
1793 if (op == '-')
1794 l2 = -l2;
1795 l2 += vtop[-1].c.i;
1796 /* The backends can't always deal with addends to symbols
1797 larger than +-1<<31. Don't construct such. */
1798 if ((int)l2 != l2)
1799 goto general_case;
1800 vtop--;
1801 vtop->c.i = l2;
1802 } else {
1803 general_case:
1804 /* call low level op generator */
1805 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1806 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1807 gen_opl(op);
1808 else
1809 gen_opi(op);
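/* Sketch (ours): the strength-reduction branch above computes n = log2(l2)
   for a power-of-two constant l2 and then turns '*' into SHL and the
   pointer/unsigned divisions into SAR/SHR, e.g. x * 8 becomes x << 3. */
#if 0
static int log2_of_power_of_two(unsigned long long l2)
{
    int n = -1;
    while (l2) {    /* same loop as in gen_opic() above */
        l2 >>= 1;
        n++;
    }
    return n;       /* 8 -> 3 */
}
#endif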
1814 /* generate a floating point operation with constant propagation */
1815 static void gen_opif(int op)
1817 int c1, c2;
1818 SValue *v1, *v2;
1819 long double f1, f2;
1821 v1 = vtop - 1;
1822 v2 = vtop;
1823 /* currently, we cannot do computations with forward symbols */
1824 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1826 if (c1 && c2) {
1827 if (v1->type.t == VT_FLOAT) {
1828 f1 = v1->c.f;
1829 f2 = v2->c.f;
1830 } else if (v1->type.t == VT_DOUBLE) {
1831 f1 = v1->c.d;
1832 f2 = v2->c.d;
1833 } else {
1834 f1 = v1->c.ld;
1835 f2 = v2->c.ld;
1838 /* NOTE: we only do constant propagation on finite numbers (not
1839 NaN or infinity) (ANSI spec) */
1840 if (!ieee_finite(f1) || !ieee_finite(f2))
1841 goto general_case;
1843 switch(op) {
1844 case '+': f1 += f2; break;
1845 case '-': f1 -= f2; break;
1846 case '*': f1 *= f2; break;
1847 case '/':
1848 if (f2 == 0.0) {
1849 if (const_wanted)
1850 tcc_error("division by zero in constant");
1851 goto general_case;
1853 f1 /= f2;
1854 break;
1855 /* XXX: also handles tests ? */
1856 default:
1857 goto general_case;
1859 /* XXX: overflow test ? */
1860 if (v1->type.t == VT_FLOAT) {
1861 v1->c.f = f1;
1862 } else if (v1->type.t == VT_DOUBLE) {
1863 v1->c.d = f1;
1864 } else {
1865 v1->c.ld = f1;
1867 vtop--;
1868 } else {
1869 general_case:
1870 gen_opf(op);
1874 static int pointed_size(CType *type)
1876 int align;
1877 return type_size(pointed_type(type), &align);
1880 static void vla_runtime_pointed_size(CType *type)
1882 int align;
1883 vla_runtime_type_size(pointed_type(type), &align);
1886 static inline int is_null_pointer(SValue *p)
1888 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1889 return 0;
1890 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1891 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1892 ((p->type.t & VT_BTYPE) == VT_PTR &&
1893 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1896 static inline int is_integer_btype(int bt)
1898 return (bt == VT_BYTE || bt == VT_SHORT ||
1899 bt == VT_INT || bt == VT_LLONG);
1902 /* check types for comparison or subtraction of pointers */
1903 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1905 CType *type1, *type2, tmp_type1, tmp_type2;
1906 int bt1, bt2;
1908 /* null pointers are accepted for all comparisons, as in gcc */
1909 if (is_null_pointer(p1) || is_null_pointer(p2))
1910 return;
1911 type1 = &p1->type;
1912 type2 = &p2->type;
1913 bt1 = type1->t & VT_BTYPE;
1914 bt2 = type2->t & VT_BTYPE;
1915 /* accept comparison between pointer and integer with a warning */
1916 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1917 if (op != TOK_LOR && op != TOK_LAND )
1918 tcc_warning("comparison between pointer and integer");
1919 return;
1922 /* both must be pointers or implicit function pointers */
1923 if (bt1 == VT_PTR) {
1924 type1 = pointed_type(type1);
1925 } else if (bt1 != VT_FUNC)
1926 goto invalid_operands;
1928 if (bt2 == VT_PTR) {
1929 type2 = pointed_type(type2);
1930 } else if (bt2 != VT_FUNC) {
1931 invalid_operands:
1932 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1934 if ((type1->t & VT_BTYPE) == VT_VOID ||
1935 (type2->t & VT_BTYPE) == VT_VOID)
1936 return;
1937 tmp_type1 = *type1;
1938 tmp_type2 = *type2;
1939 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1940 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1941 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1942 /* gcc-like error if '-' is used */
1943 if (op == '-')
1944 goto invalid_operands;
1945 else
1946 tcc_warning("comparison of distinct pointer types lacks a cast");
1950 /* generic gen_op: handles types problems */
1951 ST_FUNC void gen_op(int op)
1953 int u, t1, t2, bt1, bt2, t;
1954 CType type1;
1956 redo:
1957 t1 = vtop[-1].type.t;
1958 t2 = vtop[0].type.t;
1959 bt1 = t1 & VT_BTYPE;
1960 bt2 = t2 & VT_BTYPE;
1962 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1963 tcc_error("operation on a struct");
1964 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1965 if (bt2 == VT_FUNC) {
1966 mk_pointer(&vtop->type);
1967 gaddrof();
1969 if (bt1 == VT_FUNC) {
1970 vswap();
1971 mk_pointer(&vtop->type);
1972 gaddrof();
1973 vswap();
1975 goto redo;
1976 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1977 /* at least one operand is a pointer */
1978 /* relational op: must be both pointers */
1979 if (op >= TOK_ULT && op <= TOK_LOR) {
1980 check_comparison_pointer_types(vtop - 1, vtop, op);
1981 /* pointers are handled as unsigned */
1982 #if PTR_SIZE == 8
1983 t = VT_LLONG | VT_UNSIGNED;
1984 #else
1985 t = VT_INT | VT_UNSIGNED;
1986 #endif
1987 goto std_op;
1989 /* if both pointers, then it must be the '-' op */
1990 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1991 if (op != '-')
1992 tcc_error("cannot use pointers here");
1993 check_comparison_pointer_types(vtop - 1, vtop, op);
1994 /* XXX: check that types are compatible */
1995 if (vtop[-1].type.t & VT_VLA) {
1996 vla_runtime_pointed_size(&vtop[-1].type);
1997 } else {
1998 vpushi(pointed_size(&vtop[-1].type));
2000 vrott(3);
2001 gen_opic(op);
2002 /* set to integer type */
2003 #if PTR_SIZE == 8
2004 vtop->type.t = VT_LLONG;
2005 #else
2006 vtop->type.t = VT_INT;
2007 #endif
2008 vswap();
2009 gen_op(TOK_PDIV);
2010 } else {
2011 /* exactly one pointer : must be '+' or '-'. */
2012 if (op != '-' && op != '+')
2013 tcc_error("cannot use pointers here");
2014 /* Put pointer as first operand */
2015 if (bt2 == VT_PTR) {
2016 vswap();
2017 t = t1, t1 = t2, t2 = t;
2019 #if PTR_SIZE == 4
2020 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2021 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2022 gen_cast(&int_type);
2023 #endif
2024 type1 = vtop[-1].type;
2025 type1.t &= ~VT_ARRAY;
2026 if (vtop[-1].type.t & VT_VLA)
2027 vla_runtime_pointed_size(&vtop[-1].type);
2028 else {
2029 u = pointed_size(&vtop[-1].type);
2030 if (u < 0)
2031 tcc_error("unknown array element size");
2032 #if PTR_SIZE == 8
2033 vpushll(u);
2034 #else
2035 /* XXX: cast to int ? (long long case) */
2036 vpushi(u);
2037 #endif
2039 gen_op('*');
2040 #if 0
2041 /* #ifdef CONFIG_TCC_BCHECK
2042 The main reason for removing this code:
2043 #include <stdio.h>
2044 int main ()
2046 int v[10];
2047 int i = 10;
2048 int j = 9;
2049 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2050 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2052 When this code is enabled, the output looks like
2053 v+i-j = 0xfffffffe
2054 v+(i-j) = 0xbff84000
2056 /* if evaluating constant expression, no code should be
2057 generated, so no bound check */
2058 if (tcc_state->do_bounds_check && !const_wanted) {
2059 /* if bounded pointers, we generate a special code to
2060 test bounds */
2061 if (op == '-') {
2062 vpushi(0);
2063 vswap();
2064 gen_op('-');
2066 gen_bounded_ptr_add();
2067 } else
2068 #endif
2070 gen_opic(op);
2072 /* restore the type in case gen_opic() swapped the operands */
2073 vtop->type = type1;
2075 } else if (is_float(bt1) || is_float(bt2)) {
2076 /* compute bigger type and do implicit casts */
2077 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2078 t = VT_LDOUBLE;
2079 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2080 t = VT_DOUBLE;
2081 } else {
2082 t = VT_FLOAT;
2084 /* floats can only be used for a few operations */
2085 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2086 (op < TOK_ULT || op > TOK_GT))
2087 tcc_error("invalid operands for binary operation");
2088 goto std_op;
2089 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2090 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2091 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2092 t |= VT_UNSIGNED;
2093 goto std_op;
2094 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2095 /* cast to biggest op */
2096 t = VT_LLONG;
2097 /* convert to unsigned if it does not fit in a long long */
2098 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2099 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2100 t |= VT_UNSIGNED;
2101 goto std_op;
2102 } else {
2103 /* integer operations */
2104 t = VT_INT;
2105 /* convert to unsigned if it does not fit in an integer */
2106 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2107 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2108 t |= VT_UNSIGNED;
2109 std_op:
2110 /* XXX: currently, some unsigned operations are explicit, so
2111 we modify them here */
2112 if (t & VT_UNSIGNED) {
2113 if (op == TOK_SAR)
2114 op = TOK_SHR;
2115 else if (op == '/')
2116 op = TOK_UDIV;
2117 else if (op == '%')
2118 op = TOK_UMOD;
2119 else if (op == TOK_LT)
2120 op = TOK_ULT;
2121 else if (op == TOK_GT)
2122 op = TOK_UGT;
2123 else if (op == TOK_LE)
2124 op = TOK_ULE;
2125 else if (op == TOK_GE)
2126 op = TOK_UGE;
2128 vswap();
2129 type1.t = t;
2130 gen_cast(&type1);
2131 vswap();
2132 /* special case for shifts and long long: we keep the shift as
2133 an integer */
2134 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2135 type1.t = VT_INT;
2136 gen_cast(&type1);
2137 if (is_float(t))
2138 gen_opif(op);
2139 else
2140 gen_opic(op);
2141 if (op >= TOK_ULT && op <= TOK_GT) {
2142 /* relational op: the result is an int */
2143 vtop->type.t = VT_INT;
2144 } else {
2145 vtop->type.t = t;
2148 // Make sure that we have converted to an rvalue:
2149 if (vtop->r & VT_LVAL)
2150 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2153 #ifndef TCC_TARGET_ARM
2154 /* generic itof for unsigned long long case */
2155 static void gen_cvt_itof1(int t)
2157 #ifdef TCC_TARGET_ARM64
2158 gen_cvt_itof(t);
2159 #else
2160 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2161 (VT_LLONG | VT_UNSIGNED)) {
2163 if (t == VT_FLOAT)
2164 vpush_global_sym(&func_old_type, TOK___floatundisf);
2165 #if LDOUBLE_SIZE != 8
2166 else if (t == VT_LDOUBLE)
2167 vpush_global_sym(&func_old_type, TOK___floatundixf);
2168 #endif
2169 else
2170 vpush_global_sym(&func_old_type, TOK___floatundidf);
2171 vrott(2);
2172 gfunc_call(1);
2173 vpushi(0);
2174 vtop->r = reg_fret(t);
2175 } else {
2176 gen_cvt_itof(t);
2178 #endif
2180 #endif
2182 /* generic ftoi for unsigned long long case */
2183 static void gen_cvt_ftoi1(int t)
2185 #ifdef TCC_TARGET_ARM64
2186 gen_cvt_ftoi(t);
2187 #else
2188 int st;
2190 if (t == (VT_LLONG | VT_UNSIGNED)) {
2191 /* not handled natively */
2192 st = vtop->type.t & VT_BTYPE;
2193 if (st == VT_FLOAT)
2194 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2195 #if LDOUBLE_SIZE != 8
2196 else if (st == VT_LDOUBLE)
2197 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2198 #endif
2199 else
2200 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2201 vrott(2);
2202 gfunc_call(1);
2203 vpushi(0);
2204 vtop->r = REG_IRET;
2205 vtop->r2 = REG_LRET;
2206 } else {
2207 gen_cvt_ftoi(t);
2209 #endif
2212 /* force char or short cast */
2213 static void force_charshort_cast(int t)
2215 int bits, dbt;
2216 dbt = t & VT_BTYPE;
2217 /* XXX: add optimization if lvalue : just change type and offset */
2218 if (dbt == VT_BYTE)
2219 bits = 8;
2220 else
2221 bits = 16;
2222 if (t & VT_UNSIGNED) {
2223 vpushi((1 << bits) - 1);
2224 gen_op('&');
2225 } else {
2226 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2227 bits = 64 - bits;
2228 else
2229 bits = 32 - bits;
2230 vpushi(bits);
2231 gen_op(TOK_SHL);
2232 /* result must be signed or the SAR is converted to an SHL
2233 This was not the case when "t" was a signed short
2234 and the last value on the stack was an unsigned int */
2235 vtop->type.t &= ~VT_UNSIGNED;
2236 vpushi(bits);
2237 gen_op(TOK_SAR);
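/* Rough source-level equivalent of the delayed cast above, assuming a
   32-bit int on the value stack (sketch only):

       unsigned char uc = x;   // emitted as: x & 0xff
       signed char   sc = x;   // emitted as: (x << 24) >> 24  (arithmetic shift)
       short          s = x;   // emitted as: (x << 16) >> 16

   i.e. unsigned narrow types are masked, signed ones are sign-extended
   with a shift pair; for a long long source the shifts are taken
   relative to 64 bits instead of 32. */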
2241 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2242 static void gen_cast(CType *type)
2244 int sbt, dbt, sf, df, c, p;
2246 /* special delayed cast for char/short */
2247 /* XXX: in some cases (multiple cascaded casts), it may still
2248 be incorrect */
2249 if (vtop->r & VT_MUSTCAST) {
2250 vtop->r &= ~VT_MUSTCAST;
2251 force_charshort_cast(vtop->type.t);
2254 /* bitfields first get cast to ints */
2255 if (vtop->type.t & VT_BITFIELD) {
2256 gv(RC_INT);
2259 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2260 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2262 if (sbt != dbt) {
2263 sf = is_float(sbt);
2264 df = is_float(dbt);
2265 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2266 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2267 if (c) {
2268 /* constant case: we can do it now */
2269 /* XXX: in ISO C, we cannot do it if the conversion is erroneous */
2270 if (sbt == VT_FLOAT)
2271 vtop->c.ld = vtop->c.f;
2272 else if (sbt == VT_DOUBLE)
2273 vtop->c.ld = vtop->c.d;
2275 if (df) {
2276 if ((sbt & VT_BTYPE) == VT_LLONG) {
2277 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2278 vtop->c.ld = vtop->c.i;
2279 else
2280 vtop->c.ld = -(long double)-vtop->c.i;
2281 } else if(!sf) {
2282 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2283 vtop->c.ld = (uint32_t)vtop->c.i;
2284 else
2285 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2288 if (dbt == VT_FLOAT)
2289 vtop->c.f = (float)vtop->c.ld;
2290 else if (dbt == VT_DOUBLE)
2291 vtop->c.d = (double)vtop->c.ld;
2292 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2293 vtop->c.i = vtop->c.ld;
2294 } else if (sf && dbt == VT_BOOL) {
2295 vtop->c.i = (vtop->c.ld != 0);
2296 } else {
2297 if(sf)
2298 vtop->c.i = vtop->c.ld;
2299 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2301 else if (sbt & VT_UNSIGNED)
2302 vtop->c.i = (uint32_t)vtop->c.i;
2303 #if PTR_SIZE == 8
2304 else if (sbt == VT_PTR)
2306 #endif
2307 else if (sbt != VT_LLONG)
2308 vtop->c.i = ((uint32_t)vtop->c.i |
2309 -(vtop->c.i & 0x80000000));
2311 if (dbt == (VT_LLONG|VT_UNSIGNED))
2313 else if (dbt == VT_BOOL)
2314 vtop->c.i = (vtop->c.i != 0);
2315 #if PTR_SIZE == 8
2316 else if (dbt == VT_PTR)
2318 #endif
2319 else if (dbt != VT_LLONG) {
2320 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2321 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2322 0xffffffff);
2323 vtop->c.i &= m;
2324 if (!(dbt & VT_UNSIGNED))
2325 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2328 } else if (p && dbt == VT_BOOL) {
2329 vtop->r = VT_CONST;
2330 vtop->c.i = 1;
2331 } else {
2332 /* non constant case: generate code */
2333 if (sf && df) {
2334 /* convert from fp to fp */
2335 gen_cvt_ftof(dbt);
2336 } else if (df) {
2337 /* convert int to fp */
2338 gen_cvt_itof1(dbt);
2339 } else if (sf) {
2340 /* convert fp to int */
2341 if (dbt == VT_BOOL) {
2342 vpushi(0);
2343 gen_op(TOK_NE);
2344 } else {
2345 /* we handle char/short/etc... with generic code */
2346 if (dbt != (VT_INT | VT_UNSIGNED) &&
2347 dbt != (VT_LLONG | VT_UNSIGNED) &&
2348 dbt != VT_LLONG)
2349 dbt = VT_INT;
2350 gen_cvt_ftoi1(dbt);
2351 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2352 /* additional cast for char/short... */
2353 vtop->type.t = dbt;
2354 gen_cast(type);
2357 #if PTR_SIZE == 4
2358 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2359 if ((sbt & VT_BTYPE) != VT_LLONG) {
2360 /* scalar to long long */
2361 /* machine independent conversion */
2362 gv(RC_INT);
2363 /* generate high word */
2364 if (sbt == (VT_INT | VT_UNSIGNED)) {
2365 vpushi(0);
2366 gv(RC_INT);
2367 } else {
2368 if (sbt == VT_PTR) {
2369 /* cast from pointer to int before we apply
2370 shift operation, which pointers don't support */
2371 gen_cast(&int_type);
2373 gv_dup();
2374 vpushi(31);
2375 gen_op(TOK_SAR);
2377 /* patch second register */
2378 vtop[-1].r2 = vtop->r;
2379 vpop();
2381 #else
2382 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2383 (dbt & VT_BTYPE) == VT_PTR ||
2384 (dbt & VT_BTYPE) == VT_FUNC) {
2385 if ((sbt & VT_BTYPE) != VT_LLONG &&
2386 (sbt & VT_BTYPE) != VT_PTR &&
2387 (sbt & VT_BTYPE) != VT_FUNC) {
2388 /* need to convert from 32bit to 64bit */
2389 gv(RC_INT);
2390 if (sbt != (VT_INT | VT_UNSIGNED)) {
2391 #if defined(TCC_TARGET_ARM64)
2392 gen_cvt_sxtw();
2393 #elif defined(TCC_TARGET_X86_64)
2394 int r = gv(RC_INT);
2395 /* x86_64 specific: movslq */
2396 o(0x6348);
2397 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2398 #else
2399 #error
2400 #endif
2403 #endif
2404 } else if (dbt == VT_BOOL) {
2405 /* scalar to bool */
2406 vpushi(0);
2407 gen_op(TOK_NE);
2408 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2409 (dbt & VT_BTYPE) == VT_SHORT) {
2410 if (sbt == VT_PTR) {
2411 vtop->type.t = VT_INT;
2412 tcc_warning("nonportable conversion from pointer to char/short");
2414 force_charshort_cast(dbt);
2415 #if PTR_SIZE == 4
2416 } else if ((dbt & VT_BTYPE) == VT_INT) {
2417 /* scalar to int */
2418 if ((sbt & VT_BTYPE) == VT_LLONG) {
2419 /* from long long: just take low order word */
2420 lexpand();
2421 vpop();
2423 /* if lvalue and single word type, nothing to do because
2424 the lvalue already contains the real type size (see
2425 VT_LVAL_xxx constants) */
2426 #endif
2429 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2430 /* if we are casting between pointer types,
2431 we must update the VT_LVAL_xxx size */
2432 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2433 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2435 vtop->type = *type;
2438 /* return type size as known at compile time. Put alignment at 'a' */
2439 ST_FUNC int type_size(CType *type, int *a)
2441 Sym *s;
2442 int bt;
2444 bt = type->t & VT_BTYPE;
2445 if (bt == VT_STRUCT) {
2446 /* struct/union */
2447 s = type->ref;
2448 *a = s->r;
2449 return s->c;
2450 } else if (bt == VT_PTR) {
2451 if (type->t & VT_ARRAY) {
2452 int ts;
2454 s = type->ref;
2455 ts = type_size(&s->type, a);
2457 if (ts < 0 && s->c < 0)
2458 ts = -ts;
2460 return ts * s->c;
2461 } else {
2462 *a = PTR_SIZE;
2463 return PTR_SIZE;
2465 } else if (bt == VT_LDOUBLE) {
2466 *a = LDOUBLE_ALIGN;
2467 return LDOUBLE_SIZE;
2468 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2469 #ifdef TCC_TARGET_I386
2470 #ifdef TCC_TARGET_PE
2471 *a = 8;
2472 #else
2473 *a = 4;
2474 #endif
2475 #elif defined(TCC_TARGET_ARM)
2476 #ifdef TCC_ARM_EABI
2477 *a = 8;
2478 #else
2479 *a = 4;
2480 #endif
2481 #else
2482 *a = 8;
2483 #endif
2484 return 8;
2485 } else if (bt == VT_INT || bt == VT_FLOAT) {
2486 *a = 4;
2487 return 4;
2488 } else if (bt == VT_SHORT) {
2489 *a = 2;
2490 return 2;
2491 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2492 *a = 8;
2493 return 16;
2494 } else if (bt == VT_ENUM) {
2495 *a = 4;
2496 /* Enums might be incomplete, so don't just return '4' here. */
2497 return type->ref->c;
2498 } else {
2499 /* char, void, function, _Bool */
2500 *a = 1;
2501 return 1;
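/* Illustrative results, assuming a typical x86-64 Linux target
   (PTR_SIZE == 8, LDOUBLE_SIZE == 16); other targets follow the
   #ifdef ladder above:

       int           -> size 4,  align 4
       long long     -> size 8,  align 8   (align 4 on plain i386)
       long double   -> size 16, align 16
       any pointer   -> size 8,  align 8
       struct/union  -> s->c and s->r as filled in by struct_layout()
*/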
2505 /* push type size as known at runtime on top of value stack. Put
2506 alignment at 'a' */
2507 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2509 if (type->t & VT_VLA) {
2510 type_size(&type->ref->type, a);
2511 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2512 } else {
2513 vpushi(type_size(type, a));
2517 static void vla_sp_restore(void) {
2518 if (vlas_in_scope) {
2519 gen_vla_sp_restore(vla_sp_loc);
2523 static void vla_sp_restore_root(void) {
2524 if (vlas_in_scope) {
2525 gen_vla_sp_restore(vla_sp_root_loc);
2529 /* return the pointed type of t */
2530 static inline CType *pointed_type(CType *type)
2532 return &type->ref->type;
2535 /* modify type so that it is a pointer to the given type. */
2536 ST_FUNC void mk_pointer(CType *type)
2538 Sym *s;
2539 s = sym_push(SYM_FIELD, type, 0, -1);
2540 type->t = VT_PTR | (type->t & ~VT_TYPE);
2541 type->ref = s;
2544 /* compare function types. OLD functions match any new functions */
2545 static int is_compatible_func(CType *type1, CType *type2)
2547 Sym *s1, *s2;
2549 s1 = type1->ref;
2550 s2 = type2->ref;
2551 if (!is_compatible_types(&s1->type, &s2->type))
2552 return 0;
2553 /* check func_call */
2554 if (s1->a.func_call != s2->a.func_call)
2555 return 0;
2556 /* XXX: not complete */
2557 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2558 return 1;
2559 if (s1->c != s2->c)
2560 return 0;
2561 while (s1 != NULL) {
2562 if (s2 == NULL)
2563 return 0;
2564 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2565 return 0;
2566 s1 = s1->next;
2567 s2 = s2->next;
2569 if (s2)
2570 return 0;
2571 return 1;
2574 /* return true if type1 and type2 are the same. If unqualified is
2575 true, qualifiers on the types are ignored.
2577 - enums are not checked, just as in gcc's __builtin_types_compatible_p ()
2579 static int compare_types(CType *type1, CType *type2, int unqualified)
2581 int bt1, t1, t2;
2583 t1 = type1->t & VT_TYPE;
2584 t2 = type2->t & VT_TYPE;
2585 if (unqualified) {
2586 /* strip qualifiers before comparing */
2587 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2588 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2590 /* Default vs. explicit signedness only matters for char */
2591 if ((t1 & VT_BTYPE) != VT_BYTE) {
2592 t1 &= ~VT_DEFSIGN;
2593 t2 &= ~VT_DEFSIGN;
2595 /* An enum is compatible with (unsigned) int. Ideally we would
2596 store the enums signedness in type->ref.a.<some_bit> and
2597 only accept unsigned enums with unsigned int and vice versa.
2598 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2599 from pointer target types, so we can't add it here either. */
2600 if ((t1 & VT_BTYPE) == VT_ENUM) {
2601 t1 = VT_INT;
2602 if (type1->ref->a.unsigned_enum)
2603 t1 |= VT_UNSIGNED;
2605 if ((t2 & VT_BTYPE) == VT_ENUM) {
2606 t2 = VT_INT;
2607 if (type2->ref->a.unsigned_enum)
2608 t2 |= VT_UNSIGNED;
2610 /* XXX: bitfields ? */
2611 if (t1 != t2)
2612 return 0;
2613 /* test more complicated cases */
2614 bt1 = t1 & VT_BTYPE;
2615 if (bt1 == VT_PTR) {
2616 type1 = pointed_type(type1);
2617 type2 = pointed_type(type2);
2618 return is_compatible_types(type1, type2);
2619 } else if (bt1 == VT_STRUCT) {
2620 return (type1->ref == type2->ref);
2621 } else if (bt1 == VT_FUNC) {
2622 return is_compatible_func(type1, type2);
2623 } else {
2624 return 1;
2628 /* return true if type1 and type2 are exactly the same (including
2629 qualifiers).
2631 static int is_compatible_types(CType *type1, CType *type2)
2633 return compare_types(type1,type2,0);
2636 /* return true if type1 and type2 are the same (ignoring qualifiers).
2638 static int is_compatible_parameter_types(CType *type1, CType *type2)
2640 return compare_types(type1,type2,1);
2643 /* print a type. If 'varstr' is not NULL, then the variable is also
2644 printed in the type */
2645 /* XXX: union */
2646 /* XXX: add array and function pointers */
2647 static void type_to_str(char *buf, int buf_size,
2648 CType *type, const char *varstr)
2650 int bt, v, t;
2651 Sym *s, *sa;
2652 char buf1[256];
2653 const char *tstr;
2655 t = type->t;
2656 bt = t & VT_BTYPE;
2657 buf[0] = '\0';
2658 if (t & VT_CONSTANT)
2659 pstrcat(buf, buf_size, "const ");
2660 if (t & VT_VOLATILE)
2661 pstrcat(buf, buf_size, "volatile ");
2662 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2663 pstrcat(buf, buf_size, "unsigned ");
2664 else if (t & VT_DEFSIGN)
2665 pstrcat(buf, buf_size, "signed ");
2666 if (t & VT_EXTERN)
2667 pstrcat(buf, buf_size, "extern ");
2668 if (t & VT_STATIC)
2669 pstrcat(buf, buf_size, "static ");
2670 if (t & VT_TYPEDEF)
2671 pstrcat(buf, buf_size, "typedef ");
2672 if (t & VT_INLINE)
2673 pstrcat(buf, buf_size, "inline ");
2674 buf_size -= strlen(buf);
2675 buf += strlen(buf);
2676 switch(bt) {
2677 case VT_VOID:
2678 tstr = "void";
2679 goto add_tstr;
2680 case VT_BOOL:
2681 tstr = "_Bool";
2682 goto add_tstr;
2683 case VT_BYTE:
2684 tstr = "char";
2685 goto add_tstr;
2686 case VT_SHORT:
2687 tstr = "short";
2688 goto add_tstr;
2689 case VT_INT:
2690 tstr = "int";
2691 goto add_tstr;
2692 case VT_LONG:
2693 tstr = "long";
2694 goto add_tstr;
2695 case VT_LLONG:
2696 tstr = "long long";
2697 goto add_tstr;
2698 case VT_FLOAT:
2699 tstr = "float";
2700 goto add_tstr;
2701 case VT_DOUBLE:
2702 tstr = "double";
2703 goto add_tstr;
2704 case VT_LDOUBLE:
2705 tstr = "long double";
2706 add_tstr:
2707 pstrcat(buf, buf_size, tstr);
2708 break;
2709 case VT_ENUM:
2710 case VT_STRUCT:
2711 if (bt == VT_STRUCT)
2712 tstr = "struct ";
2713 else
2714 tstr = "enum ";
2715 pstrcat(buf, buf_size, tstr);
2716 v = type->ref->v & ~SYM_STRUCT;
2717 if (v >= SYM_FIRST_ANOM)
2718 pstrcat(buf, buf_size, "<anonymous>");
2719 else
2720 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2721 break;
2722 case VT_FUNC:
2723 s = type->ref;
2724 type_to_str(buf, buf_size, &s->type, varstr);
2725 pstrcat(buf, buf_size, "(");
2726 sa = s->next;
2727 while (sa != NULL) {
2728 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2729 pstrcat(buf, buf_size, buf1);
2730 sa = sa->next;
2731 if (sa)
2732 pstrcat(buf, buf_size, ", ");
2734 pstrcat(buf, buf_size, ")");
2735 goto no_var;
2736 case VT_PTR:
2737 s = type->ref;
2738 if (t & VT_ARRAY) {
2739 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2740 type_to_str(buf, buf_size, &s->type, buf1);
2741 goto no_var;
2743 pstrcpy(buf1, sizeof(buf1), "*");
2744 if (t & VT_CONSTANT)
2745 pstrcat(buf1, buf_size, "const ");
2746 if (t & VT_VOLATILE)
2747 pstrcat(buf1, buf_size, "volatile ");
2748 if (varstr)
2749 pstrcat(buf1, sizeof(buf1), varstr);
2750 type_to_str(buf, buf_size, &s->type, buf1);
2751 goto no_var;
2753 if (varstr) {
2754 pstrcat(buf, buf_size, " ");
2755 pstrcat(buf, buf_size, varstr);
2757 no_var: ;
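/* Example renderings (sketch; the variable name comes from 'varstr'):

       pointer to const unsigned int, varstr "p"  ->  "const unsigned int *p"
       function int(char, double), varstr NULL    ->  "int(char, double)"
       array short[8], varstr "a"                 ->  "short a[8]"
*/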
2760 /* verify type compatibility to store vtop in 'dt' type, and generate
2761 casts if needed. */
2762 static void gen_assign_cast(CType *dt)
2764 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2765 char buf1[256], buf2[256];
2766 int dbt, sbt;
2768 st = &vtop->type; /* source type */
2769 dbt = dt->t & VT_BTYPE;
2770 sbt = st->t & VT_BTYPE;
2771 if (sbt == VT_VOID || dbt == VT_VOID) {
2772 if (sbt == VT_VOID && dbt == VT_VOID)
2773 ; /*
2774 It is OK if both are void
2775 A test program:
2776 void func1() {}
2777 void func2() {
2778 return func1();
2780 gcc accepts this program
2782 else
2783 tcc_error("cannot cast from/to void");
2785 if (dt->t & VT_CONSTANT)
2786 tcc_warning("assignment of read-only location");
2787 switch(dbt) {
2788 case VT_PTR:
2789 /* special cases for pointers */
2790 /* '0' can also be a pointer */
2791 if (is_null_pointer(vtop))
2792 goto type_ok;
2793 /* accept implicit pointer to integer cast with warning */
2794 if (is_integer_btype(sbt)) {
2795 tcc_warning("assignment makes pointer from integer without a cast");
2796 goto type_ok;
2798 type1 = pointed_type(dt);
2799 /* a function is implicitly a function pointer */
2800 if (sbt == VT_FUNC) {
2801 if ((type1->t & VT_BTYPE) != VT_VOID &&
2802 !is_compatible_types(pointed_type(dt), st))
2803 tcc_warning("assignment from incompatible pointer type");
2804 goto type_ok;
2806 if (sbt != VT_PTR)
2807 goto error;
2808 type2 = pointed_type(st);
2809 if ((type1->t & VT_BTYPE) == VT_VOID ||
2810 (type2->t & VT_BTYPE) == VT_VOID) {
2811 /* void * can match anything */
2812 } else {
2813 /* exact type match, except for qualifiers */
2814 tmp_type1 = *type1;
2815 tmp_type2 = *type2;
2816 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2817 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2818 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2819 /* Like GCC don't warn by default for merely changes
2820 in pointer target signedness. Do warn for different
2821 base types, though, in particular for unsigned enums
2822 and signed int targets. */
2823 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2824 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2825 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2827 else
2828 tcc_warning("assignment from incompatible pointer type");
2831 /* check const and volatile */
2832 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2833 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2834 tcc_warning("assignment discards qualifiers from pointer target type");
2835 break;
2836 case VT_BYTE:
2837 case VT_SHORT:
2838 case VT_INT:
2839 case VT_LLONG:
2840 if (sbt == VT_PTR || sbt == VT_FUNC) {
2841 tcc_warning("assignment makes integer from pointer without a cast");
2842 } else if (sbt == VT_STRUCT) {
2843 goto case_VT_STRUCT;
2845 /* XXX: more tests */
2846 break;
2847 case VT_STRUCT:
2848 case_VT_STRUCT:
2849 tmp_type1 = *dt;
2850 tmp_type2 = *st;
2851 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2852 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2853 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2854 error:
2855 type_to_str(buf1, sizeof(buf1), st, NULL);
2856 type_to_str(buf2, sizeof(buf2), dt, NULL);
2857 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2859 break;
2861 type_ok:
2862 gen_cast(dt);
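/* Examples of assignments and the diagnostics the checks above produce
   (sketch; messages quoted from the calls above, names made up):

       int *p; int n; const int *cp;
       struct A { int x; } a; struct B { int y; } b;

       p = n;    // "assignment makes pointer from integer without a cast"
       n = p;    // "assignment makes integer from pointer without a cast"
       p = cp;   // "assignment discards qualifiers from pointer target type"
       p = 0;    // accepted: null pointer constant
       a = b;    // "cannot cast 'struct B' to 'struct A'"
*/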
2865 /* store vtop in lvalue pushed on stack */
2866 ST_FUNC void vstore(void)
2868 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2870 ft = vtop[-1].type.t;
2871 sbt = vtop->type.t & VT_BTYPE;
2872 dbt = ft & VT_BTYPE;
2873 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2874 (sbt == VT_INT && dbt == VT_SHORT))
2875 && !(vtop->type.t & VT_BITFIELD)) {
2876 /* optimize char/short casts */
2877 delayed_cast = VT_MUSTCAST;
2878 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2879 ((1 << VT_STRUCT_SHIFT) - 1));
2880 /* XXX: factorize */
2881 if (ft & VT_CONSTANT)
2882 tcc_warning("assignment of read-only location");
2883 } else {
2884 delayed_cast = 0;
2885 if (!(ft & VT_BITFIELD))
2886 gen_assign_cast(&vtop[-1].type);
2889 if (sbt == VT_STRUCT) {
2890 /* if structure, only generate pointer */
2891 /* structure assignment : generate memcpy */
2892 /* XXX: optimize if small size */
2893 size = type_size(&vtop->type, &align);
2895 /* destination */
2896 vswap();
2897 vtop->type.t = VT_PTR;
2898 gaddrof();
2900 /* address of memcpy() */
2901 #ifdef TCC_ARM_EABI
2902 if(!(align & 7))
2903 vpush_global_sym(&func_old_type, TOK_memcpy8);
2904 else if(!(align & 3))
2905 vpush_global_sym(&func_old_type, TOK_memcpy4);
2906 else
2907 #endif
2908 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2909 vpush_global_sym(&func_old_type, TOK_memmove);
2911 vswap();
2912 /* source */
2913 vpushv(vtop - 2);
2914 vtop->type.t = VT_PTR;
2915 gaddrof();
2916 /* type size */
2917 vpushi(size);
2918 gfunc_call(3);
2920 /* leave source on stack */
2921 } else if (ft & VT_BITFIELD) {
2922 /* bitfield store handling */
2924 /* save lvalue as expression result (example: s.b = s.a = n;) */
2925 vdup(), vtop[-1] = vtop[-2];
2927 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2928 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2929 /* remove bit field info to avoid loops */
2930 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2932 if((ft & VT_BTYPE) == VT_BOOL) {
2933 gen_cast(&vtop[-1].type);
2934 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2937 /* duplicate destination */
2938 vdup();
2939 vtop[-1] = vtop[-2];
2941 /* mask and shift source */
2942 if((ft & VT_BTYPE) != VT_BOOL) {
2943 if((ft & VT_BTYPE) == VT_LLONG) {
2944 vpushll((1ULL << bit_size) - 1ULL);
2945 } else {
2946 vpushi((1 << bit_size) - 1);
2948 gen_op('&');
2950 vpushi(bit_pos);
2951 gen_op(TOK_SHL);
2952 /* load destination, mask and or with source */
2953 vswap();
2954 if((ft & VT_BTYPE) == VT_LLONG) {
2955 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2956 } else {
2957 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2959 gen_op('&');
2960 gen_op('|');
2961 /* store result */
2962 vstore();
2963 /* ... and discard */
2964 vpop();
2966 } else {
2967 #ifdef CONFIG_TCC_BCHECK
2968 /* bound check case */
2969 if (vtop[-1].r & VT_MUSTBOUND) {
2970 vswap();
2971 gbound();
2972 vswap();
2974 #endif
2975 rc = RC_INT;
2976 if (is_float(ft)) {
2977 rc = RC_FLOAT;
2978 #ifdef TCC_TARGET_X86_64
2979 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2980 rc = RC_ST0;
2981 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2982 rc = RC_FRET;
2984 #endif
2986 r = gv(rc); /* generate value */
2987 /* if lvalue was saved on stack, must read it */
2988 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2989 SValue sv;
2990 t = get_reg(RC_INT);
2991 #if PTR_SIZE == 8
2992 sv.type.t = VT_PTR;
2993 #else
2994 sv.type.t = VT_INT;
2995 #endif
2996 sv.r = VT_LOCAL | VT_LVAL;
2997 sv.c.i = vtop[-1].c.i;
2998 load(t, &sv);
2999 vtop[-1].r = t | VT_LVAL;
3001 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3002 #if PTR_SIZE == 8
3003 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3004 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3005 #else
3006 if ((ft & VT_BTYPE) == VT_LLONG) {
3007 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3008 #endif
3009 vtop[-1].type.t = load_type;
3010 store(r, vtop - 1);
3011 vswap();
3012 /* convert to int to increment easily */
3013 vtop->type.t = addr_type;
3014 gaddrof();
3015 vpushi(load_size);
3016 gen_op('+');
3017 vtop->r |= VT_LVAL;
3018 vswap();
3019 vtop[-1].type.t = load_type;
3020 /* XXX: it works because r2 is spilled last ! */
3021 store(vtop->r2, vtop - 1);
3022 } else {
3023 store(r, vtop - 1);
3026 vswap();
3027 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3028 vtop->r |= delayed_cast;
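/* Sketch of what the struct branch above amounts to at the source level
   (names made up):

       struct big { char buf[64]; } d, s;
       d = s;    // emitted roughly as: memmove(&d, &s, sizeof d);

   On ARM EABI the aligned memcpy4/memcpy8 helpers may be used instead. */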
3032 /* handle pre/post increment and decrement: 'post' is true for a post operation; c is the token ++ or -- */
3033 ST_FUNC void inc(int post, int c)
3035 test_lvalue();
3036 vdup(); /* save lvalue */
3037 if (post) {
3038 gv_dup(); /* duplicate value */
3039 vrotb(3);
3040 vrotb(3);
3042 /* add constant */
3043 vpushi(c - TOK_MID);
3044 gen_op('+');
3045 vstore(); /* store value */
3046 if (post)
3047 vpop(); /* if post op, return saved value */
3050 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3052 /* read the string */
3053 if (tok != TOK_STR)
3054 expect(msg);
3055 cstr_new(astr);
3056 while (tok == TOK_STR) {
3057 /* XXX: add \0 handling too ? */
3058 cstr_cat(astr, tokc.str.data, -1);
3059 next();
3061 cstr_ccat(astr, '\0');
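/* Adjacent string literals are concatenated here, so e.g. (sketch)

       __attribute__((section(".text" ".hot")))

   yields the single section name ".text.hot". */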
3064 /* If I is >= 1 and a power of two, returns log2(i)+1.
3065 If I is 0 returns 0. */
3066 static int exact_log2p1(int i)
3068 int ret;
3069 if (!i)
3070 return 0;
3071 for (ret = 1; i >= 1 << 8; ret += 8)
3072 i >>= 8;
3073 if (i >= 1 << 4)
3074 ret += 4, i >>= 4;
3075 if (i >= 1 << 2)
3076 ret += 2, i >>= 2;
3077 if (i >= 1 << 1)
3078 ret++;
3079 return ret;
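/* Example values (derived from the code above):

       exact_log2p1(0)    == 0
       exact_log2p1(1)    == 1
       exact_log2p1(8)    == 4
       exact_log2p1(4096) == 13

   This is what gets stored in ad->a.aligned by parse_attribute() below,
   so that "1 << (aligned - 1)" recovers the requested alignment. */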
3082 /* Parse GNUC __attribute__ extension. Currently, the following
3083 extensions are recognized:
3084 - aligned(n) : set data/function alignment.
3085 - packed : force data alignment to 1
3086 - section(x) : generate data/code in this section.
3087 - unused : currently ignored, but may be used someday.
3088 - regparm(n) : pass function parameters in registers (i386 only)
3090 static void parse_attribute(AttributeDef *ad)
3092 int t, n;
3093 CString astr;
3095 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3096 next();
3097 skip('(');
3098 skip('(');
3099 while (tok != ')') {
3100 if (tok < TOK_IDENT)
3101 expect("attribute name");
3102 t = tok;
3103 next();
3104 switch(t) {
3105 case TOK_SECTION1:
3106 case TOK_SECTION2:
3107 skip('(');
3108 parse_mult_str(&astr, "section name");
3109 ad->section = find_section(tcc_state, (char *)astr.data);
3110 skip(')');
3111 cstr_free(&astr);
3112 break;
3113 case TOK_ALIAS1:
3114 case TOK_ALIAS2:
3115 skip('(');
3116 parse_mult_str(&astr, "alias(\"target\")");
3117 ad->alias_target = /* save string as token, for later */
3118 tok_alloc((char*)astr.data, astr.size-1)->tok;
3119 skip(')');
3120 cstr_free(&astr);
3121 break;
3122 case TOK_VISIBILITY1:
3123 case TOK_VISIBILITY2:
3124 skip('(');
3125 parse_mult_str(&astr,
3126 "visibility(\"default|hidden|internal|protected\")");
3127 if (!strcmp (astr.data, "default"))
3128 ad->a.visibility = STV_DEFAULT;
3129 else if (!strcmp (astr.data, "hidden"))
3130 ad->a.visibility = STV_HIDDEN;
3131 else if (!strcmp (astr.data, "internal"))
3132 ad->a.visibility = STV_INTERNAL;
3133 else if (!strcmp (astr.data, "protected"))
3134 ad->a.visibility = STV_PROTECTED;
3135 else
3136 expect("visibility(\"default|hidden|internal|protected\")");
3137 skip(')');
3138 cstr_free(&astr);
3139 break;
3140 case TOK_ALIGNED1:
3141 case TOK_ALIGNED2:
3142 if (tok == '(') {
3143 next();
3144 n = expr_const();
3145 if (n <= 0 || (n & (n - 1)) != 0)
3146 tcc_error("alignment must be a positive power of two");
3147 skip(')');
3148 } else {
3149 n = MAX_ALIGN;
3151 ad->a.aligned = exact_log2p1(n);
3152 if (n != 1 << (ad->a.aligned - 1))
3153 tcc_error("alignment of %d is larger than implemented", n);
3154 break;
3155 case TOK_PACKED1:
3156 case TOK_PACKED2:
3157 ad->a.packed = 1;
3158 break;
3159 case TOK_WEAK1:
3160 case TOK_WEAK2:
3161 ad->a.weak = 1;
3162 break;
3163 case TOK_UNUSED1:
3164 case TOK_UNUSED2:
3165 /* currently, no need to handle it because tcc does not
3166 track unused objects */
3167 break;
3168 case TOK_NORETURN1:
3169 case TOK_NORETURN2:
3170 /* currently ignored: tcc makes no use of the noreturn
3171 information */
3172 break;
3173 case TOK_CDECL1:
3174 case TOK_CDECL2:
3175 case TOK_CDECL3:
3176 ad->a.func_call = FUNC_CDECL;
3177 break;
3178 case TOK_STDCALL1:
3179 case TOK_STDCALL2:
3180 case TOK_STDCALL3:
3181 ad->a.func_call = FUNC_STDCALL;
3182 break;
3183 #ifdef TCC_TARGET_I386
3184 case TOK_REGPARM1:
3185 case TOK_REGPARM2:
3186 skip('(');
3187 n = expr_const();
3188 if (n > 3)
3189 n = 3;
3190 else if (n < 0)
3191 n = 0;
3192 if (n > 0)
3193 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3194 skip(')');
3195 break;
3196 case TOK_FASTCALL1:
3197 case TOK_FASTCALL2:
3198 case TOK_FASTCALL3:
3199 ad->a.func_call = FUNC_FASTCALLW;
3200 break;
3201 #endif
3202 case TOK_MODE:
3203 skip('(');
3204 switch(tok) {
3205 case TOK_MODE_DI:
3206 ad->a.mode = VT_LLONG + 1;
3207 break;
3208 case TOK_MODE_QI:
3209 ad->a.mode = VT_BYTE + 1;
3210 break;
3211 case TOK_MODE_HI:
3212 ad->a.mode = VT_SHORT + 1;
3213 break;
3214 case TOK_MODE_SI:
3215 case TOK_MODE_word:
3216 ad->a.mode = VT_INT + 1;
3217 break;
3218 default:
3219 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3220 break;
3222 next();
3223 skip(')');
3224 break;
3225 case TOK_DLLEXPORT:
3226 ad->a.func_export = 1;
3227 break;
3228 case TOK_DLLIMPORT:
3229 ad->a.func_import = 1;
3230 break;
3231 default:
3232 if (tcc_state->warn_unsupported)
3233 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3234 /* skip parameters */
3235 if (tok == '(') {
3236 int parenthesis = 0;
3237 do {
3238 if (tok == '(')
3239 parenthesis++;
3240 else if (tok == ')')
3241 parenthesis--;
3242 next();
3243 } while (parenthesis && tok != -1);
3245 break;
3247 if (tok != ',')
3248 break;
3249 next();
3251 skip(')');
3252 skip(')');
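/* Declarations this parser accepts, for illustration (identifiers such
   as buf, pkt, new_impl and wide_t are made-up names):

       static int buf[64] __attribute__((aligned(16), section(".mydata")));
       struct pkt { char tag; int val; } __attribute__((packed));
       int old_api(void) __attribute__((weak, alias("new_impl")));
       typedef int __attribute__((mode(DI))) wide_t;   // 64-bit via __mode__
*/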
3256 static Sym * find_field (CType *type, int v)
3258 Sym *s = type->ref;
3259 v |= SYM_FIELD;
3260 while ((s = s->next) != NULL) {
3261 if ((s->v & SYM_FIELD) &&
3262 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3263 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3264 Sym *ret = find_field (&s->type, v);
3265 if (ret)
3266 return ret;
3268 if (s->v == v)
3269 break;
3271 return s;
3274 static void struct_add_offset (Sym *s, int offset)
3276 while ((s = s->next) != NULL) {
3277 if ((s->v & SYM_FIELD) &&
3278 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3279 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3280 struct_add_offset(s->type.ref, offset);
3281 } else
3282 s->c += offset;
3286 static void struct_layout(CType *type, AttributeDef *ad)
3288 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3289 int pcc = !tcc_state->ms_bitfields;
3290 int packwarn = tcc_state->warn_gcc_compat;
3291 int typealign, bit_size, size;
3293 Sym *f;
3294 if (ad->a.aligned)
3295 maxalign = 1 << (ad->a.aligned - 1);
3296 else
3297 maxalign = 1;
3298 offset = 0;
3299 c = 0;
3300 bit_pos = 0;
3301 prevbt = VT_STRUCT; /* make it never match */
3302 prev_bit_size = 0;
3303 size = 0;
3305 for (f = type->ref->next; f; f = f->next) {
3306 size = type_size(&f->type, &typealign);
3307 if (f->type.t & VT_BITFIELD)
3308 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3309 else
3310 bit_size = -1;
3311 if (bit_size == 0 && pcc) {
3312 /* Zero-width bit-fields in PCC mode aren't affected
3313 by any packing (attribute or pragma). */
3314 align = typealign;
3315 } else if (f->r > 1) {
3316 align = f->r;
3317 } else if (ad->a.packed || f->r == 1) {
3318 align = 1;
3319 /* Packed fields or packed records don't let the base type
3320 influence the record type's alignment. */
3321 typealign = 1;
3322 } else {
3323 align = typealign;
3325 if (type->ref->type.t != TOK_STRUCT) {
3326 if (pcc && bit_size >= 0)
3327 size = (bit_size + 7) >> 3;
3328 /* Bit position is already zero from our caller. */
3329 offset = 0;
3330 if (size > c)
3331 c = size;
3332 } else if (bit_size < 0) {
3333 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3334 prevbt = VT_STRUCT;
3335 prev_bit_size = 0;
3336 c = (c + addbytes + align - 1) & -align;
3337 offset = c;
3338 if (size > 0)
3339 c += size;
3340 bit_pos = 0;
3341 } else {
3342 /* A bit-field. Layout is more complicated. There are two
3343 options TCC implements: PCC compatible and MS compatible
3344 (PCC compatible is what GCC uses for almost all targets).
3345 In PCC layout the overall size of the struct (in c) is
3346 _excluding_ the current run of bit-fields (that is,
3347 there's at least additional bit_pos bits after c). In
3348 MS layout c does include the current run of bit-fields.
3350 This matters for calculating the natural alignment buckets
3351 in PCC mode. */
3353 /* 'align' will be used to influence the record's alignment,
3354 so it's the max of specified and type alignment, except
3355 in certain cases that depend on the mode. */
3356 if (align < typealign)
3357 align = typealign;
3358 if (pcc) {
3359 /* In PCC layout a non-packed bit-field is placed adjacent
3360 to the preceding bit-fields, except if it would overflow
3361 its container (depending on base type) or it's a zero-width
3362 bit-field. Packed non-zero-width bit-fields always are
3363 placed adjacent. */
3364 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3365 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3366 if (bit_size == 0 ||
3367 (typealign != 1 &&
3368 (ofs2 / (typealign * 8)) > (size/typealign))) {
3369 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3370 bit_pos = 0;
3371 } else if (bit_pos + bit_size > size * 8) {
3372 c += bit_pos >> 3;
3373 bit_pos &= 7;
3374 if (bit_pos + bit_size > size * 8) {
3375 c += 1, bit_pos = 0;
3376 if ((ad->a.packed || f->r) && packwarn) {
3377 tcc_warning("struct layout not compatible with GCC (internal limitation)");
3378 packwarn = 0;
3382 offset = c;
3383 /* In PCC layout named bit-fields influence the alignment
3384 of the containing struct using the base type's alignment,
3385 except for packed fields (which here have correct
3386 align/typealign). */
3387 if ((f->v & SYM_FIRST_ANOM))
3388 align = 1;
3389 } else {
3390 bt = f->type.t & VT_BTYPE;
3391 if ((bit_pos + bit_size > size * 8) ||
3392 (bit_size > 0) == (bt != prevbt)) {
3393 c = (c + typealign - 1) & -typealign;
3394 offset = c;
3395 bit_pos = 0;
3396 /* In MS bitfield mode a bit-field run always uses
3397 at least as many bits as the underlying type.
3398 To start a new run it's also required that this
3399 or the last bit-field had non-zero width. */
3400 if (bit_size || prev_bit_size)
3401 c += size;
3403 /* In MS layout the record's alignment is normally
3404 influenced by the field, except for a zero-width
3405 field at the start of a run (but by further zero-width
3406 fields it is again). */
3407 if (bit_size == 0 && prevbt != bt)
3408 align = 1;
3409 prevbt = bt;
3410 prev_bit_size = bit_size;
3412 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3413 | (bit_pos << VT_STRUCT_SHIFT);
3414 bit_pos += bit_size;
3415 if (pcc && bit_pos >= size * 8) {
3416 c += size;
3417 bit_pos -= size * 8;
3420 if (align > maxalign)
3421 maxalign = align;
3422 #if 0
3423 printf("set field %s offset=%d",
3424 get_tok_str(f->v & ~SYM_FIELD, NULL), offset);
3425 if (f->type.t & VT_BITFIELD) {
3426 printf(" pos=%d size=%d",
3427 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3428 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3430 printf("\n");
3431 #endif
3433 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3434 Sym *ass;
3435 /* An anonymous struct/union. Adjust member offsets
3436 to reflect the real offset of our containing struct.
3437 Also set the offset of this anon member inside
3438 the outer struct to be zero. This way it
3439 works both when accessing the field offset directly
3440 (from the base object) and when recursing into
3441 members in initializer handling. */
3442 int v2 = f->type.ref->v;
3443 if (!(v2 & SYM_FIELD) &&
3444 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3445 Sym **pps;
3446 /* This happens only with MS extensions. The
3447 anon member has a named struct type, so it
3448 potentially is shared with other references.
3449 We need to unshare members so we can modify
3450 them. */
3451 ass = f->type.ref;
3452 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3453 &f->type.ref->type, 0,
3454 f->type.ref->c);
3455 pps = &f->type.ref->next;
3456 while ((ass = ass->next) != NULL) {
3457 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3458 pps = &((*pps)->next);
3460 *pps = NULL;
3462 struct_add_offset(f->type.ref, offset);
3463 f->c = 0;
3464 } else {
3465 f->c = offset;
3468 f->r = 0;
3470 /* store size and alignment */
3471 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3472 + maxalign - 1) & -maxalign;
3473 type->ref->r = maxalign;
3474 if (offset + size > type->ref->c)
3475 tcc_warning("will touch memory past end of the struct (internal limitation)");
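/* Rough example of how the two bit-field modes differ (exact numbers
   depend on the target ABI):

       struct S { char a : 4; int b : 4; };

   In PCC/GCC-compatible mode both bit-fields share one storage run, so
   sizeof(struct S) is typically 4; in MS mode the change of base type
   (char -> int) starts a new int-sized unit, typically giving 8. */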
3478 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3479 static void struct_decl(CType *type, AttributeDef *ad, int u)
3481 int a, v, size, align, flexible, alignoverride;
3482 long c;
3483 int bit_size, bsize, bt;
3484 Sym *s, *ss, **ps;
3485 AttributeDef ad1;
3486 CType type1, btype;
3488 a = tok; /* save decl type */
3489 next();
3490 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3491 parse_attribute(ad);
3492 if (tok != '{') {
3493 v = tok;
3494 next();
3495 /* struct already defined ? return it */
3496 if (v < TOK_IDENT)
3497 expect("struct/union/enum name");
3498 s = struct_find(v);
3499 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3500 if (s->type.t != a)
3501 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3502 goto do_decl;
3504 } else {
3505 v = anon_sym++;
3507 /* Record the original enum/struct/union token. */
3508 type1.t = a;
3509 type1.ref = NULL;
3510 /* we put an undefined size for struct/union */
3511 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3512 s->r = 0; /* default alignment is zero as gcc */
3513 /* put struct/union/enum name in type */
3514 do_decl:
3515 type->t = u;
3516 type->ref = s;
3518 if (tok == '{') {
3519 next();
3520 if (s->c != -1)
3521 tcc_error("struct/union/enum already defined");
3522 /* cannot be empty */
3523 c = 0;
3524 /* empty enums are not allowed */
3525 if (a == TOK_ENUM) {
3526 int seen_neg = 0;
3527 int seen_wide = 0;
3528 for(;;) {
3529 CType *t = &int_type;
3530 v = tok;
3531 if (v < TOK_UIDENT)
3532 expect("identifier");
3533 ss = sym_find(v);
3534 if (ss && !local_stack)
3535 tcc_error("redefinition of enumerator '%s'",
3536 get_tok_str(v, NULL));
3537 next();
3538 if (tok == '=') {
3539 next();
3540 #if PTR_SIZE == 8
3541 c = expr_const64();
3542 #else
3543 /* We really want to support long long enums
3544 on i386 as well, but the Sym structure only
3545 holds a 'long' for associated constants,
3546 and enlarging it would bump its size (no
3547 available padding). So punt for now. */
3548 c = expr_const();
3549 #endif
3551 if (c < 0)
3552 seen_neg = 1;
3553 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3554 seen_wide = 1, t = &size_type;
3555 /* enum symbols have static storage */
3556 ss = sym_push(v, t, VT_CONST, c);
3557 ss->type.t |= VT_STATIC;
3558 if (tok != ',')
3559 break;
3560 next();
3561 c++;
3562 /* NOTE: we accept a trailing comma */
3563 if (tok == '}')
3564 break;
3566 if (!seen_neg)
3567 s->a.unsigned_enum = 1;
3568 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3569 skip('}');
3570 } else {
3571 ps = &s->next;
3572 flexible = 0;
3573 while (tok != '}') {
3574 if (!parse_btype(&btype, &ad1)) {
3575 skip(';');
3576 continue;
3578 while (1) {
3579 if (flexible)
3580 tcc_error("flexible array member '%s' not at the end of struct",
3581 get_tok_str(v, NULL));
3582 bit_size = -1;
3583 v = 0;
3584 type1 = btype;
3585 if (tok != ':') {
3586 if (tok != ';')
3587 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3588 if (v == 0) {
3589 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3590 expect("identifier");
3591 else {
3592 int v = btype.ref->v;
3593 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3594 if (tcc_state->ms_extensions == 0)
3595 expect("identifier");
3599 if (type_size(&type1, &align) < 0) {
3600 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3601 flexible = 1;
3602 else
3603 tcc_error("field '%s' has incomplete type",
3604 get_tok_str(v, NULL));
3606 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3607 (type1.t & VT_STORAGE))
3608 tcc_error("invalid type for '%s'",
3609 get_tok_str(v, NULL));
3611 if (tok == ':') {
3612 next();
3613 bit_size = expr_const();
3614 /* XXX: handle v = 0 case for messages */
3615 if (bit_size < 0)
3616 tcc_error("negative width in bit-field '%s'",
3617 get_tok_str(v, NULL));
3618 if (v && bit_size == 0)
3619 tcc_error("zero width for bit-field '%s'",
3620 get_tok_str(v, NULL));
3621 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3622 parse_attribute(&ad1);
3624 size = type_size(&type1, &align);
3625 /* Only remember non-default alignment. */
3626 alignoverride = 0;
3627 if (ad1.a.aligned) {
3628 int speca = 1 << (ad1.a.aligned - 1);
3629 alignoverride = speca;
3630 } else if (ad1.a.packed || ad->a.packed) {
3631 alignoverride = 1;
3632 } else if (*tcc_state->pack_stack_ptr) {
3633 if (align >= *tcc_state->pack_stack_ptr)
3634 alignoverride = *tcc_state->pack_stack_ptr;
3636 if (bit_size >= 0) {
3637 bt = type1.t & VT_BTYPE;
3638 if (bt != VT_INT &&
3639 bt != VT_BYTE &&
3640 bt != VT_SHORT &&
3641 bt != VT_BOOL &&
3642 bt != VT_ENUM &&
3643 bt != VT_LLONG)
3644 tcc_error("bitfields must have scalar type");
3645 bsize = size * 8;
3646 if (bit_size > bsize) {
3647 tcc_error("width of '%s' exceeds its type",
3648 get_tok_str(v, NULL));
3649 } else if (bit_size == bsize) {
3650 /* no need for bit fields */
3652 } else {
3653 type1.t |= VT_BITFIELD |
3654 (0 << VT_STRUCT_SHIFT) |
3655 (bit_size << (VT_STRUCT_SHIFT + 6));
3658 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3659 /* Remember we've seen a real field to check
3660 for placement of flexible array member. */
3661 c = 1;
3663 /* If member is a struct or bit-field, enforce
3664 placing into the struct (as anonymous). */
3665 if (v == 0 &&
3666 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3667 bit_size >= 0)) {
3668 v = anon_sym++;
3670 if (v) {
3671 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3672 *ps = ss;
3673 ps = &ss->next;
3675 if (tok == ';' || tok == TOK_EOF)
3676 break;
3677 skip(',');
3679 skip(';');
3681 skip('}');
3682 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3683 parse_attribute(ad);
3684 struct_layout(type, ad);
3689 /* return 1 if basic type is a type size (short, long, long long) */
3690 ST_FUNC int is_btype_size(int bt)
3692 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3695 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3696 are added to the element type, which is copied because it could be a typedef. */
3697 static void parse_btype_qualify(CType *type, int qualifiers)
3699 while (type->t & VT_ARRAY) {
3700 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3701 type = &type->ref->type;
3703 type->t |= qualifiers;
3706 /* return 0 if no type declaration. otherwise, return the basic type
3707 and skip it.
3709 static int parse_btype(CType *type, AttributeDef *ad)
3711 int t, u, bt_size, complete, type_found, typespec_found, g;
3712 Sym *s;
3713 CType type1;
3715 memset(ad, 0, sizeof(AttributeDef));
3716 complete = 0;
3717 type_found = 0;
3718 typespec_found = 0;
3719 t = 0;
3720 while(1) {
3721 switch(tok) {
3722 case TOK_EXTENSION:
3723 /* currently, __extension__ is simply ignored */
3724 next();
3725 continue;
3727 /* basic types */
3728 case TOK_CHAR:
3729 u = VT_BYTE;
3730 basic_type:
3731 next();
3732 basic_type1:
3733 if (complete)
3734 tcc_error("too many basic types");
3735 t |= u;
3736 bt_size = is_btype_size (u & VT_BTYPE);
3737 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3738 complete = 1;
3739 typespec_found = 1;
3740 break;
3741 case TOK_VOID:
3742 u = VT_VOID;
3743 goto basic_type;
3744 case TOK_SHORT:
3745 u = VT_SHORT;
3746 goto basic_type;
3747 case TOK_INT:
3748 u = VT_INT;
3749 goto basic_type;
3750 case TOK_LONG:
3751 next();
3752 if ((t & VT_BTYPE) == VT_DOUBLE) {
3753 #ifndef TCC_TARGET_PE
3754 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3755 #endif
3756 } else if ((t & VT_BTYPE) == VT_LONG) {
3757 t = (t & ~VT_BTYPE) | VT_LLONG;
3758 } else {
3759 u = VT_LONG;
3760 goto basic_type1;
3762 break;
3763 #ifdef TCC_TARGET_ARM64
3764 case TOK_UINT128:
3765 /* GCC's __uint128_t appears in some Linux header files. Make it a
3766 synonym for long double to get the size and alignment right. */
3767 u = VT_LDOUBLE;
3768 goto basic_type;
3769 #endif
3770 case TOK_BOOL:
3771 u = VT_BOOL;
3772 goto basic_type;
3773 case TOK_FLOAT:
3774 u = VT_FLOAT;
3775 goto basic_type;
3776 case TOK_DOUBLE:
3777 next();
3778 if ((t & VT_BTYPE) == VT_LONG) {
3779 #ifdef TCC_TARGET_PE
3780 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3781 #else
3782 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3783 #endif
3784 } else {
3785 u = VT_DOUBLE;
3786 goto basic_type1;
3788 break;
3789 case TOK_ENUM:
3790 struct_decl(&type1, ad, VT_ENUM);
3791 basic_type2:
3792 u = type1.t;
3793 type->ref = type1.ref;
3794 goto basic_type1;
3795 case TOK_STRUCT:
3796 case TOK_UNION:
3797 struct_decl(&type1, ad, VT_STRUCT);
3798 goto basic_type2;
3800 /* type modifiers */
3801 case TOK_CONST1:
3802 case TOK_CONST2:
3803 case TOK_CONST3:
3804 type->t = t;
3805 parse_btype_qualify(type, VT_CONSTANT);
3806 t = type->t;
3807 next();
3808 break;
3809 case TOK_VOLATILE1:
3810 case TOK_VOLATILE2:
3811 case TOK_VOLATILE3:
3812 type->t = t;
3813 parse_btype_qualify(type, VT_VOLATILE);
3814 t = type->t;
3815 next();
3816 break;
3817 case TOK_SIGNED1:
3818 case TOK_SIGNED2:
3819 case TOK_SIGNED3:
3820 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3821 tcc_error("signed and unsigned modifier");
3822 typespec_found = 1;
3823 t |= VT_DEFSIGN;
3824 next();
3825 break;
3826 case TOK_REGISTER:
3827 case TOK_AUTO:
3828 case TOK_RESTRICT1:
3829 case TOK_RESTRICT2:
3830 case TOK_RESTRICT3:
3831 next();
3832 break;
3833 case TOK_UNSIGNED:
3834 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3835 tcc_error("signed and unsigned modifier");
3836 t |= VT_DEFSIGN | VT_UNSIGNED;
3837 next();
3838 typespec_found = 1;
3839 break;
3841 /* storage */
3842 case TOK_EXTERN:
3843 g = VT_EXTERN;
3844 goto storage;
3845 case TOK_STATIC:
3846 g = VT_STATIC;
3847 goto storage;
3848 case TOK_TYPEDEF:
3849 g = VT_TYPEDEF;
3850 goto storage;
3851 storage:
3852 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3853 tcc_error("multiple storage classes");
3854 t |= g;
3855 next();
3856 break;
3857 case TOK_INLINE1:
3858 case TOK_INLINE2:
3859 case TOK_INLINE3:
3860 t |= VT_INLINE;
3861 next();
3862 break;
3864 /* GNUC attribute */
3865 case TOK_ATTRIBUTE1:
3866 case TOK_ATTRIBUTE2:
3867 parse_attribute(ad);
3868 if (ad->a.mode) {
3869 u = ad->a.mode -1;
3870 t = (t & ~VT_BTYPE) | u;
3872 break;
3873 /* GNUC typeof */
3874 case TOK_TYPEOF1:
3875 case TOK_TYPEOF2:
3876 case TOK_TYPEOF3:
3877 next();
3878 parse_expr_type(&type1);
3879 /* remove all storage modifiers except typedef */
3880 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3881 goto basic_type2;
3882 default:
3883 if (typespec_found)
3884 goto the_end;
3885 s = sym_find(tok);
3886 if (!s || !(s->type.t & VT_TYPEDEF))
3887 goto the_end;
3889 type->t = ((s->type.t & ~VT_TYPEDEF) |
3890 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3891 type->ref = s->type.ref;
3892 if (t & (VT_CONSTANT | VT_VOLATILE))
3893 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3894 t = type->t;
3896 if (s->r) {
3897 /* get attributes from typedef */
3898 if (0 == ad->a.aligned)
3899 ad->a.aligned = s->a.aligned;
3900 if (0 == ad->a.func_call)
3901 ad->a.func_call = s->a.func_call;
3902 ad->a.packed |= s->a.packed;
3904 next();
3905 typespec_found = 1;
3906 break;
3908 type_found = 1;
3910 the_end:
3911 if (tcc_state->char_is_unsigned) {
3912 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3913 t |= VT_UNSIGNED;
3916 /* long is never used as type */
3917 if ((t & VT_BTYPE) == VT_LONG)
3918 #if PTR_SIZE == 8 && !defined TCC_TARGET_PE
3919 t = (t & ~VT_BTYPE) | VT_LLONG;
3920 #else
3921 t = (t & ~VT_BTYPE) | VT_INT;
3922 #endif
3923 type->t = t;
3924 return type_found;
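/* Examples of specifier sequences resolved here (sketch):

       signed char c;           // VT_BYTE | VT_DEFSIGN
       unsigned long long x;    // VT_LLONG | VT_UNSIGNED | VT_DEFSIGN
       long double d;           // VT_LDOUBLE (plain double on PE targets)
       typedef int T; const T *p;   // qualifier folded into the typedef'd
                                    // type via parse_btype_qualify()
*/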
3927 /* convert a function parameter type (array to pointer and function to
3928 function pointer) */
3929 static inline void convert_parameter_type(CType *pt)
3931 /* remove const and volatile qualifiers (XXX: const could be used
3932 to indicate a const function parameter) */
3933 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3934 /* array must be transformed to pointer according to ANSI C */
3935 pt->t &= ~VT_ARRAY;
3936 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3937 mk_pointer(pt);
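/* e.g. a prototype written as (sketch)

       void f(const int a[10], int g(void));

   is adjusted here to the equivalent of

       void f(int *a, int (*g)(void));

   (qualifiers dropped, array and function parameters decay to pointers). */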
3941 ST_FUNC void parse_asm_str(CString *astr)
3943 skip('(');
3944 parse_mult_str(astr, "string constant");
3947 /* Parse an asm label and return the token */
3948 static int asm_label_instr(void)
3950 int v;
3951 CString astr;
3953 next();
3954 parse_asm_str(&astr);
3955 skip(')');
3956 #ifdef ASM_DEBUG
3957 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3958 #endif
3959 v = tok_alloc(astr.data, astr.size - 1)->tok;
3960 cstr_free(&astr);
3961 return v;
3964 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
3966 int n, l, t1, arg_size, align;
3967 Sym **plast, *s, *first;
3968 AttributeDef ad1;
3969 CType pt;
3971 if (tok == '(') {
3972 /* function type, or recursive declarator (return if so) */
3973 next();
3974 if (td && !(td & TYPE_ABSTRACT))
3975 return 0;
3976 if (tok == ')')
3977 l = 0;
3978 else if (parse_btype(&pt, &ad1))
3979 l = FUNC_NEW;
3980 else if (td)
3981 return 0;
3982 else
3983 l = FUNC_OLD;
3984 first = NULL;
3985 plast = &first;
3986 arg_size = 0;
3987 if (l) {
3988 for(;;) {
3989 /* read param name and compute offset */
3990 if (l != FUNC_OLD) {
3991 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3992 break;
3993 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3994 if ((pt.t & VT_BTYPE) == VT_VOID)
3995 tcc_error("parameter declared as void");
3996 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3997 } else {
3998 n = tok;
3999 if (n < TOK_UIDENT)
4000 expect("identifier");
4001 pt.t = VT_VOID; /* invalid type */
4002 next();
4004 convert_parameter_type(&pt);
4005 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4006 *plast = s;
4007 plast = &s->next;
4008 if (tok == ')')
4009 break;
4010 skip(',');
4011 if (l == FUNC_NEW && tok == TOK_DOTS) {
4012 l = FUNC_ELLIPSIS;
4013 next();
4014 break;
4016 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4017 tcc_error("invalid type");
4019 } else
4020 /* if no parameters, then old type prototype */
4021 l = FUNC_OLD;
4022 skip(')');
4023 /* NOTE: const is ignored in returned type as it has a special
4024 meaning in gcc / C++ */
4025 type->t &= ~VT_CONSTANT;
4026 /* some ancient pre-K&R C allows a function to return an array
4027 and the array brackets to be put after the arguments, such
4028 that "int c()[]" means something like "int[] c()" */
4029 if (tok == '[') {
4030 next();
4031 skip(']'); /* only handle simple "[]" */
4032 mk_pointer(type);
4034 /* we push an anonymous symbol which will contain the function prototype */
4035 ad->a.func_args = arg_size;
4036 s = sym_push(SYM_FIELD, type, 0, l);
4037 s->a = ad->a;
4038 s->next = first;
4039 type->t = VT_FUNC;
4040 type->ref = s;
4041 } else if (tok == '[') {
4042 int saved_nocode_wanted = nocode_wanted;
4043 /* array definition */
4044 next();
4045 if (tok == TOK_RESTRICT1)
4046 next();
4047 n = -1;
4048 t1 = 0;
4049 if (tok != ']') {
4050 if (!local_stack || (storage & VT_STATIC))
4051 vpushi(expr_const());
4052 else {
4053 /* For VLAs (which can only happen with local_stack && !VT_STATIC)
4054 the length must always be evaluated, even under nocode_wanted,
4055 so that its size slot is initialized (e.g. under sizeof
4056 or typeof). */
4057 nocode_wanted = 0;
4058 gexpr();
4060 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4061 n = vtop->c.i;
4062 if (n < 0)
4063 tcc_error("invalid array size");
4064 } else {
4065 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4066 tcc_error("size of variable length array should be an integer");
4067 t1 = VT_VLA;
4070 skip(']');
4071 /* parse next post type */
4072 post_type(type, ad, storage, 0);
4073 if (type->t == VT_FUNC)
4074 tcc_error("declaration of an array of functions");
4075 t1 |= type->t & VT_VLA;
4077 if (t1 & VT_VLA) {
4078 loc -= type_size(&int_type, &align);
4079 loc &= -align;
4080 n = loc;
4082 vla_runtime_type_size(type, &align);
4083 gen_op('*');
4084 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4085 vswap();
4086 vstore();
4088 if (n != -1)
4089 vpop();
4090 nocode_wanted = saved_nocode_wanted;
4092 /* we push an anonymous symbol which will contain the array
4093 element type */
4094 s = sym_push(SYM_FIELD, type, 0, n);
4095 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4096 type->ref = s;
4098 return 1;
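/* Declarator suffixes handled above, for illustration:

       int f(int a, ...);   // FUNC_ELLIPSIS prototype
       int g();             // FUNC_OLD: unspecified parameters
       int tab[16];         // fixed-size array, n == 16
       int v[n];            // VLA (inside a function, n a local int):
                            // the element count is evaluated at run time
                            // and the byte size kept in a stack slot at 'loc'
*/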
4101 /* Parse a type declarator (except basic type), and return the type
4102 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4103 expected. 'type' should contain the basic type. 'ad' is the
4104 attribute definition of the basic type. It can be modified by
4105 type_decl(). If this (possibly abstract) declarator is a pointer chain
4106 it returns the innermost pointed to type (equals *type, but is a different
4107 pointer), otherwise returns type itself, that's used for recursive calls. */
4108 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4110 CType *post, *ret;
4111 int qualifiers, storage;
4113 /* recursive type, remove storage bits first, apply them again later */
4114 storage = type->t & VT_STORAGE;
4115 type->t &= ~VT_STORAGE;
4116 post = ret = type;
4117 while (tok == '*') {
4118 qualifiers = 0;
4119 redo:
4120 next();
4121 switch(tok) {
4122 case TOK_CONST1:
4123 case TOK_CONST2:
4124 case TOK_CONST3:
4125 qualifiers |= VT_CONSTANT;
4126 goto redo;
4127 case TOK_VOLATILE1:
4128 case TOK_VOLATILE2:
4129 case TOK_VOLATILE3:
4130 qualifiers |= VT_VOLATILE;
4131 goto redo;
4132 case TOK_RESTRICT1:
4133 case TOK_RESTRICT2:
4134 case TOK_RESTRICT3:
4135 goto redo;
4136 /* XXX: clarify attribute handling */
4137 case TOK_ATTRIBUTE1:
4138 case TOK_ATTRIBUTE2:
4139 parse_attribute(ad);
4140 break;
4142 mk_pointer(type);
4143 type->t |= qualifiers;
4144 if (ret == type)
4145 /* innermost pointed to type is the one for the first derivation */
4146 ret = pointed_type(type);
4149 if (tok == '(') {
4150 /* This is possibly a parameter type list for abstract declarators
4151 ('int ()'), use post_type for testing this. */
4152 if (!post_type(type, ad, 0, td)) {
4153 /* It's not, so it's a nested declarator, and the post operations
4154 apply to the innermost pointed to type (if any). */
4155 /* XXX: this is not correct to modify 'ad' at this point, but
4156 the syntax is not clear */
4157 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4158 parse_attribute(ad);
4159 post = type_decl(type, ad, v, td);
4160 skip(')');
4162 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4163 /* type identifier */
4164 *v = tok;
4165 next();
4166 } else {
4167 if (!(td & TYPE_ABSTRACT))
4168 expect("identifier");
4169 *v = 0;
4171 post_type(post, ad, storage, 0);
4172 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4173 parse_attribute(ad);
4174 type->t |= storage;
4175 return ret;
4178 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4179 ST_FUNC int lvalue_type(int t)
4181 int bt, r;
4182 r = VT_LVAL;
4183 bt = t & VT_BTYPE;
4184 if (bt == VT_BYTE || bt == VT_BOOL)
4185 r |= VT_LVAL_BYTE;
4186 else if (bt == VT_SHORT)
4187 r |= VT_LVAL_SHORT;
4188 else
4189 return r;
4190 if (t & VT_UNSIGNED)
4191 r |= VT_LVAL_UNSIGNED;
4192 return r;
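/* Sketch of the mapping above:

       char, _Bool      -> VT_LVAL | VT_LVAL_BYTE   (+ VT_LVAL_UNSIGNED if unsigned)
       short            -> VT_LVAL | VT_LVAL_SHORT  (+ VT_LVAL_UNSIGNED if unsigned)
       everything else  -> VT_LVAL
*/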
4195 /* indirection with full error checking and bound check */
4196 ST_FUNC void indir(void)
4198 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4199 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4200 return;
4201 expect("pointer");
4203 if (vtop->r & VT_LVAL)
4204 gv(RC_INT);
4205 vtop->type = *pointed_type(&vtop->type);
4206 /* Arrays and functions are never lvalues */
4207 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4208 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4209 vtop->r |= lvalue_type(vtop->type.t);
4210 /* if bound checking, the referenced pointer must be checked */
4211 #ifdef CONFIG_TCC_BCHECK
4212 if (tcc_state->do_bounds_check)
4213 vtop->r |= VT_MUSTBOUND;
4214 #endif
4218 /* pass a parameter to a function and do type checking and casting */
4219 static void gfunc_param_typed(Sym *func, Sym *arg)
4221 int func_type;
4222 CType type;
4224 func_type = func->c;
4225 if (func_type == FUNC_OLD ||
4226 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4227 /* default casting : only need to convert float to double */
4228 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4229 type.t = VT_DOUBLE;
4230 gen_cast(&type);
4231 } else if (vtop->type.t & VT_BITFIELD) {
4232 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4233 type.ref = vtop->type.ref;
4234 gen_cast(&type);
4236 } else if (arg == NULL) {
4237 tcc_error("too many arguments to function");
4238 } else {
4239 type = arg->type;
4240 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4241 gen_assign_cast(&type);
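/* e.g. in a variadic call such as

       printf("%g\n", 1.0f);

   the float argument falls under the FUNC_ELLIPSIS default-casting branch
   above and is promoted to double before the call. */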
4245 /* parse an expression and return its type without any side effect.
4246 If UNRY is set we parse a unary expression, otherwise a full one. */
4247 static void expr_type(CType *type, int unry)
4249 nocode_wanted++;
4250 if (unry)
4251 unary();
4252 else
4253 gexpr();
4254 *type = vtop->type;
4255 vpop();
4256 nocode_wanted--;
4259 /* parse an expression of the form '(type)' or '(expr)' and return its
4260 type */
4261 static void parse_expr_type(CType *type)
4263 int n;
4264 AttributeDef ad;
4266 skip('(');
4267 if (parse_btype(type, &ad)) {
4268 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4269 } else {
4270 expr_type(type, 0);
4272 skip(')');
4275 static void parse_type(CType *type)
4277 AttributeDef ad;
4278 int n;
4280 if (!parse_btype(type, &ad)) {
4281 expect("type");
4283 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4286 static void parse_builtin_params(int nc, const char *args)
4288 char c, sep = '(';
4289 CType t;
4290 if (nc)
4291 nocode_wanted++;
4292 next();
4293 while ((c = *args++)) {
4294 skip(sep);
4295 sep = ',';
4296 switch (c) {
4297 case 'e': expr_eq(); continue;
4298 case 't': parse_type(&t); vpush(&t); continue;
4299 default: tcc_error("internal error"); break;
4302 skip(')');
4303 if (nc)
4304 nocode_wanted--;
4307 ST_FUNC void unary(void)
4309 int n, t, align, size, r, sizeof_caller;
4310 CType type;
4311 Sym *s;
4312 AttributeDef ad;
4314 sizeof_caller = in_sizeof;
4315 in_sizeof = 0;
4316 /* XXX: GCC 2.95.3 does not generate a jump table for this switch
4317 although it would be better here */
4318 tok_next:
4319 switch(tok) {
4320 case TOK_EXTENSION:
4321 next();
4322 goto tok_next;
4323 case TOK_CINT:
4324 case TOK_CCHAR:
4325 case TOK_LCHAR:
4326 t = VT_INT;
4327 push_tokc:
4328 type.t = t;
4329 type.ref = 0;
4330 vsetc(&type, VT_CONST, &tokc);
4331 next();
4332 break;
4333 case TOK_CUINT:
4334 t = VT_INT | VT_UNSIGNED;
4335 goto push_tokc;
4336 case TOK_CLLONG:
4337 t = VT_LLONG;
4338 goto push_tokc;
4339 case TOK_CULLONG:
4340 t = VT_LLONG | VT_UNSIGNED;
4341 goto push_tokc;
4342 case TOK_CFLOAT:
4343 t = VT_FLOAT;
4344 goto push_tokc;
4345 case TOK_CDOUBLE:
4346 t = VT_DOUBLE;
4347 goto push_tokc;
4348 case TOK_CLDOUBLE:
4349 t = VT_LDOUBLE;
4350 goto push_tokc;
4352 case TOK___FUNCTION__:
4353 if (!gnu_ext)
4354 goto tok_identifier;
4355 /* fall thru */
4356 case TOK___FUNC__:
4357 {
4358 void *ptr;
4359 int len;
4360 /* special function name identifier */
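/* as an illustration: inside a function named "main" this materializes the
   5-byte string "main" (including the terminating NUL) in the data section
   and pushes a char-array reference to it */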
4361 len = strlen(funcname) + 1;
4362 /* generate char[len] type */
4363 type.t = VT_BYTE;
4364 mk_pointer(&type);
4365 type.t |= VT_ARRAY;
4366 type.ref->c = len;
4367 vpush_ref(&type, data_section, data_section->data_offset, len);
4368 ptr = section_ptr_add(data_section, len);
4369 memcpy(ptr, funcname, len);
4370 next();
4371 }
4372 break;
4373 case TOK_LSTR:
4374 #ifdef TCC_TARGET_PE
4375 t = VT_SHORT | VT_UNSIGNED;
4376 #else
4377 t = VT_INT;
4378 #endif
4379 goto str_init;
4380 case TOK_STR:
4381 /* string parsing */
4382 t = VT_BYTE;
4383 str_init:
4384 if (tcc_state->warn_write_strings)
4385 t |= VT_CONSTANT;
4386 type.t = t;
4387 mk_pointer(&type);
4388 type.t |= VT_ARRAY;
4389 memset(&ad, 0, sizeof(AttributeDef));
4390 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4391 break;
4392 case '(':
4393 next();
4394 /* cast ? */
4395 if (parse_btype(&type, &ad)) {
4396 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4397 skip(')');
4398 /* check ISOC99 compound literal */
4399 if (tok == '{') {
4400 /* data is allocated locally by default */
4401 if (global_expr)
4402 r = VT_CONST;
4403 else
4404 r = VT_LOCAL;
4405 /* all except arrays are lvalues */
4406 if (!(type.t & VT_ARRAY))
4407 r |= lvalue_type(type.t);
4408 memset(&ad, 0, sizeof(AttributeDef));
4409 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4410 } else {
4411 if (sizeof_caller) {
4412 vpush(&type);
4413 return;
4414 }
4415 unary();
4416 gen_cast(&type);
4417 }
4418 } else if (tok == '{') {
4419 int saved_nocode_wanted = nocode_wanted;
4420 if (const_wanted)
4421 tcc_error("expected constant");
4422 /* save all registers */
4423 save_regs(0);
4424 /* statement expression : we do not accept break/continue
4425 inside as GCC does. We do retain the nocode_wanted state,
4426 as statement expressions can't ever be entered from the
4427 outside, so any reactivation of code emission (from labels
4428 or loop heads) can be disabled again after the end of it. */
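/* illustration (f being any function):
   int y = ({ int t = f(); t * 2; });
   the value of the last expression statement becomes the value of the
   whole statement expression */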
4429 block(NULL, NULL, 1);
4430 nocode_wanted = saved_nocode_wanted;
4431 skip(')');
4432 } else {
4433 gexpr();
4434 skip(')');
4435 }
4436 break;
4437 case '*':
4438 next();
4439 unary();
4440 indir();
4441 break;
4442 case '&':
4443 next();
4444 unary();
4445 /* function names must be treated as function pointers,
4446 except for unary '&' and sizeof. Since we consider that
4447 functions are not lvalues, we only have to handle them
4448 there and in function calls. */
4449 /* arrays can also be used although they are not lvalues */
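/* illustration: with "int a[4];" the expression "&a" skips the lvalue test
   and yields an "int (*)[4]"; for a function f, "&f" and plain "f" both
   evaluate to the function's address */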
4450 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4451 !(vtop->type.t & VT_ARRAY))
4452 test_lvalue();
4453 mk_pointer(&vtop->type);
4454 gaddrof();
4455 break;
4456 case '!':
4457 next();
4458 unary();
4459 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4460 CType boolean;
4461 boolean.t = VT_BOOL;
4462 gen_cast(&boolean);
4463 vtop->c.i = !vtop->c.i;
4464 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4465 vtop->c.i ^= 1;
4466 else {
4467 save_regs(1);
4468 vseti(VT_JMP, gvtst(1, 0));
4469 }
4470 break;
4471 case '~':
4472 next();
4473 unary();
4474 vpushi(-1);
4475 gen_op('^');
4476 break;
4477 case '+':
4478 next();
4479 unary();
4480 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4481 tcc_error("pointer not accepted for unary plus");
4482 /* In order to force a cast, we add zero, except for floating point
4483 where we really need a no-op (otherwise -0.0 would be transformed
4484 into +0.0). */
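/* e.g. "+c" with "char c" is compiled as "c + 0", which performs the usual
   promotion to int; "+f" with "float f" must stay a no-op */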
4485 if (!is_float(vtop->type.t)) {
4486 vpushi(0);
4487 gen_op('+');
4488 }
4489 break;
4490 case TOK_SIZEOF:
4491 case TOK_ALIGNOF1:
4492 case TOK_ALIGNOF2:
4493 t = tok;
4494 next();
4495 in_sizeof++;
4496 expr_type(&type, 1); // in_sizeof is consumed and reset to 0 by unary()
4497 size = type_size(&type, &align);
4498 if (t == TOK_SIZEOF) {
4499 if (!(type.t & VT_VLA)) {
4500 if (size < 0)
4501 tcc_error("sizeof applied to an incomplete type");
4502 vpushs(size);
4503 } else {
4504 vla_runtime_type_size(&type, &align);
4505 }
4506 } else {
4507 vpushs(align);
4508 }
4509 vtop->type.t |= VT_UNSIGNED;
4510 break;
4512 case TOK_builtin_expect:
4513 /* __builtin_expect is a no-op for now */
4514 parse_builtin_params(0, "ee");
4515 vpop();
4516 break;
4517 case TOK_builtin_types_compatible_p:
4518 parse_builtin_params(0, "tt");
4519 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4520 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4521 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4522 vtop -= 2;
4523 vpushi(n);
4524 break;
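/* __builtin_choose_expr(CONST, A, B) yields A when CONST is nonzero and B
   otherwise; only the chosen operand generates code, e.g. (illustrative)
   __builtin_choose_expr(1, f(), g()) never emits the call to g() */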
4525 case TOK_builtin_choose_expr:
4526 {
4527 int64_t c;
4528 next();
4529 skip('(');
4530 c = expr_const64();
4531 skip(',');
4532 if (!c) {
4533 nocode_wanted++;
4534 }
4535 expr_eq();
4536 if (!c) {
4537 vpop();
4538 nocode_wanted--;
4539 }
4540 skip(',');
4541 if (c) {
4542 nocode_wanted++;
4543 }
4544 expr_eq();
4545 if (c) {
4546 vpop();
4547 nocode_wanted--;
4548 }
4549 skip(')');
4550 }
4551 break;
4552 case TOK_builtin_constant_p:
4553 parse_builtin_params(1, "e");
4554 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4555 vtop--;
4556 vpushi(n);
4557 break;
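/* __builtin_frame_address(n) / __builtin_return_address(n): start from the
   current frame and follow the saved frame pointer n times; e.g.
   __builtin_frame_address(0) is the current frame, and
   __builtin_return_address(0) reads the word PTR_SIZE bytes above it */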
4558 case TOK_builtin_frame_address:
4559 case TOK_builtin_return_address:
4560 {
4561 int tok1 = tok;
4562 int level;
4563 CType type;
4564 next();
4565 skip('(');
4566 if (tok != TOK_CINT) {
4567 tcc_error("%s only takes positive integers",
4568 tok1 == TOK_builtin_return_address ?
4569 "__builtin_return_address" :
4570 "__builtin_frame_address");
4572 level = (uint32_t)tokc.i;
4573 next();
4574 skip(')');
4575 type.t = VT_VOID;
4576 mk_pointer(&type);
4577 vset(&type, VT_LOCAL, 0); /* local frame */
4578 while (level--) {
4579 mk_pointer(&vtop->type);
4580 indir(); /* -> parent frame */
4581 }
4582 if (tok1 == TOK_builtin_return_address) {
4583 // assume return address is just above frame pointer on stack
4584 vpushi(PTR_SIZE);
4585 gen_op('+');
4586 mk_pointer(&vtop->type);
4587 indir();
4588 }
4589 }
4590 break;
4591 #ifdef TCC_TARGET_X86_64
4592 #ifdef TCC_TARGET_PE
4593 case TOK_builtin_va_start:
4594 parse_builtin_params(0, "ee");
4595 r = vtop->r & VT_VALMASK;
4596 if (r == VT_LLOCAL)
4597 r = VT_LOCAL;
4598 if (r != VT_LOCAL)
4599 tcc_error("__builtin_va_start expects a local variable");
4600 vtop->r = r;
4601 vtop->type = char_pointer_type;
4602 vtop->c.i += 8;
4603 vstore();
4604 break;
4605 #else
4606 case TOK_builtin_va_arg_types:
4607 parse_builtin_params(0, "t");
4608 vpushi(classify_x86_64_va_arg(&vtop->type));
4609 vswap();
4610 vpop();
4611 break;
4612 #endif
4613 #endif
4615 #ifdef TCC_TARGET_ARM64
4616 case TOK___va_start: {
4617 parse_builtin_params(0, "ee");
4618 //xx check types
4619 gen_va_start();
4620 vpushi(0);
4621 vtop->type.t = VT_VOID;
4622 break;
4623 }
4624 case TOK___va_arg: {
4625 CType type;
4626 parse_builtin_params(0, "et");
4627 type = vtop->type;
4628 vpop();
4629 //xx check types
4630 gen_va_arg(&type);
4631 vtop->type = type;
4632 break;
4633 }
4634 case TOK___arm64_clear_cache: {
4635 parse_builtin_params(0, "ee");
4636 gen_clear_cache();
4637 vpushi(0);
4638 vtop->type.t = VT_VOID;
4639 break;
4640 }
4641 #endif
4642 /* pre operations */
4643 case TOK_INC:
4644 case TOK_DEC:
4645 t = tok;
4646 next();
4647 unary();
4648 inc(0, t);
4649 break;
4650 case '-':
4651 next();
4652 unary();
4653 t = vtop->type.t & VT_BTYPE;
4654 if (is_float(t)) {
4655 /* In IEEE negate(x) isn't subtract(0,x), but rather
4656 subtract(-0, x). */
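/* e.g. with x == +0.0: 0.0 - 0.0 gives +0.0, but -0.0 - 0.0 gives -0.0,
   which is the correct negation */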
4657 vpush(&vtop->type);
4658 if (t == VT_FLOAT)
4659 vtop->c.f = -1.0 * 0.0;
4660 else if (t == VT_DOUBLE)
4661 vtop->c.d = -1.0 * 0.0;
4662 else
4663 vtop->c.ld = -1.0 * 0.0;
4664 } else
4665 vpushi(0);
4666 vswap();
4667 gen_op('-');
4668 break;
4669 case TOK_LAND:
4670 if (!gnu_ext)
4671 goto tok_identifier;
4672 next();
4673 /* allow taking the address of a label */
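/* illustration: "void *p = &&retry; ... goto *p;" reaches this point with
   tok holding the label identifier */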
4674 if (tok < TOK_UIDENT)
4675 expect("label identifier");
4676 s = label_find(tok);
4677 if (!s) {
4678 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4679 } else {
4680 if (s->r == LABEL_DECLARED)
4681 s->r = LABEL_FORWARD;
4682 }
4683 if (!s->type.t) {
4684 s->type.t = VT_VOID;
4685 mk_pointer(&s->type);
4686 s->type.t |= VT_STATIC;
4687 }
4688 vpushsym(&s->type, s);
4689 next();
4690 break;
4692 // special qnan, snan and infinity values
4693 case TOK___NAN__: