Remove a bit-field TODO
[tinycc.git] / tccgen.c
blob: 4252c57d7f54217dfa5c632d788905d3a48842cc
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
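/* How the check works (little-endian IEEE-754 layout assumed, as the XXX
   above notes): p[1] holds the sign, the 11 exponent bits and the top of
   the mantissa.  OR-ing with 0x800fffff sets every bit except the exponent
   bits; only when the exponent is all ones (Inf/NaN) does the +1 wrap to 0,
   so shifting right by 31 yields 1 for finite values and 0 otherwise. */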
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
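/* (kept under #if 0; to debug the value stack, enable it and call e.g.
   pv("after gen_op", 0, 2) to dump the two entries at the top of vstack,
   as the commented-out calls in gen_opl() further below do) */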
142 /* ------------------------------------------------------------------------- */
143 /* start of translation unit info */
144 ST_FUNC void tcc_debug_start(TCCState *s1)
146 if (s1->do_debug) {
147 char buf[512];
149 /* file info: full path + filename */
150 section_sym = put_elf_sym(symtab_section, 0, 0,
151 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
152 text_section->sh_num, NULL);
153 getcwd(buf, sizeof(buf));
154 #ifdef _WIN32
155 normalize_slashes(buf);
156 #endif
157 pstrcat(buf, sizeof(buf), "/");
158 put_stabs_r(buf, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 put_stabs_r(file->filename, N_SO, 0, 0,
161 text_section->data_offset, text_section, section_sym);
162 last_ind = 0;
163 last_line_num = 0;
166 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
167 symbols can be safely used */
168 put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
170 SHN_ABS, file->filename);
173 /* put end of translation unit info */
174 ST_FUNC void tcc_debug_end(TCCState *s1)
176 if (!s1->do_debug)
177 return;
178 put_stabs_r(NULL, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
183 /* generate line number info */
184 ST_FUNC void tcc_debug_line(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 if ((last_line_num != file->line_num || last_ind != ind)) {
189 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
190 last_ind = ind;
191 last_line_num = file->line_num;
195 /* put function symbol */
196 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
198 char buf[512];
200 if (!s1->do_debug)
201 return;
203 /* stabs info */
204 /* XXX: we put here a dummy type */
205 snprintf(buf, sizeof(buf), "%s:%c1",
206 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
207 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
208 cur_text_section, sym->c);
209 /* //gr gdb wants a line at the function */
210 put_stabn(N_SLINE, 0, file->line_num, 0);
212 last_ind = 0;
213 last_line_num = 0;
216 /* put function size */
217 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
219 if (!s1->do_debug)
220 return;
221 put_stabn(N_FUN, 0, 0, size);
224 /* ------------------------------------------------------------------------- */
225 ST_FUNC void tccgen_start(TCCState *s1)
227 cur_text_section = NULL;
228 funcname = "";
229 anon_sym = SYM_FIRST_ANOM;
230 section_sym = 0;
231 const_wanted = 0;
232 nocode_wanted = 1;
234 /* define some often used types */
235 int_type.t = VT_INT;
236 char_pointer_type.t = VT_BYTE;
237 mk_pointer(&char_pointer_type);
238 #if PTR_SIZE == 4
239 size_type.t = VT_INT;
240 #else
241 size_type.t = VT_LLONG;
242 #endif
243 func_old_type.t = VT_FUNC;
244 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
246 tcc_debug_start(s1);
248 #ifdef TCC_TARGET_ARM
249 arm_init(s1);
250 #endif
253 ST_FUNC void tccgen_end(TCCState *s1)
255 gen_inline_functions(s1);
256 check_vstack();
257 /* end of translation unit info */
258 tcc_debug_end(s1);
261 /* ------------------------------------------------------------------------- */
262 /* apply storage attributes to ELF symbol */
264 static void update_storage(Sym *sym)
266 int t;
267 ElfW(Sym) *esym;
269 if (0 == sym->c)
270 return;
272 t = sym->type.t;
273 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
275 if (t & VT_VIS_MASK)
276 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
277 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
279 if (t & VT_WEAK)
280 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
282 #ifdef TCC_TARGET_PE
283 if (t & VT_EXPORT)
284 esym->st_other |= ST_PE_EXPORT;
285 #endif
288 /* ------------------------------------------------------------------------- */
289 /* update sym->c so that it points to an external symbol in section
290 'section' with value 'value' */
292 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
293 addr_t value, unsigned long size,
294 int can_add_underscore)
296 int sym_type, sym_bind, sh_num, info, other, t;
297 ElfW(Sym) *esym;
298 const char *name;
299 char buf1[256];
300 #ifdef CONFIG_TCC_BCHECK
301 char buf[32];
302 #endif
304 if (section == NULL)
305 sh_num = SHN_UNDEF;
306 else if (section == SECTION_ABS)
307 sh_num = SHN_ABS;
308 else if (section == SECTION_COMMON)
309 sh_num = SHN_COMMON;
310 else
311 sh_num = section->sh_num;
313 if (!sym->c) {
314 name = get_tok_str(sym->v, NULL);
315 #ifdef CONFIG_TCC_BCHECK
316 if (tcc_state->do_bounds_check) {
317 /* XXX: avoid doing that for statics ? */
318 /* if bound checking is activated, we change some function
319 names by adding the "__bound" prefix */
320 switch(sym->v) {
321 #ifdef TCC_TARGET_PE
322 /* XXX: we rely only on malloc hooks */
323 case TOK_malloc:
324 case TOK_free:
325 case TOK_realloc:
326 case TOK_memalign:
327 case TOK_calloc:
328 #endif
329 case TOK_memcpy:
330 case TOK_memmove:
331 case TOK_memset:
332 case TOK_strlen:
333 case TOK_strcpy:
334 case TOK_alloca:
335 strcpy(buf, "__bound_");
336 strcat(buf, name);
337 name = buf;
338 break;
341 #endif
342 t = sym->type.t;
343 if ((t & VT_BTYPE) == VT_FUNC) {
344 sym_type = STT_FUNC;
345 } else if ((t & VT_BTYPE) == VT_VOID) {
346 sym_type = STT_NOTYPE;
347 } else {
348 sym_type = STT_OBJECT;
350 if (t & VT_STATIC)
351 sym_bind = STB_LOCAL;
352 else
353 sym_bind = STB_GLOBAL;
354 other = 0;
355 #ifdef TCC_TARGET_PE
356 if (sym_type == STT_FUNC && sym->type.ref) {
357 Sym *ref = sym->type.ref;
358 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
359 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
360 name = buf1;
361 other |= ST_PE_STDCALL;
362 can_add_underscore = 0;
365 if (t & VT_IMPORT)
366 other |= ST_PE_IMPORT;
367 #endif
368 if (tcc_state->leading_underscore && can_add_underscore) {
369 buf1[0] = '_';
370 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
371 name = buf1;
373 if (sym->asm_label)
374 name = get_tok_str(sym->asm_label, NULL);
375 info = ELFW(ST_INFO)(sym_bind, sym_type);
376 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
377 } else {
378 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
379 esym->st_value = value;
380 esym->st_size = size;
381 esym->st_shndx = sh_num;
383 update_storage(sym);
386 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
387 addr_t value, unsigned long size)
389 put_extern_sym2(sym, section, value, size, 1);
392 /* add a new relocation entry to symbol 'sym' in section 's' */
393 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
394 addr_t addend)
396 int c = 0;
398 if (nocode_wanted && s == cur_text_section)
399 return;
401 if (sym) {
402 if (0 == sym->c)
403 put_extern_sym(sym, NULL, 0, 0);
404 c = sym->c;
407 /* now we can add ELF relocation info */
408 put_elf_reloca(symtab_section, s, offset, type, c, addend);
411 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
413 greloca(s, sym, offset, type, 0);
416 /* ------------------------------------------------------------------------- */
417 /* symbol allocator */
418 static Sym *__sym_malloc(void)
420 Sym *sym_pool, *sym, *last_sym;
421 int i;
423 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
424 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
426 last_sym = sym_free_first;
427 sym = sym_pool;
428 for(i = 0; i < SYM_POOL_NB; i++) {
429 sym->next = last_sym;
430 last_sym = sym;
431 sym++;
433 sym_free_first = last_sym;
434 return last_sym;
437 static inline Sym *sym_malloc(void)
439 Sym *sym;
440 #ifndef SYM_DEBUG
441 sym = sym_free_first;
442 if (!sym)
443 sym = __sym_malloc();
444 sym_free_first = sym->next;
445 return sym;
446 #else
447 sym = tcc_malloc(sizeof(Sym));
448 return sym;
449 #endif
452 ST_INLN void sym_free(Sym *sym)
454 #ifndef SYM_DEBUG
455 sym->next = sym_free_first;
456 sym_free_first = sym;
457 #else
458 tcc_free(sym);
459 #endif
462 /* push, without hashing */
463 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
465 Sym *s;
467 s = sym_malloc();
468 s->scope = 0;
469 s->v = v;
470 s->type.t = t;
471 s->type.ref = NULL;
472 #ifdef _WIN64
473 s->d = NULL;
474 #endif
475 s->c = c;
476 s->next = NULL;
477 /* add in stack */
478 s->prev = *ps;
479 *ps = s;
480 return s;
483 /* find a symbol and return its associated structure. 's' is the top
484 of the symbol stack */
485 ST_FUNC Sym *sym_find2(Sym *s, int v)
487 while (s) {
488 if (s->v == v)
489 return s;
490 else if (s->v == -1)
491 return NULL;
492 s = s->prev;
494 return NULL;
497 /* structure lookup */
498 ST_INLN Sym *struct_find(int v)
500 v -= TOK_IDENT;
501 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
502 return NULL;
503 return table_ident[v]->sym_struct;
506 /* find an identifier */
507 ST_INLN Sym *sym_find(int v)
509 v -= TOK_IDENT;
510 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
511 return NULL;
512 return table_ident[v]->sym_identifier;
515 /* push a given symbol on the symbol stack */
516 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
518 Sym *s, **ps;
519 TokenSym *ts;
521 if (local_stack)
522 ps = &local_stack;
523 else
524 ps = &global_stack;
525 s = sym_push2(ps, v, type->t, c);
526 s->type.ref = type->ref;
527 s->r = r;
528 /* don't record fields or anonymous symbols */
529 /* XXX: simplify */
530 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
531 /* record symbol in token array */
532 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
533 if (v & SYM_STRUCT)
534 ps = &ts->sym_struct;
535 else
536 ps = &ts->sym_identifier;
537 s->prev_tok = *ps;
538 *ps = s;
539 s->scope = local_scope;
540 if (s->prev_tok && s->prev_tok->scope == s->scope)
541 tcc_error("redeclaration of '%s'",
542 get_tok_str(v & ~SYM_STRUCT, NULL));
544 return s;
547 /* push a global identifier */
548 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
550 Sym *s, **ps;
551 s = sym_push2(&global_stack, v, t, c);
552 /* don't record anonymous symbol */
553 if (v < SYM_FIRST_ANOM) {
554 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
555 /* modify the top most local identifier, so that
556 sym_identifier will point to 's' when popped */
557 while (*ps != NULL)
558 ps = &(*ps)->prev_tok;
559 s->prev_tok = NULL;
560 *ps = s;
562 return s;
565 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
566 pop them yet from the list, but do remove them from the token array. */
567 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
569 Sym *s, *ss, **ps;
570 TokenSym *ts;
571 int v;
573 s = *ptop;
574 while(s != b) {
575 ss = s->prev;
576 v = s->v;
577 /* remove symbol in token array */
578 /* XXX: simplify */
579 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
580 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
581 if (v & SYM_STRUCT)
582 ps = &ts->sym_struct;
583 else
584 ps = &ts->sym_identifier;
585 *ps = s->prev_tok;
587 if (!keep)
588 sym_free(s);
589 s = ss;
591 if (!keep)
592 *ptop = b;
595 /* ------------------------------------------------------------------------- */
597 static void vsetc(CType *type, int r, CValue *vc)
599 int v;
601 if (vtop >= vstack + (VSTACK_SIZE - 1))
602 tcc_error("memory full (vstack)");
603 /* cannot leave cpu flags if other instructions are generated. Also
604 avoid leaving VT_JMP anywhere except on the top of the stack
605 because it would complicate the code generator.
607 Don't do this when nocode_wanted. vtop might come from
608 !nocode_wanted regions (see 88_codeopt.c) and transforming
609 it to a register without actually generating code is wrong
610 as their value might still be used for real. All values
611 we push under nocode_wanted will eventually be popped
612 again, so that the VT_CMP/VT_JMP value will be in vtop
613 when code is unsuppressed again.
615 Same logic below in vswap(); */
616 if (vtop >= vstack && !nocode_wanted) {
617 v = vtop->r & VT_VALMASK;
618 if (v == VT_CMP || (v & ~1) == VT_JMP)
619 gv(RC_INT);
622 vtop++;
623 vtop->type = *type;
624 vtop->r = r;
625 vtop->r2 = VT_CONST;
626 vtop->c = *vc;
627 vtop->sym = NULL;
630 ST_FUNC void vswap(void)
632 SValue tmp;
633 /* cannot vswap cpu flags. See comment at vsetc() above */
634 if (vtop >= vstack && !nocode_wanted) {
635 int v = vtop->r & VT_VALMASK;
636 if (v == VT_CMP || (v & ~1) == VT_JMP)
637 gv(RC_INT);
639 tmp = vtop[0];
640 vtop[0] = vtop[-1];
641 vtop[-1] = tmp;
644 /* pop stack value */
645 ST_FUNC void vpop(void)
647 int v;
648 v = vtop->r & VT_VALMASK;
649 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
650 /* for x86, we need to pop the FP stack */
651 if (v == TREG_ST0) {
652 o(0xd8dd); /* fstp %st(0) */
653 } else
654 #endif
655 if (v == VT_JMP || v == VT_JMPI) {
656 /* need to put correct jump if && or || without test */
657 gsym(vtop->c.i);
659 vtop--;
662 /* push constant of type "type" with an unspecified value */
663 ST_FUNC void vpush(CType *type)
665 CValue cval;
666 vsetc(type, VT_CONST, &cval);
669 /* push integer constant */
670 ST_FUNC void vpushi(int v)
672 CValue cval;
673 cval.i = v;
674 vsetc(&int_type, VT_CONST, &cval);
677 /* push a pointer sized constant */
678 static void vpushs(addr_t v)
680 CValue cval;
681 cval.i = v;
682 vsetc(&size_type, VT_CONST, &cval);
685 /* push arbitrary 64bit constant */
686 ST_FUNC void vpush64(int ty, unsigned long long v)
688 CValue cval;
689 CType ctype;
690 ctype.t = ty;
691 ctype.ref = NULL;
692 cval.i = v;
693 vsetc(&ctype, VT_CONST, &cval);
696 /* push long long constant */
697 static inline void vpushll(long long v)
699 vpush64(VT_LLONG, v);
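/* Illustrative note: values pushed this way stay VT_CONST until code is
   actually needed, so e.g.  vpushi(6); vpushi(7); gen_op('*');  is folded
   by gen_opic() below into a single constant SValue with c.i == 42. */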
702 ST_FUNC void vset(CType *type, int r, long v)
704 CValue cval;
706 cval.i = v;
707 vsetc(type, r, &cval);
710 static void vseti(int r, int v)
712 CType type;
713 type.t = VT_INT;
714 type.ref = 0;
715 vset(&type, r, v);
718 ST_FUNC void vpushv(SValue *v)
720 if (vtop >= vstack + (VSTACK_SIZE - 1))
721 tcc_error("memory full (vstack)");
722 vtop++;
723 *vtop = *v;
726 static void vdup(void)
728 vpushv(vtop);
731 /* rotate n first stack elements to the bottom
732 I1 ... In -> I2 ... In I1 [top is right]
734 ST_FUNC void vrotb(int n)
736 int i;
737 SValue tmp;
739 tmp = vtop[-n + 1];
740 for(i=-n+1;i!=0;i++)
741 vtop[i] = vtop[i+1];
742 vtop[0] = tmp;
745 /* rotate the n elements before entry e towards the top
746 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
748 ST_FUNC void vrote(SValue *e, int n)
750 int i;
751 SValue tmp;
753 tmp = *e;
754 for(i = 0;i < n - 1; i++)
755 e[-i] = e[-i - 1];
756 e[-n + 1] = tmp;
759 /* rotate n first stack elements to the top
760 I1 ... In -> In I1 ... I(n-1) [top is right]
762 ST_FUNC void vrott(int n)
764 vrote(vtop, n);
767 /* push a symbol value of TYPE */
768 static inline void vpushsym(CType *type, Sym *sym)
770 CValue cval;
771 cval.i = 0;
772 vsetc(type, VT_CONST | VT_SYM, &cval);
773 vtop->sym = sym;
776 /* Return a static symbol pointing to a section */
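/* (used below by gv() to reference float constants that were written into
   data_section, and more generally wherever an anonymous section offset
   needs a relocatable symbol) */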
777 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
779 int v;
780 Sym *sym;
782 v = anon_sym++;
783 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
784 sym->type.ref = type->ref;
785 sym->r = VT_CONST | VT_SYM;
786 put_extern_sym(sym, sec, offset, size);
787 return sym;
790 /* push a reference to a section offset by adding a dummy symbol */
791 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
793 vpushsym(type, get_sym_ref(type, sec, offset, size));
796 /* define a new external reference to a symbol 'v' of type 'u' */
797 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
799 Sym *s;
801 s = sym_find(v);
802 if (!s) {
803 /* push forward reference */
804 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
805 s->type.ref = type->ref;
806 s->r = r | VT_CONST | VT_SYM;
808 return s;
811 /* Merge some storage attributes. */
812 static void patch_storage(Sym *sym, CType *type)
814 int t;
815 if (!is_compatible_types(&sym->type, type))
816 tcc_error("incompatible types for redefinition of '%s'",
817 get_tok_str(sym->v, NULL));
818 t = type->t;
819 #ifdef TCC_TARGET_PE
820 if ((sym->type.t ^ t) & VT_IMPORT)
821 tcc_error("incompatible dll linkage for redefinition of '%s'",
822 get_tok_str(sym->v, NULL));
823 #endif
824 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
825 if (t & VT_VIS_MASK) {
826 int vis = sym->type.t & VT_VIS_MASK;
827 int vis2 = t & VT_VIS_MASK;
828 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
829 vis = vis2;
830 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
831 vis = (vis < vis2) ? vis : vis2;
832 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
836 /* define a new external reference to a symbol 'v' */
837 static Sym *external_sym(int v, CType *type, int r)
839 Sym *s;
840 s = sym_find(v);
841 if (!s) {
842 /* push forward reference */
843 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
844 s->type.t |= VT_EXTERN;
845 } else {
846 if (s->type.ref == func_old_type.ref) {
847 s->type.ref = type->ref;
848 s->r = r | VT_CONST | VT_SYM;
849 s->type.t |= VT_EXTERN;
851 patch_storage(s, type);
852 update_storage(s);
854 return s;
857 /* push a reference to global symbol v */
858 ST_FUNC void vpush_global_sym(CType *type, int v)
860 vpushsym(type, external_global_sym(v, type, 0));
863 /* save registers up to (vtop - n) stack entry */
864 ST_FUNC void save_regs(int n)
866 SValue *p, *p1;
867 for(p = vstack, p1 = vtop - n; p <= p1; p++)
868 save_reg(p->r);
871 /* save r to the memory stack, and mark it as being free */
872 ST_FUNC void save_reg(int r)
874 save_reg_upstack(r, 0);
877 /* save r to the memory stack, and mark it as being free,
878 if seen up to (vtop - n) stack entry */
879 ST_FUNC void save_reg_upstack(int r, int n)
881 int l, saved, size, align;
882 SValue *p, *p1, sv;
883 CType *type;
885 if ((r &= VT_VALMASK) >= VT_CONST)
886 return;
887 if (nocode_wanted)
888 return;
890 /* modify all stack values */
891 saved = 0;
892 l = 0;
893 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
894 if ((p->r & VT_VALMASK) == r ||
895 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
896 /* must save value on stack if not already done */
897 if (!saved) {
898 /* NOTE: must reload 'r' because r might be equal to r2 */
899 r = p->r & VT_VALMASK;
900 /* store register in the stack */
901 type = &p->type;
902 if ((p->r & VT_LVAL) ||
903 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
904 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
905 type = &char_pointer_type;
906 #else
907 type = &int_type;
908 #endif
909 size = type_size(type, &align);
910 loc = (loc - size) & -align;
911 sv.type.t = type->t;
912 sv.r = VT_LOCAL | VT_LVAL;
913 sv.c.i = loc;
914 store(r, &sv);
915 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
916 /* x86 specific: need to pop fp register ST0 if saved */
917 if (r == TREG_ST0) {
918 o(0xd8dd); /* fstp %st(0) */
920 #endif
921 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
922 /* special long long case */
923 if ((type->t & VT_BTYPE) == VT_LLONG) {
924 sv.c.i += 4;
925 store(p->r2, &sv);
927 #endif
928 l = loc;
929 saved = 1;
931 /* mark that stack entry as being saved on the stack */
932 if (p->r & VT_LVAL) {
933 /* also clear the bounded flag because the
934 relocation address of the function was stored in
935 p->c.i */
936 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
937 } else {
938 p->r = lvalue_type(p->type.t) | VT_LOCAL;
940 p->r2 = VT_CONST;
941 p->c.i = l;
946 #ifdef TCC_TARGET_ARM
947 /* find a register of class 'rc2' with at most one reference on stack.
948 * If none, call get_reg(rc) */
949 ST_FUNC int get_reg_ex(int rc, int rc2)
951 int r;
952 SValue *p;
954 for(r=0;r<NB_REGS;r++) {
955 if (reg_classes[r] & rc2) {
956 int n;
957 n=0;
958 for(p = vstack; p <= vtop; p++) {
959 if ((p->r & VT_VALMASK) == r ||
960 (p->r2 & VT_VALMASK) == r)
961 n++;
963 if (n <= 1)
964 return r;
967 return get_reg(rc);
969 #endif
971 /* find a free register of class 'rc'. If none, save one register */
972 ST_FUNC int get_reg(int rc)
974 int r;
975 SValue *p;
977 /* find a free register */
978 for(r=0;r<NB_REGS;r++) {
979 if (reg_classes[r] & rc) {
980 if (nocode_wanted)
981 return r;
982 for(p=vstack;p<=vtop;p++) {
983 if ((p->r & VT_VALMASK) == r ||
984 (p->r2 & VT_VALMASK) == r)
985 goto notfound;
987 return r;
989 notfound: ;
992 /* no register left : free the first one on the stack (VERY
993 IMPORTANT to start from the bottom to ensure that we don't
994 spill registers used in gen_opi()) */
995 for(p=vstack;p<=vtop;p++) {
996 /* look at second register (if long long) */
997 r = p->r2 & VT_VALMASK;
998 if (r < VT_CONST && (reg_classes[r] & rc))
999 goto save_found;
1000 r = p->r & VT_VALMASK;
1001 if (r < VT_CONST && (reg_classes[r] & rc)) {
1002 save_found:
1003 save_reg(r);
1004 return r;
1007 /* Should never come here */
1008 return -1;
1011 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1012 if needed */
1013 static void move_reg(int r, int s, int t)
1015 SValue sv;
1017 if (r != s) {
1018 save_reg(r);
1019 sv.type.t = t;
1020 sv.type.ref = NULL;
1021 sv.r = s;
1022 sv.c.i = 0;
1023 load(r, &sv);
1027 /* get address of vtop (vtop MUST BE an lvalue) */
1028 ST_FUNC void gaddrof(void)
1030 if (vtop->r & VT_REF)
1031 gv(RC_INT);
1032 vtop->r &= ~VT_LVAL;
1033 /* tricky: if saved lvalue, then we can go back to lvalue */
1034 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1035 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1040 #ifdef CONFIG_TCC_BCHECK
1041 /* generate lvalue bound code */
1042 static void gbound(void)
1044 int lval_type;
1045 CType type1;
1047 vtop->r &= ~VT_MUSTBOUND;
1048 /* if lvalue, then use checking code before dereferencing */
1049 if (vtop->r & VT_LVAL) {
1050 /* if not VT_BOUNDED value, then make one */
1051 if (!(vtop->r & VT_BOUNDED)) {
1052 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1053 /* must save type because we must set it to int to get pointer */
1054 type1 = vtop->type;
1055 vtop->type.t = VT_PTR;
1056 gaddrof();
1057 vpushi(0);
1058 gen_bounded_ptr_add();
1059 vtop->r |= lval_type;
1060 vtop->type = type1;
1062 /* then check for dereferencing */
1063 gen_bounded_ptr_deref();
1066 #endif
1068 /* store vtop in a register belonging to class 'rc'. lvalues are
1069 converted to values. Cannot be used if the value cannot be converted
1070 to a register value (such as structures). */
1071 ST_FUNC int gv(int rc)
1073 int r, bit_pos, bit_size, size, align, i;
1074 int rc2;
1076 /* NOTE: get_reg can modify vstack[] */
1077 if (vtop->type.t & VT_BITFIELD) {
1078 CType type;
1079 int bits = 32;
1080 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1081 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1082 /* remove bit field info to avoid loops */
1083 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1084 /* cast to int to propagate signedness in following ops */
1085 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1086 type.t = VT_LLONG;
1087 bits = 64;
1088 } else
1089 type.t = VT_INT;
1090 if((vtop->type.t & VT_UNSIGNED) ||
1091 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1092 type.t |= VT_UNSIGNED;
1093 gen_cast(&type);
1094 /* generate shifts */
1095 vpushi(bits - (bit_pos + bit_size));
1096 gen_op(TOK_SHL);
1097 vpushi(bits - bit_size);
1098 /* NOTE: transformed to SHR if unsigned */
1099 gen_op(TOK_SAR);
1100 r = gv(rc);
1101 } else {
1102 if (is_float(vtop->type.t) &&
1103 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1104 Sym *sym;
1105 int *ptr;
1106 unsigned long offset;
1107 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1108 CValue check;
1109 #endif
1111 /* XXX: unify with initializers handling ? */
1112 /* CPUs usually cannot use float constants, so we store them
1113 generically in data segment */
1114 size = type_size(&vtop->type, &align);
1115 offset = (data_section->data_offset + align - 1) & -align;
1116 data_section->data_offset = offset;
1117 /* XXX: not portable yet */
1118 #if defined(__i386__) || defined(__x86_64__)
1119 /* Zero pad x87 tenbyte long doubles */
1120 if (size == LDOUBLE_SIZE) {
1121 vtop->c.tab[2] &= 0xffff;
1122 #if LDOUBLE_SIZE == 16
1123 vtop->c.tab[3] = 0;
1124 #endif
1126 #endif
1127 ptr = section_ptr_add(data_section, size);
1128 size = size >> 2;
1129 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1130 check.d = 1;
1131 if(check.tab[0])
1132 for(i=0;i<size;i++)
1133 ptr[i] = vtop->c.tab[size-1-i];
1134 else
1135 #endif
1136 for(i=0;i<size;i++)
1137 ptr[i] = vtop->c.tab[i];
1138 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1139 vtop->r |= VT_LVAL | VT_SYM;
1140 vtop->sym = sym;
1141 vtop->c.i = 0;
1143 #ifdef CONFIG_TCC_BCHECK
1144 if (vtop->r & VT_MUSTBOUND)
1145 gbound();
1146 #endif
1148 r = vtop->r & VT_VALMASK;
1149 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1150 #ifndef TCC_TARGET_ARM64
1151 if (rc == RC_IRET)
1152 rc2 = RC_LRET;
1153 #ifdef TCC_TARGET_X86_64
1154 else if (rc == RC_FRET)
1155 rc2 = RC_QRET;
1156 #endif
1157 #endif
1158 /* need to reload if:
1159 - constant
1160 - lvalue (need to dereference pointer)
1161 - already a register, but not in the right class */
1162 if (r >= VT_CONST
1163 || (vtop->r & VT_LVAL)
1164 || !(reg_classes[r] & rc)
1165 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1166 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1167 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1168 #else
1169 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1170 #endif
1173 r = get_reg(rc);
1174 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1175 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1176 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1177 #else
1178 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1179 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1180 unsigned long long ll;
1181 #endif
1182 int r2, original_type;
1183 original_type = vtop->type.t;
1184 /* two register type load : expand to two words
1185 temporarily */
1186 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1187 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1188 /* load constant */
1189 ll = vtop->c.i;
1190 vtop->c.i = ll; /* first word */
1191 load(r, vtop);
1192 vtop->r = r; /* save register value */
1193 vpushi(ll >> 32); /* second word */
1194 } else
1195 #endif
1196 if (vtop->r & VT_LVAL) {
1197 /* We do not want to modify the long long
1198 pointer here, so the safest (and least
1199 efficient) approach is to save all the other registers
1200 on the stack. XXX: totally inefficient. */
1201 #if 0
1202 save_regs(1);
1203 #else
1204 /* lvalue_save: save only if used further down the stack */
1205 save_reg_upstack(vtop->r, 1);
1206 #endif
1207 /* load from memory */
1208 vtop->type.t = load_type;
1209 load(r, vtop);
1210 vdup();
1211 vtop[-1].r = r; /* save register value */
1212 /* increment pointer to get second word */
1213 vtop->type.t = addr_type;
1214 gaddrof();
1215 vpushi(load_size);
1216 gen_op('+');
1217 vtop->r |= VT_LVAL;
1218 vtop->type.t = load_type;
1219 } else {
1220 /* move registers */
1221 load(r, vtop);
1222 vdup();
1223 vtop[-1].r = r; /* save register value */
1224 vtop->r = vtop[-1].r2;
1226 /* Allocate second register. Here we rely on the fact that
1227 get_reg() tries first to free r2 of an SValue. */
1228 r2 = get_reg(rc2);
1229 load(r2, vtop);
1230 vpop();
1231 /* write second register */
1232 vtop->r2 = r2;
1233 vtop->type.t = original_type;
1234 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1235 int t1, t;
1236 /* lvalue of scalar type : need to use lvalue type
1237 because of possible cast */
1238 t = vtop->type.t;
1239 t1 = t;
1240 /* compute memory access type */
1241 if (vtop->r & VT_REF)
1242 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1243 t = VT_PTR;
1244 #else
1245 t = VT_INT;
1246 #endif
1247 else if (vtop->r & VT_LVAL_BYTE)
1248 t = VT_BYTE;
1249 else if (vtop->r & VT_LVAL_SHORT)
1250 t = VT_SHORT;
1251 if (vtop->r & VT_LVAL_UNSIGNED)
1252 t |= VT_UNSIGNED;
1253 vtop->type.t = t;
1254 load(r, vtop);
1255 /* restore wanted type */
1256 vtop->type.t = t1;
1257 } else {
1258 /* one register type load */
1259 load(r, vtop);
1262 vtop->r = r;
1263 #ifdef TCC_TARGET_C67
1264 /* uses register pairs for doubles */
1265 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1266 vtop->r2 = r+1;
1267 #endif
1269 return r;
1272 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1273 ST_FUNC void gv2(int rc1, int rc2)
1275 int v;
1277 /* generate more generic register first. But VT_JMP or VT_CMP
1278 values must be generated first in all cases to avoid possible
1279 reload errors */
1280 v = vtop[0].r & VT_VALMASK;
1281 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1282 vswap();
1283 gv(rc1);
1284 vswap();
1285 gv(rc2);
1286 /* test if reload is needed for first register */
1287 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1288 vswap();
1289 gv(rc1);
1290 vswap();
1292 } else {
1293 gv(rc2);
1294 vswap();
1295 gv(rc1);
1296 vswap();
1297 /* test if reload is needed for first register */
1298 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1299 gv(rc2);
1304 #ifndef TCC_TARGET_ARM64
1305 /* wrapper around RC_FRET to return a register by type */
1306 static int rc_fret(int t)
1308 #ifdef TCC_TARGET_X86_64
1309 if (t == VT_LDOUBLE) {
1310 return RC_ST0;
1312 #endif
1313 return RC_FRET;
1315 #endif
1317 /* wrapper around REG_FRET to return a register by type */
1318 static int reg_fret(int t)
1320 #ifdef TCC_TARGET_X86_64
1321 if (t == VT_LDOUBLE) {
1322 return TREG_ST0;
1324 #endif
1325 return REG_FRET;
1328 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1329 /* expand 64bit on stack in two ints */
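/* after lexpand() the low 32 bits sit at vtop[-1] and the high 32 bits at
   vtop[0]: constants are split by shifting, lvalues by offsetting the
   address by 4 (little-endian targets), register pairs by exposing r2 */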
1330 static void lexpand(void)
1332 int u, v;
1333 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1334 v = vtop->r & (VT_VALMASK | VT_LVAL);
1335 if (v == VT_CONST) {
1336 vdup();
1337 vtop[0].c.i >>= 32;
1338 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1339 vdup();
1340 vtop[0].c.i += 4;
1341 } else {
1342 gv(RC_INT);
1343 vdup();
1344 vtop[0].r = vtop[-1].r2;
1345 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1347 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1349 #endif
1351 #ifdef TCC_TARGET_ARM
1352 /* expand long long on stack */
1353 ST_FUNC void lexpand_nr(void)
1355 int u,v;
1357 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1358 vdup();
1359 vtop->r2 = VT_CONST;
1360 vtop->type.t = VT_INT | u;
1361 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1362 if (v == VT_CONST) {
1363 vtop[-1].c.i = vtop->c.i;
1364 vtop->c.i = vtop->c.i >> 32;
1365 vtop->r = VT_CONST;
1366 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1367 vtop->c.i += 4;
1368 vtop->r = vtop[-1].r;
1369 } else if (v > VT_CONST) {
1370 vtop--;
1371 lexpand();
1372 } else
1373 vtop->r = vtop[-1].r2;
1374 vtop[-1].r2 = VT_CONST;
1375 vtop[-1].type.t = VT_INT | u;
1377 #endif
1379 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1380 /* build a long long from two ints */
1381 static void lbuild(int t)
1383 gv2(RC_INT, RC_INT);
1384 vtop[-1].r2 = vtop[0].r;
1385 vtop[-1].type.t = t;
1386 vpop();
1388 #endif
1390 /* convert stack entry to register and duplicate its value in another
1391 register */
1392 static void gv_dup(void)
1394 int rc, t, r, r1;
1395 SValue sv;
1397 t = vtop->type.t;
1398 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1399 if ((t & VT_BTYPE) == VT_LLONG) {
1400 lexpand();
1401 gv_dup();
1402 vswap();
1403 vrotb(3);
1404 gv_dup();
1405 vrotb(4);
1406 /* stack: H L L1 H1 */
1407 lbuild(t);
1408 vrotb(3);
1409 vrotb(3);
1410 vswap();
1411 lbuild(t);
1412 vswap();
1413 } else
1414 #endif
1416 /* duplicate value */
1417 rc = RC_INT;
1418 sv.type.t = VT_INT;
1419 if (is_float(t)) {
1420 rc = RC_FLOAT;
1421 #ifdef TCC_TARGET_X86_64
1422 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1423 rc = RC_ST0;
1425 #endif
1426 sv.type.t = t;
1428 r = gv(rc);
1429 r1 = get_reg(rc);
1430 sv.r = r;
1431 sv.c.i = 0;
1432 load(r1, &sv); /* move r to r1 */
1433 vdup();
1434 /* duplicates value */
1435 if (r != r1)
1436 vtop->r = r1;
1440 /* Generate value test
1442 * Generate a test for any value (jump, comparison and integers) */
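/* The returned value is a jump chain (to be resolved with gsym()); with
   inv == 0 the jump is taken when the value is non-zero, with inv != 0 when
   it is zero, as the constant-folding branch below makes explicit. */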
1443 ST_FUNC int gvtst(int inv, int t)
1445 int v = vtop->r & VT_VALMASK;
1446 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1447 vpushi(0);
1448 gen_op(TOK_NE);
1450 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1451 /* constant jmp optimization */
1452 if ((vtop->c.i != 0) != inv)
1453 t = gjmp(t);
1454 vtop--;
1455 return t;
1457 return gtst(inv, t);
1460 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1461 /* generate CPU independent (unsigned) long long operations */
1462 static void gen_opl(int op)
1464 int t, a, b, op1, c, i;
1465 int func;
1466 unsigned short reg_iret = REG_IRET;
1467 unsigned short reg_lret = REG_LRET;
1468 SValue tmp;
1470 switch(op) {
1471 case '/':
1472 case TOK_PDIV:
1473 func = TOK___divdi3;
1474 goto gen_func;
1475 case TOK_UDIV:
1476 func = TOK___udivdi3;
1477 goto gen_func;
1478 case '%':
1479 func = TOK___moddi3;
1480 goto gen_mod_func;
1481 case TOK_UMOD:
1482 func = TOK___umoddi3;
1483 gen_mod_func:
1484 #ifdef TCC_ARM_EABI
1485 reg_iret = TREG_R2;
1486 reg_lret = TREG_R3;
1487 #endif
1488 gen_func:
1489 /* call generic long long function */
1490 vpush_global_sym(&func_old_type, func);
1491 vrott(3);
1492 gfunc_call(2);
1493 vpushi(0);
1494 vtop->r = reg_iret;
1495 vtop->r2 = reg_lret;
1496 break;
1497 case '^':
1498 case '&':
1499 case '|':
1500 case '*':
1501 case '+':
1502 case '-':
1503 //pv("gen_opl A",0,2);
1504 t = vtop->type.t;
1505 vswap();
1506 lexpand();
1507 vrotb(3);
1508 lexpand();
1509 /* stack: L1 H1 L2 H2 */
1510 tmp = vtop[0];
1511 vtop[0] = vtop[-3];
1512 vtop[-3] = tmp;
1513 tmp = vtop[-2];
1514 vtop[-2] = vtop[-3];
1515 vtop[-3] = tmp;
1516 vswap();
1517 /* stack: H1 H2 L1 L2 */
1518 //pv("gen_opl B",0,4);
1519 if (op == '*') {
1520 vpushv(vtop - 1);
1521 vpushv(vtop - 1);
1522 gen_op(TOK_UMULL);
1523 lexpand();
1524 /* stack: H1 H2 L1 L2 ML MH */
1525 for(i=0;i<4;i++)
1526 vrotb(6);
1527 /* stack: ML MH H1 H2 L1 L2 */
1528 tmp = vtop[0];
1529 vtop[0] = vtop[-2];
1530 vtop[-2] = tmp;
1531 /* stack: ML MH H1 L2 H2 L1 */
1532 gen_op('*');
1533 vrotb(3);
1534 vrotb(3);
1535 gen_op('*');
1536 /* stack: ML MH M1 M2 */
1537 gen_op('+');
1538 gen_op('+');
1539 } else if (op == '+' || op == '-') {
1540 /* XXX: add non carry method too (for MIPS or alpha) */
1541 if (op == '+')
1542 op1 = TOK_ADDC1;
1543 else
1544 op1 = TOK_SUBC1;
1545 gen_op(op1);
1546 /* stack: H1 H2 (L1 op L2) */
1547 vrotb(3);
1548 vrotb(3);
1549 gen_op(op1 + 1); /* TOK_xxxC2 */
1550 } else {
1551 gen_op(op);
1552 /* stack: H1 H2 (L1 op L2) */
1553 vrotb(3);
1554 vrotb(3);
1555 /* stack: (L1 op L2) H1 H2 */
1556 gen_op(op);
1557 /* stack: (L1 op L2) (H1 op H2) */
1559 /* stack: L H */
1560 lbuild(t);
1561 break;
1562 case TOK_SAR:
1563 case TOK_SHR:
1564 case TOK_SHL:
1565 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1566 t = vtop[-1].type.t;
1567 vswap();
1568 lexpand();
1569 vrotb(3);
1570 /* stack: L H shift */
1571 c = (int)vtop->c.i;
1572 /* constant: simpler */
1573 /* NOTE: all comments are for SHL. The other cases are
1574 done by swapping words */
1575 vpop();
1576 if (op != TOK_SHL)
1577 vswap();
1578 if (c >= 32) {
1579 /* stack: L H */
1580 vpop();
1581 if (c > 32) {
1582 vpushi(c - 32);
1583 gen_op(op);
1585 if (op != TOK_SAR) {
1586 vpushi(0);
1587 } else {
1588 gv_dup();
1589 vpushi(31);
1590 gen_op(TOK_SAR);
1592 vswap();
1593 } else {
1594 vswap();
1595 gv_dup();
1596 /* stack: H L L */
1597 vpushi(c);
1598 gen_op(op);
1599 vswap();
1600 vpushi(32 - c);
1601 if (op == TOK_SHL)
1602 gen_op(TOK_SHR);
1603 else
1604 gen_op(TOK_SHL);
1605 vrotb(3);
1606 /* stack: L L H */
1607 vpushi(c);
1608 if (op == TOK_SHL)
1609 gen_op(TOK_SHL);
1610 else
1611 gen_op(TOK_SHR);
1612 gen_op('|');
1614 if (op != TOK_SHL)
1615 vswap();
1616 lbuild(t);
1617 } else {
1618 /* XXX: should provide a faster fallback on x86 ? */
1619 switch(op) {
1620 case TOK_SAR:
1621 func = TOK___ashrdi3;
1622 goto gen_func;
1623 case TOK_SHR:
1624 func = TOK___lshrdi3;
1625 goto gen_func;
1626 case TOK_SHL:
1627 func = TOK___ashldi3;
1628 goto gen_func;
1631 break;
1632 default:
1633 /* compare operations */
1634 t = vtop->type.t;
1635 vswap();
1636 lexpand();
1637 vrotb(3);
1638 lexpand();
1639 /* stack: L1 H1 L2 H2 */
1640 tmp = vtop[-1];
1641 vtop[-1] = vtop[-2];
1642 vtop[-2] = tmp;
1643 /* stack: L1 L2 H1 H2 */
1644 /* compare high */
1645 op1 = op;
1646 /* when values are equal, we need to compare low words. since
1647 the jump is inverted, we invert the test too. */
1648 if (op1 == TOK_LT)
1649 op1 = TOK_LE;
1650 else if (op1 == TOK_GT)
1651 op1 = TOK_GE;
1652 else if (op1 == TOK_ULT)
1653 op1 = TOK_ULE;
1654 else if (op1 == TOK_UGT)
1655 op1 = TOK_UGE;
1656 a = 0;
1657 b = 0;
1658 gen_op(op1);
1659 if (op == TOK_NE) {
1660 b = gvtst(0, 0);
1661 } else {
1662 a = gvtst(1, 0);
1663 if (op != TOK_EQ) {
1664 /* generate non equal test */
1665 vpushi(TOK_NE);
1666 vtop->r = VT_CMP;
1667 b = gvtst(0, 0);
1670 /* compare low. Always unsigned */
1671 op1 = op;
1672 if (op1 == TOK_LT)
1673 op1 = TOK_ULT;
1674 else if (op1 == TOK_LE)
1675 op1 = TOK_ULE;
1676 else if (op1 == TOK_GT)
1677 op1 = TOK_UGT;
1678 else if (op1 == TOK_GE)
1679 op1 = TOK_UGE;
1680 gen_op(op1);
1681 a = gvtst(1, a);
1682 gsym(b);
1683 vseti(VT_JMPI, a);
1684 break;
1687 #endif
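/* constant-fold signed 64-bit division using only unsigned arithmetic:
   divide the magnitudes, then negate the result if the operand signs
   differ (a >> 63 and b >> 63 test the sign bits) */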
1689 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1691 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1692 return (a ^ b) >> 63 ? -x : x;
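/* signed '<' on the unsigned representation: flipping the sign bit of both
   operands (a 2^63 bias) makes the unsigned comparison order match the
   signed one */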
1695 static int gen_opic_lt(uint64_t a, uint64_t b)
1697 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1700 /* handle integer constant folding and various machine-
1701 independent optimizations */
1702 static void gen_opic(int op)
1704 SValue *v1 = vtop - 1;
1705 SValue *v2 = vtop;
1706 int t1 = v1->type.t & VT_BTYPE;
1707 int t2 = v2->type.t & VT_BTYPE;
1708 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1709 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1710 uint64_t l1 = c1 ? v1->c.i : 0;
1711 uint64_t l2 = c2 ? v2->c.i : 0;
1712 int shm = (t1 == VT_LLONG) ? 63 : 31;
1714 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1715 l1 = ((uint32_t)l1 |
1716 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1717 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1718 l2 = ((uint32_t)l2 |
1719 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1721 if (c1 && c2) {
1722 switch(op) {
1723 case '+': l1 += l2; break;
1724 case '-': l1 -= l2; break;
1725 case '&': l1 &= l2; break;
1726 case '^': l1 ^= l2; break;
1727 case '|': l1 |= l2; break;
1728 case '*': l1 *= l2; break;
1730 case TOK_PDIV:
1731 case '/':
1732 case '%':
1733 case TOK_UDIV:
1734 case TOK_UMOD:
1735 /* if division by zero, generate explicit division */
1736 if (l2 == 0) {
1737 if (const_wanted)
1738 tcc_error("division by zero in constant");
1739 goto general_case;
1741 switch(op) {
1742 default: l1 = gen_opic_sdiv(l1, l2); break;
1743 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1744 case TOK_UDIV: l1 = l1 / l2; break;
1745 case TOK_UMOD: l1 = l1 % l2; break;
1747 break;
1748 case TOK_SHL: l1 <<= (l2 & shm); break;
1749 case TOK_SHR: l1 >>= (l2 & shm); break;
1750 case TOK_SAR:
1751 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1752 break;
1753 /* tests */
1754 case TOK_ULT: l1 = l1 < l2; break;
1755 case TOK_UGE: l1 = l1 >= l2; break;
1756 case TOK_EQ: l1 = l1 == l2; break;
1757 case TOK_NE: l1 = l1 != l2; break;
1758 case TOK_ULE: l1 = l1 <= l2; break;
1759 case TOK_UGT: l1 = l1 > l2; break;
1760 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1761 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1762 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1763 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1764 /* logical */
1765 case TOK_LAND: l1 = l1 && l2; break;
1766 case TOK_LOR: l1 = l1 || l2; break;
1767 default:
1768 goto general_case;
1770 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1771 l1 = ((uint32_t)l1 |
1772 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1773 v1->c.i = l1;
1774 vtop--;
1775 } else {
1776 /* if commutative ops, put c2 as constant */
1777 if (c1 && (op == '+' || op == '&' || op == '^' ||
1778 op == '|' || op == '*')) {
1779 vswap();
1780 c2 = c1; //c = c1, c1 = c2, c2 = c;
1781 l2 = l1; //l = l1, l1 = l2, l2 = l;
1783 if (!const_wanted &&
1784 c1 && ((l1 == 0 &&
1785 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1786 (l1 == -1 && op == TOK_SAR))) {
1787 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1788 vtop--;
1789 } else if (!const_wanted &&
1790 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1791 (l2 == -1 && op == '|') ||
1792 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1793 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1794 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1795 if (l2 == 1)
1796 vtop->c.i = 0;
1797 vswap();
1798 vtop--;
1799 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1800 op == TOK_PDIV) &&
1801 l2 == 1) ||
1802 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1803 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1804 l2 == 0) ||
1805 (op == '&' &&
1806 l2 == -1))) {
1807 /* filter out NOP operations like x*1, x-0, x&-1... */
1808 vtop--;
1809 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1810 /* try to use shifts instead of muls or divs */
1811 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1812 int n = -1;
1813 while (l2) {
1814 l2 >>= 1;
1815 n++;
1817 vtop->c.i = n;
1818 if (op == '*')
1819 op = TOK_SHL;
1820 else if (op == TOK_PDIV)
1821 op = TOK_SAR;
1822 else
1823 op = TOK_SHR;
1825 goto general_case;
1826 } else if (c2 && (op == '+' || op == '-') &&
1827 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1828 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1829 /* symbol + constant case */
1830 if (op == '-')
1831 l2 = -l2;
1832 l2 += vtop[-1].c.i;
1833 /* The backends can't always deal with addends to symbols
1834 larger than +-1<<31. Don't construct such. */
1835 if ((int)l2 != l2)
1836 goto general_case;
1837 vtop--;
1838 vtop->c.i = l2;
1839 } else {
1840 general_case:
1841 /* call low level op generator */
1842 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1843 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1844 gen_opl(op);
1845 else
1846 gen_opi(op);
1851 /* generate a floating point operation with constant propagation */
1852 static void gen_opif(int op)
1854 int c1, c2;
1855 SValue *v1, *v2;
1856 long double f1, f2;
1858 v1 = vtop - 1;
1859 v2 = vtop;
1860 /* currently, we cannot do computations with forward symbols */
1861 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1862 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1863 if (c1 && c2) {
1864 if (v1->type.t == VT_FLOAT) {
1865 f1 = v1->c.f;
1866 f2 = v2->c.f;
1867 } else if (v1->type.t == VT_DOUBLE) {
1868 f1 = v1->c.d;
1869 f2 = v2->c.d;
1870 } else {
1871 f1 = v1->c.ld;
1872 f2 = v2->c.ld;
1875 /* NOTE: we only do constant propagation if finite number (not
1876 NaN or infinity) (ANSI spec) */
1877 if (!ieee_finite(f1) || !ieee_finite(f2))
1878 goto general_case;
1880 switch(op) {
1881 case '+': f1 += f2; break;
1882 case '-': f1 -= f2; break;
1883 case '*': f1 *= f2; break;
1884 case '/':
1885 if (f2 == 0.0) {
1886 if (const_wanted)
1887 tcc_error("division by zero in constant");
1888 goto general_case;
1890 f1 /= f2;
1891 break;
1892 /* XXX: also handles tests ? */
1893 default:
1894 goto general_case;
1896 /* XXX: overflow test ? */
1897 if (v1->type.t == VT_FLOAT) {
1898 v1->c.f = f1;
1899 } else if (v1->type.t == VT_DOUBLE) {
1900 v1->c.d = f1;
1901 } else {
1902 v1->c.ld = f1;
1904 vtop--;
1905 } else {
1906 general_case:
1907 gen_opf(op);
1911 static int pointed_size(CType *type)
1913 int align;
1914 return type_size(pointed_type(type), &align);
1917 static void vla_runtime_pointed_size(CType *type)
1919 int align;
1920 vla_runtime_type_size(pointed_type(type), &align);
1923 static inline int is_null_pointer(SValue *p)
1925 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1926 return 0;
1927 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1928 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1929 ((p->type.t & VT_BTYPE) == VT_PTR &&
1930 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1933 static inline int is_integer_btype(int bt)
1935 return (bt == VT_BYTE || bt == VT_SHORT ||
1936 bt == VT_INT || bt == VT_LLONG);
1939 /* check types for comparison or subtraction of pointers */
1940 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1942 CType *type1, *type2, tmp_type1, tmp_type2;
1943 int bt1, bt2;
1945 /* null pointers are accepted for all comparisons, as in gcc */
1946 if (is_null_pointer(p1) || is_null_pointer(p2))
1947 return;
1948 type1 = &p1->type;
1949 type2 = &p2->type;
1950 bt1 = type1->t & VT_BTYPE;
1951 bt2 = type2->t & VT_BTYPE;
1952 /* accept comparison between pointer and integer with a warning */
1953 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1954 if (op != TOK_LOR && op != TOK_LAND )
1955 tcc_warning("comparison between pointer and integer");
1956 return;
1959 /* both must be pointers or implicit function pointers */
1960 if (bt1 == VT_PTR) {
1961 type1 = pointed_type(type1);
1962 } else if (bt1 != VT_FUNC)
1963 goto invalid_operands;
1965 if (bt2 == VT_PTR) {
1966 type2 = pointed_type(type2);
1967 } else if (bt2 != VT_FUNC) {
1968 invalid_operands:
1969 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1971 if ((type1->t & VT_BTYPE) == VT_VOID ||
1972 (type2->t & VT_BTYPE) == VT_VOID)
1973 return;
1974 tmp_type1 = *type1;
1975 tmp_type2 = *type2;
1976 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1977 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1978 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1979 /* gcc-like error if '-' is used */
1980 if (op == '-')
1981 goto invalid_operands;
1982 else
1983 tcc_warning("comparison of distinct pointer types lacks a cast");
1987 /* generic gen_op: handles operand type conversions and pointer arithmetic */
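/* Worked example (sketch): for  int *p; p + 3;  the pointer branch below
   pushes pointed_size() == sizeof(int), multiplies the integer operand by
   it with gen_op('*'), and only then adds, so the addend is scaled to
   elements rather than bytes. */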
1988 ST_FUNC void gen_op(int op)
1990 int u, t1, t2, bt1, bt2, t;
1991 CType type1;
1993 redo:
1994 t1 = vtop[-1].type.t;
1995 t2 = vtop[0].type.t;
1996 bt1 = t1 & VT_BTYPE;
1997 bt2 = t2 & VT_BTYPE;
1999 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2000 tcc_error("operation on a struct");
2001 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2002 if (bt2 == VT_FUNC) {
2003 mk_pointer(&vtop->type);
2004 gaddrof();
2006 if (bt1 == VT_FUNC) {
2007 vswap();
2008 mk_pointer(&vtop->type);
2009 gaddrof();
2010 vswap();
2012 goto redo;
2013 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2014 /* at least one operand is a pointer */
2015 /* relational op: both operands must be pointers */
2016 if (op >= TOK_ULT && op <= TOK_LOR) {
2017 check_comparison_pointer_types(vtop - 1, vtop, op);
2018 /* pointers are handled as unsigned */
2019 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2020 t = VT_LLONG | VT_UNSIGNED;
2021 #else
2022 t = VT_INT | VT_UNSIGNED;
2023 #endif
2024 goto std_op;
2026 /* if both pointers, then it must be the '-' op */
2027 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2028 if (op != '-')
2029 tcc_error("cannot use pointers here");
2030 check_comparison_pointer_types(vtop - 1, vtop, op);
2031 /* XXX: check that types are compatible */
2032 if (vtop[-1].type.t & VT_VLA) {
2033 vla_runtime_pointed_size(&vtop[-1].type);
2034 } else {
2035 vpushi(pointed_size(&vtop[-1].type));
2037 vrott(3);
2038 gen_opic(op);
2039 /* set to integer type */
2040 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2041 vtop->type.t = VT_LLONG;
2042 #else
2043 vtop->type.t = VT_INT;
2044 #endif
2045 vswap();
2046 gen_op(TOK_PDIV);
2047 } else {
2048 /* exactly one pointer : must be '+' or '-'. */
2049 if (op != '-' && op != '+')
2050 tcc_error("cannot use pointers here");
2051 /* Put pointer as first operand */
2052 if (bt2 == VT_PTR) {
2053 vswap();
2054 t = t1, t1 = t2, t2 = t;
2056 #if PTR_SIZE == 4
2057 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2058 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2059 gen_cast(&int_type);
2060 #endif
2061 type1 = vtop[-1].type;
2062 type1.t &= ~VT_ARRAY;
2063 if (vtop[-1].type.t & VT_VLA)
2064 vla_runtime_pointed_size(&vtop[-1].type);
2065 else {
2066 u = pointed_size(&vtop[-1].type);
2067 if (u < 0)
2068 tcc_error("unknown array element size");
2069 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2070 vpushll(u);
2071 #else
2072 /* XXX: cast to int ? (long long case) */
2073 vpushi(u);
2074 #endif
2076 gen_op('*');
2077 #if 0
2078 /* #ifdef CONFIG_TCC_BCHECK
2079 The main reason for removing this code:
2080 #include <stdio.h>
2081 int main ()
2083 int v[10];
2084 int i = 10;
2085 int j = 9;
2086 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2087 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2089 When this code is enabled, the output looks like
2090 v+i-j = 0xfffffffe
2091 v+(i-j) = 0xbff84000
2093 /* if evaluating constant expression, no code should be
2094 generated, so no bound check */
2095 if (tcc_state->do_bounds_check && !const_wanted) {
2096 /* if bounded pointers, we generate a special code to
2097 test bounds */
2098 if (op == '-') {
2099 vpushi(0);
2100 vswap();
2101 gen_op('-');
2103 gen_bounded_ptr_add();
2104 } else
2105 #endif
2107 gen_opic(op);
2109 /* restore the type in case gen_opic() swapped the operands */
2110 vtop->type = type1;
2112 } else if (is_float(bt1) || is_float(bt2)) {
2113 /* compute bigger type and do implicit casts */
2114 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2115 t = VT_LDOUBLE;
2116 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2117 t = VT_DOUBLE;
2118 } else {
2119 t = VT_FLOAT;
2121 /* floats can only be used for a few operations */
2122 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2123 (op < TOK_ULT || op > TOK_GT))
2124 tcc_error("invalid operands for binary operation");
2125 goto std_op;
2126 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2127 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2128 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2129 t |= VT_UNSIGNED;
2130 goto std_op;
2131 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2132 /* cast to biggest op */
2133 t = VT_LLONG;
2134 /* convert to unsigned if it does not fit in a long long */
2135 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2136 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2137 t |= VT_UNSIGNED;
2138 goto std_op;
2139 } else {
2140 /* integer operations */
2141 t = VT_INT;
2142 /* convert to unsigned if it does not fit in an integer */
2143 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2144 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2145 t |= VT_UNSIGNED;
2146 std_op:
2147 /* XXX: currently, some unsigned operations are explicit, so
2148 we modify them here */
2149 if (t & VT_UNSIGNED) {
2150 if (op == TOK_SAR)
2151 op = TOK_SHR;
2152 else if (op == '/')
2153 op = TOK_UDIV;
2154 else if (op == '%')
2155 op = TOK_UMOD;
2156 else if (op == TOK_LT)
2157 op = TOK_ULT;
2158 else if (op == TOK_GT)
2159 op = TOK_UGT;
2160 else if (op == TOK_LE)
2161 op = TOK_ULE;
2162 else if (op == TOK_GE)
2163 op = TOK_UGE;
2165 vswap();
2166 type1.t = t;
2167 gen_cast(&type1);
2168 vswap();
2169 /* special case for shifts and long long: we keep the shift as
2170 an integer */
2171 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2172 type1.t = VT_INT;
2173 gen_cast(&type1);
2174 if (is_float(t))
2175 gen_opif(op);
2176 else
2177 gen_opic(op);
2178 if (op >= TOK_ULT && op <= TOK_GT) {
2179 /* relational op: the result is an int */
2180 vtop->type.t = VT_INT;
2181 } else {
2182 vtop->type.t = t;
2185 // Make sure that we have converted to an rvalue:
2186 if (vtop->r & VT_LVAL)
2187 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2190 #ifndef TCC_TARGET_ARM
2191 /* generic itof for unsigned long long case */
2192 static void gen_cvt_itof1(int t)
2194 #ifdef TCC_TARGET_ARM64
2195 gen_cvt_itof(t);
2196 #else
2197 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2198 (VT_LLONG | VT_UNSIGNED)) {
2200 if (t == VT_FLOAT)
2201 vpush_global_sym(&func_old_type, TOK___floatundisf);
2202 #if LDOUBLE_SIZE != 8
2203 else if (t == VT_LDOUBLE)
2204 vpush_global_sym(&func_old_type, TOK___floatundixf);
2205 #endif
2206 else
2207 vpush_global_sym(&func_old_type, TOK___floatundidf);
2208 vrott(2);
2209 gfunc_call(1);
2210 vpushi(0);
2211 vtop->r = reg_fret(t);
2212 } else {
2213 gen_cvt_itof(t);
2215 #endif
2217 #endif
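/* Illustrative sketch (not part of tcc): on targets that take the
   non-native path above, an unsigned 64-bit to floating-point
   conversion is lowered by gen_cvt_itof1() into a call to one of the
   helpers pushed with vpush_global_sym(), e.g. __floatundidf for the
   conversion to double in this small test program:

       #include <stdio.h>
       int main(void)
       {
           unsigned long long u = 0xffffffffffffffffULL;
           double d = (double)u;
           printf("%f\n", d);
           return 0;
       }
*/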
2219 /* generic ftoi for unsigned long long case */
2220 static void gen_cvt_ftoi1(int t)
2222 #ifdef TCC_TARGET_ARM64
2223 gen_cvt_ftoi(t);
2224 #else
2225 int st;
2227 if (t == (VT_LLONG | VT_UNSIGNED)) {
2228 /* not handled natively */
2229 st = vtop->type.t & VT_BTYPE;
2230 if (st == VT_FLOAT)
2231 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2232 #if LDOUBLE_SIZE != 8
2233 else if (st == VT_LDOUBLE)
2234 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2235 #endif
2236 else
2237 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2238 vrott(2);
2239 gfunc_call(1);
2240 vpushi(0);
2241 vtop->r = REG_IRET;
2242 vtop->r2 = REG_LRET;
2243 } else {
2244 gen_cvt_ftoi(t);
2246 #endif
2249 /* force char or short cast */
2250 static void force_charshort_cast(int t)
2252 int bits, dbt;
2253 dbt = t & VT_BTYPE;
2254 /* XXX: add optimization if lvalue : just change type and offset */
2255 if (dbt == VT_BYTE)
2256 bits = 8;
2257 else
2258 bits = 16;
2259 if (t & VT_UNSIGNED) {
2260 vpushi((1 << bits) - 1);
2261 gen_op('&');
2262 } else {
2263 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2264 bits = 64 - bits;
2265 else
2266 bits = 32 - bits;
2267 vpushi(bits);
2268 gen_op(TOK_SHL);
2269 /* result must be signed or the SAR is converted to an SHL.
2270 This was not the case when "t" was a signed short
2271 and the last value on the stack was an unsigned int */
2272 vtop->type.t &= ~VT_UNSIGNED;
2273 vpushi(bits);
2274 gen_op(TOK_SAR);
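/* Illustrative sketch (not part of tcc): the sequence emitted above is the
   classic mask / shift-pair truncation.  For a 32-bit value x it computes

       unsigned char uc = x & 0xff;
       signed char   sc = (x << 24) >> 24;
       short         ss = (x << 16) >> 16;

   i.e. unsigned narrow types use a single AND, while signed ones use an SHL
   followed by an arithmetic SAR (56/48-bit shifts in the long long case). */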
2278 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2279 static void gen_cast(CType *type)
2281 int sbt, dbt, sf, df, c, p;
2283 /* special delayed cast for char/short */
2284 /* XXX: in some cases (multiple cascaded casts), it may still
2285 be incorrect */
2286 if (vtop->r & VT_MUSTCAST) {
2287 vtop->r &= ~VT_MUSTCAST;
2288 force_charshort_cast(vtop->type.t);
2291 /* bitfields first get cast to ints */
2292 if (vtop->type.t & VT_BITFIELD) {
2293 gv(RC_INT);
2296 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2297 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2299 if (sbt != dbt) {
2300 sf = is_float(sbt);
2301 df = is_float(dbt);
2302 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2303 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2304 if (c) {
2305 /* constant case: we can do it now */
2306 /* XXX: in ISOC, cannot do it if error in convert */
2307 if (sbt == VT_FLOAT)
2308 vtop->c.ld = vtop->c.f;
2309 else if (sbt == VT_DOUBLE)
2310 vtop->c.ld = vtop->c.d;
2312 if (df) {
2313 if ((sbt & VT_BTYPE) == VT_LLONG) {
2314 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2315 vtop->c.ld = vtop->c.i;
2316 else
2317 vtop->c.ld = -(long double)-vtop->c.i;
2318 } else if(!sf) {
2319 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2320 vtop->c.ld = (uint32_t)vtop->c.i;
2321 else
2322 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2325 if (dbt == VT_FLOAT)
2326 vtop->c.f = (float)vtop->c.ld;
2327 else if (dbt == VT_DOUBLE)
2328 vtop->c.d = (double)vtop->c.ld;
2329 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2330 vtop->c.i = vtop->c.ld;
2331 } else if (sf && dbt == VT_BOOL) {
2332 vtop->c.i = (vtop->c.ld != 0);
2333 } else {
2334 if(sf)
2335 vtop->c.i = vtop->c.ld;
2336 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2338 else if (sbt & VT_UNSIGNED)
2339 vtop->c.i = (uint32_t)vtop->c.i;
2340 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2341 else if (sbt == VT_PTR)
2343 #endif
2344 else if (sbt != VT_LLONG)
2345 vtop->c.i = ((uint32_t)vtop->c.i |
2346 -(vtop->c.i & 0x80000000));
2348 if (dbt == (VT_LLONG|VT_UNSIGNED))
2350 else if (dbt == VT_BOOL)
2351 vtop->c.i = (vtop->c.i != 0);
2352 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2353 else if (dbt == VT_PTR)
2355 #endif
2356 else if (dbt != VT_LLONG) {
2357 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2358 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2359 0xffffffff);
2360 vtop->c.i &= m;
2361 if (!(dbt & VT_UNSIGNED))
2362 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2365 } else if (p && dbt == VT_BOOL) {
2366 vtop->r = VT_CONST;
2367 vtop->c.i = 1;
2368 } else {
2369 /* non constant case: generate code */
2370 if (sf && df) {
2371 /* convert from fp to fp */
2372 gen_cvt_ftof(dbt);
2373 } else if (df) {
2374 /* convert int to fp */
2375 gen_cvt_itof1(dbt);
2376 } else if (sf) {
2377 /* convert fp to int */
2378 if (dbt == VT_BOOL) {
2379 vpushi(0);
2380 gen_op(TOK_NE);
2381 } else {
2382 /* we handle char/short/etc... with generic code */
2383 if (dbt != (VT_INT | VT_UNSIGNED) &&
2384 dbt != (VT_LLONG | VT_UNSIGNED) &&
2385 dbt != VT_LLONG)
2386 dbt = VT_INT;
2387 gen_cvt_ftoi1(dbt);
2388 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2389 /* additional cast for char/short... */
2390 vtop->type.t = dbt;
2391 gen_cast(type);
2394 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2395 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2396 if ((sbt & VT_BTYPE) != VT_LLONG) {
2397 /* scalar to long long */
2398 /* machine independent conversion */
2399 gv(RC_INT);
2400 /* generate high word */
2401 if (sbt == (VT_INT | VT_UNSIGNED)) {
2402 vpushi(0);
2403 gv(RC_INT);
2404 } else {
2405 if (sbt == VT_PTR) {
2406 /* cast from pointer to int before we apply
2407 shift operation, which pointers don't support */
2408 gen_cast(&int_type);
2410 gv_dup();
2411 vpushi(31);
2412 gen_op(TOK_SAR);
2414 /* patch second register */
2415 vtop[-1].r2 = vtop->r;
2416 vpop();
2418 #else
2419 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2420 (dbt & VT_BTYPE) == VT_PTR ||
2421 (dbt & VT_BTYPE) == VT_FUNC) {
2422 if ((sbt & VT_BTYPE) != VT_LLONG &&
2423 (sbt & VT_BTYPE) != VT_PTR &&
2424 (sbt & VT_BTYPE) != VT_FUNC) {
2425 /* need to convert from 32bit to 64bit */
2426 gv(RC_INT);
2427 if (sbt != (VT_INT | VT_UNSIGNED)) {
2428 #if defined(TCC_TARGET_ARM64)
2429 gen_cvt_sxtw();
2430 #elif defined(TCC_TARGET_X86_64)
2431 int r = gv(RC_INT);
2432 /* x86_64 specific: movslq */
2433 o(0x6348);
2434 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2435 #else
2436 #error
2437 #endif
2440 #endif
2441 } else if (dbt == VT_BOOL) {
2442 /* scalar to bool */
2443 vpushi(0);
2444 gen_op(TOK_NE);
2445 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2446 (dbt & VT_BTYPE) == VT_SHORT) {
2447 if (sbt == VT_PTR) {
2448 vtop->type.t = VT_INT;
2449 tcc_warning("nonportable conversion from pointer to char/short");
2451 force_charshort_cast(dbt);
2452 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2453 } else if ((dbt & VT_BTYPE) == VT_INT) {
2454 /* scalar to int */
2455 if ((sbt & VT_BTYPE) == VT_LLONG) {
2456 /* from long long: just take low order word */
2457 lexpand();
2458 vpop();
2460 /* if lvalue and single word type, nothing to do because
2461 the lvalue already contains the real type size (see
2462 VT_LVAL_xxx constants) */
2463 #endif
2466 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2467 /* if we are casting between pointer types,
2468 we must update the VT_LVAL_xxx size */
2469 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2470 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2472 vtop->type = *type;
2475 /* return type size as known at compile time. Store the alignment in '*a' */
2476 ST_FUNC int type_size(CType *type, int *a)
2478 Sym *s;
2479 int bt;
2481 bt = type->t & VT_BTYPE;
2482 if (bt == VT_STRUCT) {
2483 /* struct/union */
2484 s = type->ref;
2485 *a = s->r;
2486 return s->c;
2487 } else if (bt == VT_PTR) {
2488 if (type->t & VT_ARRAY) {
2489 int ts;
2491 s = type->ref;
2492 ts = type_size(&s->type, a);
2494 if (ts < 0 && s->c < 0)
2495 ts = -ts;
2497 return ts * s->c;
2498 } else {
2499 *a = PTR_SIZE;
2500 return PTR_SIZE;
2502 } else if (bt == VT_LDOUBLE) {
2503 *a = LDOUBLE_ALIGN;
2504 return LDOUBLE_SIZE;
2505 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2506 #ifdef TCC_TARGET_I386
2507 #ifdef TCC_TARGET_PE
2508 *a = 8;
2509 #else
2510 *a = 4;
2511 #endif
2512 #elif defined(TCC_TARGET_ARM)
2513 #ifdef TCC_ARM_EABI
2514 *a = 8;
2515 #else
2516 *a = 4;
2517 #endif
2518 #else
2519 *a = 8;
2520 #endif
2521 return 8;
2522 } else if (bt == VT_INT || bt == VT_FLOAT) {
2523 *a = 4;
2524 return 4;
2525 } else if (bt == VT_SHORT) {
2526 *a = 2;
2527 return 2;
2528 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2529 *a = 8;
2530 return 16;
2531 } else if (bt == VT_ENUM) {
2532 *a = 4;
2533 /* Enums might be incomplete, so don't just return '4' here. */
2534 return type->ref->c;
2535 } else {
2536 /* char, void, function, _Bool */
2537 *a = 1;
2538 return 1;
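/* Illustrative sketch (not part of tcc): a few results that follow from the
   cases above (the alignment of 8-byte scalars depends on the target, as
   selected by the #ifdefs):

       int a;          size 4,  alignment 4
       short s;        size 2,  alignment 2
       int v[10];      size 40, alignment 4   (element size times s->c)
       long long ll;   size 8,  alignment 4 on classic i386/ARM-OABI, 8 otherwise
*/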
2542 /* push the type size as known at run time on top of the value stack. Store
2543 the alignment in '*a' */
2544 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2546 if (type->t & VT_VLA) {
2547 type_size(&type->ref->type, a);
2548 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2549 } else {
2550 vpushi(type_size(type, a));
2554 static void vla_sp_restore(void) {
2555 if (vlas_in_scope) {
2556 gen_vla_sp_restore(vla_sp_loc);
2560 static void vla_sp_restore_root(void) {
2561 if (vlas_in_scope) {
2562 gen_vla_sp_restore(vla_sp_root_loc);
2566 /* return the pointed type of t */
2567 static inline CType *pointed_type(CType *type)
2569 return &type->ref->type;
2572 /* modify 'type' so that it becomes a pointer to the original type. */
2573 ST_FUNC void mk_pointer(CType *type)
2575 Sym *s;
2576 s = sym_push(SYM_FIELD, type, 0, -1);
2577 type->t = VT_PTR | (type->t & ~VT_TYPE);
2578 type->ref = s;
2581 /* compare function types. OLD functions match any new functions */
2582 static int is_compatible_func(CType *type1, CType *type2)
2584 Sym *s1, *s2;
2586 s1 = type1->ref;
2587 s2 = type2->ref;
2588 if (!is_compatible_types(&s1->type, &s2->type))
2589 return 0;
2590 /* check func_call */
2591 if (s1->a.func_call != s2->a.func_call)
2592 return 0;
2593 /* XXX: not complete */
2594 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2595 return 1;
2596 if (s1->c != s2->c)
2597 return 0;
2598 while (s1 != NULL) {
2599 if (s2 == NULL)
2600 return 0;
2601 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2602 return 0;
2603 s1 = s1->next;
2604 s2 = s2->next;
2606 if (s2)
2607 return 0;
2608 return 1;
2611 /* return true if type1 and type2 are the same. If unqualified is
2612 true, qualifiers on the types are ignored.
2614 - enums are not checked, matching gcc's __builtin_types_compatible_p ()
2616 static int compare_types(CType *type1, CType *type2, int unqualified)
2618 int bt1, t1, t2;
2620 t1 = type1->t & VT_TYPE;
2621 t2 = type2->t & VT_TYPE;
2622 if (unqualified) {
2623 /* strip qualifiers before comparing */
2624 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2625 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2627 /* Default vs. explicit signedness only matters for char */
2628 if ((t1 & VT_BTYPE) != VT_BYTE) {
2629 t1 &= ~VT_DEFSIGN;
2630 t2 &= ~VT_DEFSIGN;
2632 /* An enum is compatible with (unsigned) int. Ideally we would
2633 store the enums signedness in type->ref.a.<some_bit> and
2634 only accept unsigned enums with unsigned int and vice versa.
2635 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2636 from pointer target types, so we can't add it here either. */
2637 if ((t1 & VT_BTYPE) == VT_ENUM) {
2638 t1 = VT_INT;
2639 if (type1->ref->a.unsigned_enum)
2640 t1 |= VT_UNSIGNED;
2642 if ((t2 & VT_BTYPE) == VT_ENUM) {
2643 t2 = VT_INT;
2644 if (type2->ref->a.unsigned_enum)
2645 t2 |= VT_UNSIGNED;
2647 /* XXX: bitfields ? */
2648 if (t1 != t2)
2649 return 0;
2650 /* test more complicated cases */
2651 bt1 = t1 & VT_BTYPE;
2652 if (bt1 == VT_PTR) {
2653 type1 = pointed_type(type1);
2654 type2 = pointed_type(type2);
2655 return is_compatible_types(type1, type2);
2656 } else if (bt1 == VT_STRUCT) {
2657 return (type1->ref == type2->ref);
2658 } else if (bt1 == VT_FUNC) {
2659 return is_compatible_func(type1, type2);
2660 } else {
2661 return 1;
2665 /* return true if type1 and type2 are exactly the same (including
2666 qualifiers).
2668 static int is_compatible_types(CType *type1, CType *type2)
2670 return compare_types(type1,type2,0);
2673 /* return true if type1 and type2 are the same (ignoring qualifiers).
2675 static int is_compatible_parameter_types(CType *type1, CType *type2)
2677 return compare_types(type1,type2,1);
2680 /* print a type. If 'varstr' is not NULL, then the variable is also
2681 printed in the type */
2682 /* XXX: union */
2683 /* XXX: add array and function pointers */
2684 static void type_to_str(char *buf, int buf_size,
2685 CType *type, const char *varstr)
2687 int bt, v, t;
2688 Sym *s, *sa;
2689 char buf1[256];
2690 const char *tstr;
2692 t = type->t & VT_TYPE;
2693 bt = t & VT_BTYPE;
2694 buf[0] = '\0';
2695 if (t & VT_CONSTANT)
2696 pstrcat(buf, buf_size, "const ");
2697 if (t & VT_VOLATILE)
2698 pstrcat(buf, buf_size, "volatile ");
2699 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2700 pstrcat(buf, buf_size, "unsigned ");
2701 else if (t & VT_DEFSIGN)
2702 pstrcat(buf, buf_size, "signed ");
2703 switch(bt) {
2704 case VT_VOID:
2705 tstr = "void";
2706 goto add_tstr;
2707 case VT_BOOL:
2708 tstr = "_Bool";
2709 goto add_tstr;
2710 case VT_BYTE:
2711 tstr = "char";
2712 goto add_tstr;
2713 case VT_SHORT:
2714 tstr = "short";
2715 goto add_tstr;
2716 case VT_INT:
2717 tstr = "int";
2718 goto add_tstr;
2719 case VT_LONG:
2720 tstr = "long";
2721 goto add_tstr;
2722 case VT_LLONG:
2723 tstr = "long long";
2724 goto add_tstr;
2725 case VT_FLOAT:
2726 tstr = "float";
2727 goto add_tstr;
2728 case VT_DOUBLE:
2729 tstr = "double";
2730 goto add_tstr;
2731 case VT_LDOUBLE:
2732 tstr = "long double";
2733 add_tstr:
2734 pstrcat(buf, buf_size, tstr);
2735 break;
2736 case VT_ENUM:
2737 case VT_STRUCT:
2738 if (bt == VT_STRUCT)
2739 tstr = "struct ";
2740 else
2741 tstr = "enum ";
2742 pstrcat(buf, buf_size, tstr);
2743 v = type->ref->v & ~SYM_STRUCT;
2744 if (v >= SYM_FIRST_ANOM)
2745 pstrcat(buf, buf_size, "<anonymous>");
2746 else
2747 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2748 break;
2749 case VT_FUNC:
2750 s = type->ref;
2751 type_to_str(buf, buf_size, &s->type, varstr);
2752 pstrcat(buf, buf_size, "(");
2753 sa = s->next;
2754 while (sa != NULL) {
2755 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2756 pstrcat(buf, buf_size, buf1);
2757 sa = sa->next;
2758 if (sa)
2759 pstrcat(buf, buf_size, ", ");
2761 pstrcat(buf, buf_size, ")");
2762 goto no_var;
2763 case VT_PTR:
2764 s = type->ref;
2765 if (t & VT_ARRAY) {
2766 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2767 type_to_str(buf, buf_size, &s->type, buf1);
2768 goto no_var;
2770 pstrcpy(buf1, sizeof(buf1), "*");
2771 if (t & VT_CONSTANT)
2772 pstrcat(buf1, buf_size, "const ");
2773 if (t & VT_VOLATILE)
2774 pstrcat(buf1, buf_size, "volatile ");
2775 if (varstr)
2776 pstrcat(buf1, sizeof(buf1), varstr);
2777 type_to_str(buf, buf_size, &s->type, buf1);
2778 goto no_var;
2780 if (varstr) {
2781 pstrcat(buf, buf_size, " ");
2782 pstrcat(buf, buf_size, varstr);
2784 no_var: ;
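/* Illustrative sketch (not part of tcc): strings type_to_str() produces for a
   few example declarations (buffer and variable names are arbitrary):

       const char *p;      ->  "const char *p"
       int a[10];          ->  "int a[10]"
       int f(int, char);   ->  "int f(int, char)"
*/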
2787 /* verify type compatibility to store vtop in 'dt' type, and generate
2788 casts if needed. */
2789 static void gen_assign_cast(CType *dt)
2791 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2792 char buf1[256], buf2[256];
2793 int dbt, sbt;
2795 st = &vtop->type; /* source type */
2796 dbt = dt->t & VT_BTYPE;
2797 sbt = st->t & VT_BTYPE;
2798 if (sbt == VT_VOID || dbt == VT_VOID) {
2799 if (sbt == VT_VOID && dbt == VT_VOID)
2800 ; /*
2801 It is Ok if both are void
2802 A test program:
2803 void func1() {}
2804 void func2() {
2805 return func1();
2807 gcc accepts this program
2809 else
2810 tcc_error("cannot cast from/to void");
2812 if (dt->t & VT_CONSTANT)
2813 tcc_warning("assignment of read-only location");
2814 switch(dbt) {
2815 case VT_PTR:
2816 /* special cases for pointers */
2817 /* '0' can also be a pointer */
2818 if (is_null_pointer(vtop))
2819 goto type_ok;
2820 /* accept an implicit integer to pointer conversion with a warning */
2821 if (is_integer_btype(sbt)) {
2822 tcc_warning("assignment makes pointer from integer without a cast");
2823 goto type_ok;
2825 type1 = pointed_type(dt);
2826 /* a function is implicitly a function pointer */
2827 if (sbt == VT_FUNC) {
2828 if ((type1->t & VT_BTYPE) != VT_VOID &&
2829 !is_compatible_types(pointed_type(dt), st))
2830 tcc_warning("assignment from incompatible pointer type");
2831 goto type_ok;
2833 if (sbt != VT_PTR)
2834 goto error;
2835 type2 = pointed_type(st);
2836 if ((type1->t & VT_BTYPE) == VT_VOID ||
2837 (type2->t & VT_BTYPE) == VT_VOID) {
2838 /* void * can match anything */
2839 } else {
2840 /* exact type match, except for qualifiers */
2841 tmp_type1 = *type1;
2842 tmp_type2 = *type2;
2843 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2844 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2846 /* Like GCC, don't warn by default for mere changes
2847 in pointer target signedness. Do warn for different
2848 base types, though, in particular for unsigned enums
2849 and signed int targets. */
2850 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2851 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2852 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2854 else
2855 tcc_warning("assignment from incompatible pointer type");
2858 /* check const and volatile */
2859 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2860 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2861 tcc_warning("assignment discards qualifiers from pointer target type");
2862 break;
2863 case VT_BYTE:
2864 case VT_SHORT:
2865 case VT_INT:
2866 case VT_LLONG:
2867 if (sbt == VT_PTR || sbt == VT_FUNC) {
2868 tcc_warning("assignment makes integer from pointer without a cast");
2869 } else if (sbt == VT_STRUCT) {
2870 goto case_VT_STRUCT;
2872 /* XXX: more tests */
2873 break;
2874 case VT_STRUCT:
2875 case_VT_STRUCT:
2876 tmp_type1 = *dt;
2877 tmp_type2 = *st;
2878 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2879 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2880 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2881 error:
2882 type_to_str(buf1, sizeof(buf1), st, NULL);
2883 type_to_str(buf2, sizeof(buf2), dt, NULL);
2884 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2886 break;
2888 type_ok:
2889 gen_cast(dt);
2892 /* store vtop in lvalue pushed on stack */
2893 ST_FUNC void vstore(void)
2895 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2897 ft = vtop[-1].type.t;
2898 sbt = vtop->type.t & VT_BTYPE;
2899 dbt = ft & VT_BTYPE;
2900 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2901 (sbt == VT_INT && dbt == VT_SHORT))
2902 && !(vtop->type.t & VT_BITFIELD)) {
2903 /* optimize char/short casts */
2904 delayed_cast = VT_MUSTCAST;
2905 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2906 ((1 << VT_STRUCT_SHIFT) - 1));
2907 /* XXX: factorize */
2908 if (ft & VT_CONSTANT)
2909 tcc_warning("assignment of read-only location");
2910 } else {
2911 delayed_cast = 0;
2912 if (!(ft & VT_BITFIELD))
2913 gen_assign_cast(&vtop[-1].type);
2916 if (sbt == VT_STRUCT) {
2917 /* if structure, only generate pointer */
2918 /* structure assignment : generate memcpy */
2919 /* XXX: optimize if small size */
2920 size = type_size(&vtop->type, &align);
2922 /* destination */
2923 vswap();
2924 vtop->type.t = VT_PTR;
2925 gaddrof();
2927 /* address of memcpy() */
2928 #ifdef TCC_ARM_EABI
2929 if(!(align & 7))
2930 vpush_global_sym(&func_old_type, TOK_memcpy8);
2931 else if(!(align & 3))
2932 vpush_global_sym(&func_old_type, TOK_memcpy4);
2933 else
2934 #endif
2935 /* Use memmove, rather than memcpy, as dest and src may be the same. */
2936 vpush_global_sym(&func_old_type, TOK_memmove);
2938 vswap();
2939 /* source */
2940 vpushv(vtop - 2);
2941 vtop->type.t = VT_PTR;
2942 gaddrof();
2943 /* type size */
2944 vpushi(size);
2945 gfunc_call(3);
2947 /* leave source on stack */
2948 } else if (ft & VT_BITFIELD) {
2949 /* bitfield store handling */
2951 /* save lvalue as expression result (example: s.b = s.a = n;) */
2952 vdup(), vtop[-1] = vtop[-2];
2954 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2955 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2956 /* remove bit field info to avoid loops */
2957 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2959 if((ft & VT_BTYPE) == VT_BOOL) {
2960 gen_cast(&vtop[-1].type);
2961 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2964 /* duplicate destination */
2965 vdup();
2966 vtop[-1] = vtop[-2];
2968 /* mask and shift source */
2969 if((ft & VT_BTYPE) != VT_BOOL) {
2970 if((ft & VT_BTYPE) == VT_LLONG) {
2971 vpushll((1ULL << bit_size) - 1ULL);
2972 } else {
2973 vpushi((1 << bit_size) - 1);
2975 gen_op('&');
2977 vpushi(bit_pos);
2978 gen_op(TOK_SHL);
2979 /* load destination, mask and or with source */
2980 vswap();
2981 if((ft & VT_BTYPE) == VT_LLONG) {
2982 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2983 } else {
2984 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2986 gen_op('&');
2987 gen_op('|');
2988 /* store result */
2989 vstore();
2990 /* ... and discard */
2991 vpop();
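/* Illustrative sketch (not part of tcc): the net effect of the mask/shift/or
   sequence above is the usual read-modify-write formula for a bit-field
   store, with mask == (1 << bit_size) - 1:

       dest = (dest & ~(mask << bit_pos)) | ((src & mask) << bit_pos);
*/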
2993 } else {
2994 #ifdef CONFIG_TCC_BCHECK
2995 /* bound check case */
2996 if (vtop[-1].r & VT_MUSTBOUND) {
2997 vswap();
2998 gbound();
2999 vswap();
3001 #endif
3002 rc = RC_INT;
3003 if (is_float(ft)) {
3004 rc = RC_FLOAT;
3005 #ifdef TCC_TARGET_X86_64
3006 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3007 rc = RC_ST0;
3008 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3009 rc = RC_FRET;
3011 #endif
3013 r = gv(rc); /* generate value */
3014 /* if lvalue was saved on stack, must read it */
3015 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3016 SValue sv;
3017 t = get_reg(RC_INT);
3018 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3019 sv.type.t = VT_PTR;
3020 #else
3021 sv.type.t = VT_INT;
3022 #endif
3023 sv.r = VT_LOCAL | VT_LVAL;
3024 sv.c.i = vtop[-1].c.i;
3025 load(t, &sv);
3026 vtop[-1].r = t | VT_LVAL;
3028 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3029 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3030 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3031 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3032 #else
3033 if ((ft & VT_BTYPE) == VT_LLONG) {
3034 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3035 #endif
3036 vtop[-1].type.t = load_type;
3037 store(r, vtop - 1);
3038 vswap();
3039 /* convert to int to increment easily */
3040 vtop->type.t = addr_type;
3041 gaddrof();
3042 vpushi(load_size);
3043 gen_op('+');
3044 vtop->r |= VT_LVAL;
3045 vswap();
3046 vtop[-1].type.t = load_type;
3047 /* XXX: it works because r2 is spilled last ! */
3048 store(vtop->r2, vtop - 1);
3049 } else {
3050 store(r, vtop - 1);
3053 vswap();
3054 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3055 vtop->r |= delayed_cast;
3059 /* handle post/pre increment and decrement; 'post' is true for the postfix form, c is the token ++ or -- */
3060 ST_FUNC void inc(int post, int c)
3062 test_lvalue();
3063 vdup(); /* save lvalue */
3064 if (post) {
3065 gv_dup(); /* duplicate value */
3066 vrotb(3);
3067 vrotb(3);
3069 /* add constant */
3070 vpushi(c - TOK_MID);
3071 gen_op('+');
3072 vstore(); /* store value */
3073 if (post)
3074 vpop(); /* if post op, return saved value */
3077 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3079 /* read the string */
3080 if (tok != TOK_STR)
3081 expect(msg);
3082 cstr_new(astr);
3083 while (tok == TOK_STR) {
3084 /* XXX: add \0 handling too ? */
3085 cstr_cat(astr, tokc.str.data, -1);
3086 next();
3088 cstr_ccat(astr, '\0');
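/* Illustrative sketch (not part of tcc): parse_mult_str() implements the
   usual concatenation of adjacent string literals, so an input such as

       __attribute__((section(".text" ".hot")))

   yields the single string ".text.hot" in 'astr'. */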
3091 /* For i >= 1, returns floor(log2(i)) + 1, i.e. log2(i) + 1 when i is a
3092 power of two. If i is 0, returns 0. */
3093 static int exact_log2p1(int i)
3095 int ret;
3096 if (!i)
3097 return 0;
3098 for (ret = 1; i >= 1 << 8; ret += 8)
3099 i >>= 8;
3100 if (i >= 1 << 4)
3101 ret += 4, i >>= 4;
3102 if (i >= 1 << 2)
3103 ret += 2, i >>= 2;
3104 if (i >= 1 << 1)
3105 ret++;
3106 return ret;
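/* Illustrative sketch (not part of tcc): this is the encoding used for the
   'aligned' attribute: ad->a.aligned stores log2(n) + 1 so that the real
   alignment can be recovered as 1 << (aligned - 1):

       exact_log2p1(0)  == 0
       exact_log2p1(1)  == 1    (1 << 0 == 1)
       exact_log2p1(8)  == 4    (1 << 3 == 8)
       exact_log2p1(16) == 5    (1 << 4 == 16)
*/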
3109 /* Parse GNUC __attribute__ extension. Currently, the following
3110 extensions are recognized:
3111 - aligned(n) : set data/function alignment.
3112 - packed : force data alignment to 1
3113 - section(x) : generate data/code in this section.
3114 - unused : currently ignored, but may be used someday.
3115 - regparm(n) : pass function parameters in registers (i386 only)
3117 static void parse_attribute(AttributeDef *ad)
3119 int t, n;
3120 CString astr;
3122 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3123 next();
3124 skip('(');
3125 skip('(');
3126 while (tok != ')') {
3127 if (tok < TOK_IDENT)
3128 expect("attribute name");
3129 t = tok;
3130 next();
3131 switch(t) {
3132 case TOK_SECTION1:
3133 case TOK_SECTION2:
3134 skip('(');
3135 parse_mult_str(&astr, "section name");
3136 ad->section = find_section(tcc_state, (char *)astr.data);
3137 skip(')');
3138 cstr_free(&astr);
3139 break;
3140 case TOK_ALIAS1:
3141 case TOK_ALIAS2:
3142 skip('(');
3143 parse_mult_str(&astr, "alias(\"target\")");
3144 ad->alias_target = /* save string as token, for later */
3145 tok_alloc((char*)astr.data, astr.size-1)->tok;
3146 skip(')');
3147 cstr_free(&astr);
3148 break;
3149 case TOK_VISIBILITY1:
3150 case TOK_VISIBILITY2:
3151 skip('(');
3152 parse_mult_str(&astr,
3153 "visibility(\"default|hidden|internal|protected\")");
3154 if (!strcmp (astr.data, "default"))
3155 ad->a.visibility = STV_DEFAULT;
3156 else if (!strcmp (astr.data, "hidden"))
3157 ad->a.visibility = STV_HIDDEN;
3158 else if (!strcmp (astr.data, "internal"))
3159 ad->a.visibility = STV_INTERNAL;
3160 else if (!strcmp (astr.data, "protected"))
3161 ad->a.visibility = STV_PROTECTED;
3162 else
3163 expect("visibility(\"default|hidden|internal|protected\")");
3164 skip(')');
3165 cstr_free(&astr);
3166 break;
3167 case TOK_ALIGNED1:
3168 case TOK_ALIGNED2:
3169 if (tok == '(') {
3170 next();
3171 n = expr_const();
3172 if (n <= 0 || (n & (n - 1)) != 0)
3173 tcc_error("alignment must be a positive power of two");
3174 skip(')');
3175 } else {
3176 n = MAX_ALIGN;
3178 ad->a.aligned = exact_log2p1(n);
3179 if (n != 1 << (ad->a.aligned - 1))
3180 tcc_error("alignment of %d is larger than implemented", n);
3181 break;
3182 case TOK_PACKED1:
3183 case TOK_PACKED2:
3184 ad->a.packed = 1;
3185 break;
3186 case TOK_WEAK1:
3187 case TOK_WEAK2:
3188 ad->a.weak = 1;
3189 break;
3190 case TOK_UNUSED1:
3191 case TOK_UNUSED2:
3192 /* currently, no need to handle it because tcc does not
3193 track unused objects */
3194 break;
3195 case TOK_NORETURN1:
3196 case TOK_NORETURN2:
3197 /* currently ignored: tcc does not make use of the
3198 noreturn information */
3199 break;
3200 case TOK_CDECL1:
3201 case TOK_CDECL2:
3202 case TOK_CDECL3:
3203 ad->a.func_call = FUNC_CDECL;
3204 break;
3205 case TOK_STDCALL1:
3206 case TOK_STDCALL2:
3207 case TOK_STDCALL3:
3208 ad->a.func_call = FUNC_STDCALL;
3209 break;
3210 #ifdef TCC_TARGET_I386
3211 case TOK_REGPARM1:
3212 case TOK_REGPARM2:
3213 skip('(');
3214 n = expr_const();
3215 if (n > 3)
3216 n = 3;
3217 else if (n < 0)
3218 n = 0;
3219 if (n > 0)
3220 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3221 skip(')');
3222 break;
3223 case TOK_FASTCALL1:
3224 case TOK_FASTCALL2:
3225 case TOK_FASTCALL3:
3226 ad->a.func_call = FUNC_FASTCALLW;
3227 break;
3228 #endif
3229 case TOK_MODE:
3230 skip('(');
3231 switch(tok) {
3232 case TOK_MODE_DI:
3233 ad->a.mode = VT_LLONG + 1;
3234 break;
3235 case TOK_MODE_QI:
3236 ad->a.mode = VT_BYTE + 1;
3237 break;
3238 case TOK_MODE_HI:
3239 ad->a.mode = VT_SHORT + 1;
3240 break;
3241 case TOK_MODE_SI:
3242 case TOK_MODE_word:
3243 ad->a.mode = VT_INT + 1;
3244 break;
3245 default:
3246 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3247 break;
3249 next();
3250 skip(')');
3251 break;
3252 case TOK_DLLEXPORT:
3253 ad->a.func_export = 1;
3254 break;
3255 case TOK_DLLIMPORT:
3256 ad->a.func_import = 1;
3257 break;
3258 default:
3259 if (tcc_state->warn_unsupported)
3260 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3261 /* skip parameters */
3262 if (tok == '(') {
3263 int parenthesis = 0;
3264 do {
3265 if (tok == '(')
3266 parenthesis++;
3267 else if (tok == ')')
3268 parenthesis--;
3269 next();
3270 } while (parenthesis && tok != -1);
3272 break;
3274 if (tok != ',')
3275 break;
3276 next();
3278 skip(')');
3279 skip(')');
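/* Illustrative sketch (not part of tcc): the kind of source parse_attribute()
   accepts, exercising the packed/aligned/section/regparm cases above (the
   identifiers hdr, mybuf and fast_fn are just example names):

       struct __attribute__((packed)) hdr { char tag; int len; };
       static char mybuf[256] __attribute__((aligned(16), section(".mydata")));
       int fast_fn(int a, int b) __attribute__((regparm(3)));
*/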
3283 static Sym * find_field (CType *type, int v)
3285 Sym *s = type->ref;
3286 v |= SYM_FIELD;
3287 while ((s = s->next) != NULL) {
3288 if ((s->v & SYM_FIELD) &&
3289 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3290 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3291 Sym *ret = find_field (&s->type, v);
3292 if (ret)
3293 return ret;
3295 if (s->v == v)
3296 break;
3298 return s;
3301 static void struct_add_offset (Sym *s, int offset)
3303 while ((s = s->next) != NULL) {
3304 if ((s->v & SYM_FIELD) &&
3305 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3306 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3307 struct_add_offset(s->type.ref, offset);
3308 } else
3309 s->c += offset;
3313 static void struct_layout(CType *type, AttributeDef *ad)
3315 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3316 int pcc = !tcc_state->ms_bitfields;
3317 Sym *f;
3318 if (ad->a.aligned)
3319 maxalign = 1 << (ad->a.aligned - 1);
3320 else
3321 maxalign = 1;
3322 offset = 0;
3323 c = 0;
3324 bit_pos = 0;
3325 prevbt = VT_STRUCT; /* make it never match */
3326 prev_bit_size = 0;
3327 for (f = type->ref->next; f; f = f->next) {
3328 int typealign, bit_size;
3329 int size = type_size(&f->type, &typealign);
3330 if (f->type.t & VT_BITFIELD)
3331 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3332 else
3333 bit_size = -1;
3334 if (bit_size == 0 && pcc) {
3335 /* Zero-width bit-fields in PCC mode aren't affected
3336 by any packing (attribute or pragma). */
3337 align = typealign;
3338 } else if (f->r > 1) {
3339 align = f->r;
3340 } else if (ad->a.packed || f->r == 1) {
3341 align = 1;
3342 /* Packed fields or packed records don't let the base type
3343 influence the record's type alignment. */
3344 typealign = 1;
3345 } else {
3346 align = typealign;
3348 if (type->ref->type.t != TOK_STRUCT) {
3349 if (pcc && bit_size >= 0)
3350 size = (bit_size + 7) >> 3;
3351 /* Bit position is already zero from our caller. */
3352 offset = 0;
3353 if (size > c)
3354 c = size;
3355 } else if (bit_size < 0) {
3356 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3357 prevbt = VT_STRUCT;
3358 prev_bit_size = 0;
3359 c = (c + addbytes + align - 1) & -align;
3360 offset = c;
3361 if (size > 0)
3362 c += size;
3363 bit_pos = 0;
3364 } else {
3365 /* A bit-field. Layout is more complicated. There are two
3366 options TCC implements: PCC compatible and MS compatible
3367 (PCC compatible is what GCC uses for almost all targets).
3368 In PCC layout the overall size of the struct (in c) is
3369 _excluding_ the current run of bit-fields (that is,
3370 there's at least additional bit_pos bits after c). In
3371 MS layout c does include the current run of bit-fields.
3373 This matters for calculating the natural alignment buckets
3374 in PCC mode. */
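/* Illustrative sketch (not part of tcc): a layout where the two modes differ
   (the struct is just an example; sizes assume a typical 32/64-bit target):

       struct s { char c; int bf : 4; };

   In PCC/GCC mode 'bf' is placed adjacent to 'c' inside a single int
   container, so sizeof(struct s) is typically 4.  In MS mode 'bf' opens a
   fresh int-sized run aligned to int, so sizeof(struct s) is typically 8. */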
3376 /* 'align' will be used to influence the record's alignment,
3377 so it's the max of specified and type alignment, except
3378 in certain cases that depend on the mode. */
3379 if (align < typealign)
3380 align = typealign;
3381 if (pcc) {
3382 /* In PCC layout a non-packed bit-field is placed adjacent
3383 to the preceding bit-fields, except if it would overflow
3384 its container (depending on base type) or it's a zero-width
3385 bit-field. Packed non-zero-width bit-fields always are
3386 placed adjacent. */
3387 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3388 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3389 if (bit_size == 0 ||
3390 ((typealign != 1 || size == 1) &&
3391 (ofs2 / (typealign * 8)) > (size/typealign))) {
3392 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3393 bit_pos = 0;
3395 offset = c;
3396 /* In PCC layout named bit-fields influence the alignment
3397 of the containing struct using the base types alignment,
3398 except for packed fields (which here have correct
3399 align/typealign). */
3400 if ((f->v & SYM_FIRST_ANOM))
3401 align = 1;
3402 } else {
3403 bt = f->type.t & VT_BTYPE;
3404 if ((bit_pos + bit_size > size * 8) ||
3405 (bit_size > 0) == (bt != prevbt)) {
3406 c = (c + typealign - 1) & -typealign;
3407 offset = c;
3408 bit_pos = 0;
3409 /* In MS bitfield mode a bit-field run always uses
3410 at least as many bits as the underlying type.
3411 To start a new run it's also required that this
3412 or the last bit-field had non-zero width. */
3413 if (bit_size || prev_bit_size)
3414 c += size;
3416 /* In MS layout the record's alignment is normally
3417 influenced by the field, except for a zero-width
3418 field at the start of a run (but by further zero-width
3419 fields it is again). */
3420 if (bit_size == 0 && prevbt != bt)
3421 align = 1;
3422 prevbt = bt;
3423 prev_bit_size = bit_size;
3425 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3426 | (bit_pos << VT_STRUCT_SHIFT);
3427 bit_pos += bit_size;
3428 if (pcc && bit_pos >= size * 8) {
3429 c += size;
3430 bit_pos -= size * 8;
3433 if (align > maxalign)
3434 maxalign = align;
3435 #if 0
3436 printf("set field %s offset=%d c=%d",
3437 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3438 if (f->type.t & VT_BITFIELD) {
3439 printf(" pos=%d size=%d",
3440 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3441 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3443 printf("\n");
3444 #endif
3446 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3447 Sym *ass;
3448 /* An anonymous struct/union. Adjust member offsets
3449 to reflect the real offset of our containing struct.
3450 Also set the offset of this anon member inside
3451 the outer struct to be zero. This way it
3452 works when accessing the field offset directly
3453 (from base object), as well as when recursing
3454 members in initializer handling. */
3455 int v2 = f->type.ref->v;
3456 if (!(v2 & SYM_FIELD) &&
3457 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3458 Sym **pps;
3459 /* This happens only with MS extensions. The
3460 anon member has a named struct type, so it
3461 potentially is shared with other references.
3462 We need to unshare members so we can modify
3463 them. */
3464 ass = f->type.ref;
3465 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3466 &f->type.ref->type, 0,
3467 f->type.ref->c);
3468 pps = &f->type.ref->next;
3469 while ((ass = ass->next) != NULL) {
3470 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3471 pps = &((*pps)->next);
3473 *pps = NULL;
3475 struct_add_offset(f->type.ref, offset);
3476 f->c = 0;
3477 } else {
3478 f->c = offset;
3481 f->r = 0;
3483 /* store size and alignment */
3484 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3485 + maxalign - 1) & -maxalign;
3486 type->ref->r = maxalign;
3489 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3490 static void struct_decl(CType *type, AttributeDef *ad, int u)
3492 int a, v, size, align, flexible, alignoverride;
3493 long c;
3494 int bit_size, bsize, bt;
3495 Sym *s, *ss, **ps;
3496 AttributeDef ad1;
3497 CType type1, btype;
3499 a = tok; /* save decl type */
3500 next();
3501 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3502 parse_attribute(ad);
3503 if (tok != '{') {
3504 v = tok;
3505 next();
3506 /* struct already defined ? return it */
3507 if (v < TOK_IDENT)
3508 expect("struct/union/enum name");
3509 s = struct_find(v);
3510 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3511 if (s->type.t != a)
3512 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3513 goto do_decl;
3515 } else {
3516 v = anon_sym++;
3518 /* Record the original enum/struct/union token. */
3519 type1.t = a;
3520 type1.ref = NULL;
3521 /* we put an undefined size for struct/union */
3522 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3523 s->r = 0; /* default alignment is zero as gcc */
3524 /* put struct/union/enum name in type */
3525 do_decl:
3526 type->t = u;
3527 type->ref = s;
3529 if (tok == '{') {
3530 next();
3531 if (s->c != -1)
3532 tcc_error("struct/union/enum already defined");
3533 /* cannot be empty */
3534 c = 0;
3535 /* empty enums are not allowed */
3536 if (a == TOK_ENUM) {
3537 int seen_neg = 0;
3538 int seen_wide = 0;
3539 for(;;) {
3540 CType *t = &int_type;
3541 v = tok;
3542 if (v < TOK_UIDENT)
3543 expect("identifier");
3544 ss = sym_find(v);
3545 if (ss && !local_stack)
3546 tcc_error("redefinition of enumerator '%s'",
3547 get_tok_str(v, NULL));
3548 next();
3549 if (tok == '=') {
3550 next();
3551 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3552 c = expr_const64();
3553 #else
3554 /* We really want to support long long enums
3555 on i386 as well, but the Sym structure only
3556 holds a 'long' for associated constants,
3557 and enlarging it would bump its size (no
3558 available padding). So punt for now. */
3559 c = expr_const();
3560 #endif
3562 if (c < 0)
3563 seen_neg = 1;
3564 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3565 seen_wide = 1, t = &size_type;
3566 /* enum symbols have static storage */
3567 ss = sym_push(v, t, VT_CONST, c);
3568 ss->type.t |= VT_STATIC;
3569 if (tok != ',')
3570 break;
3571 next();
3572 c++;
3573 /* NOTE: we accept a trailing comma */
3574 if (tok == '}')
3575 break;
3577 if (!seen_neg)
3578 s->a.unsigned_enum = 1;
3579 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3580 skip('}');
3581 } else {
3582 ps = &s->next;
3583 flexible = 0;
3584 while (tok != '}') {
3585 if (!parse_btype(&btype, &ad1)) {
3586 skip(';');
3587 continue;
3589 while (1) {
3590 if (flexible)
3591 tcc_error("flexible array member '%s' not at the end of struct",
3592 get_tok_str(v, NULL));
3593 bit_size = -1;
3594 v = 0;
3595 type1 = btype;
3596 if (tok != ':') {
3597 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3598 if (v == 0) {
3599 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3600 expect("identifier");
3601 else {
3602 int v = btype.ref->v;
3603 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3604 if (tcc_state->ms_extensions == 0)
3605 expect("identifier");
3609 if (type_size(&type1, &align) < 0) {
3610 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3611 flexible = 1;
3612 else
3613 tcc_error("field '%s' has incomplete type",
3614 get_tok_str(v, NULL));
3616 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3617 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3618 tcc_error("invalid type for '%s'",
3619 get_tok_str(v, NULL));
3621 if (tok == ':') {
3622 next();
3623 bit_size = expr_const();
3624 /* XXX: handle v = 0 case for messages */
3625 if (bit_size < 0)
3626 tcc_error("negative width in bit-field '%s'",
3627 get_tok_str(v, NULL));
3628 if (v && bit_size == 0)
3629 tcc_error("zero width for bit-field '%s'",
3630 get_tok_str(v, NULL));
3631 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3632 parse_attribute(&ad1);
3634 size = type_size(&type1, &align);
3635 /* Only remember non-default alignment. */
3636 alignoverride = 0;
3637 if (ad1.a.aligned) {
3638 int speca = 1 << (ad1.a.aligned - 1);
3639 alignoverride = speca;
3640 } else if (ad1.a.packed || ad->a.packed) {
3641 alignoverride = 1;
3642 } else if (*tcc_state->pack_stack_ptr) {
3643 if (align > *tcc_state->pack_stack_ptr)
3644 alignoverride = *tcc_state->pack_stack_ptr;
3646 if (bit_size >= 0) {
3647 bt = type1.t & VT_BTYPE;
3648 if (bt != VT_INT &&
3649 bt != VT_BYTE &&
3650 bt != VT_SHORT &&
3651 bt != VT_BOOL &&
3652 bt != VT_ENUM &&
3653 bt != VT_LLONG)
3654 tcc_error("bitfields must have scalar type");
3655 bsize = size * 8;
3656 if (bit_size > bsize) {
3657 tcc_error("width of '%s' exceeds its type",
3658 get_tok_str(v, NULL));
3659 } else if (bit_size == bsize) {
3660 /* no need for bit fields */
3662 } else {
3663 type1.t |= VT_BITFIELD |
3664 (0 << VT_STRUCT_SHIFT) |
3665 (bit_size << (VT_STRUCT_SHIFT + 6));
3668 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3669 /* Remember we've seen a real field to check
3670 for placement of flexible array member. */
3671 c = 1;
3673 /* If member is a struct or bit-field, enforce
3674 placing into the struct (as anonymous). */
3675 if (v == 0 &&
3676 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3677 bit_size >= 0)) {
3678 v = anon_sym++;
3680 if (v) {
3681 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3682 *ps = ss;
3683 ps = &ss->next;
3685 if (tok == ';' || tok == TOK_EOF)
3686 break;
3687 skip(',');
3689 skip(';');
3691 skip('}');
3692 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3693 parse_attribute(ad);
3694 struct_layout(type, ad);
3699 /* return 1 if basic type is a type size (short, long, long long) */
3700 ST_FUNC int is_btype_size(int bt)
3702 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3705 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3706 are added to the element type, copied because it could be a typedef. */
3707 static void parse_btype_qualify(CType *type, int qualifiers)
3709 while (type->t & VT_ARRAY) {
3710 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3711 type = &type->ref->type;
3713 type->t |= qualifiers;
3716 /* return 0 if no type declaration. otherwise, return the basic type
3717 and skip it.
3719 static int parse_btype(CType *type, AttributeDef *ad)
3721 int t, u, bt_size, complete, type_found, typespec_found, g;
3722 Sym *s;
3723 CType type1;
3725 memset(ad, 0, sizeof(AttributeDef));
3726 complete = 0;
3727 type_found = 0;
3728 typespec_found = 0;
3729 t = 0;
3730 while(1) {
3731 switch(tok) {
3732 case TOK_EXTENSION:
3733 /* currently, we really ignore extension */
3734 next();
3735 continue;
3737 /* basic types */
3738 case TOK_CHAR:
3739 u = VT_BYTE;
3740 basic_type:
3741 next();
3742 basic_type1:
3743 if (complete)
3744 tcc_error("too many basic types");
3745 t |= u;
3746 bt_size = is_btype_size (u & VT_BTYPE);
3747 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3748 complete = 1;
3749 typespec_found = 1;
3750 break;
3751 case TOK_VOID:
3752 u = VT_VOID;
3753 goto basic_type;
3754 case TOK_SHORT:
3755 u = VT_SHORT;
3756 goto basic_type;
3757 case TOK_INT:
3758 u = VT_INT;
3759 goto basic_type;
3760 case TOK_LONG:
3761 next();
3762 if ((t & VT_BTYPE) == VT_DOUBLE) {
3763 #ifndef TCC_TARGET_PE
3764 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3765 #endif
3766 } else if ((t & VT_BTYPE) == VT_LONG) {
3767 t = (t & ~VT_BTYPE) | VT_LLONG;
3768 } else {
3769 u = VT_LONG;
3770 goto basic_type1;
3772 break;
3773 #ifdef TCC_TARGET_ARM64
3774 case TOK_UINT128:
3775 /* GCC's __uint128_t appears in some Linux header files. Make it a
3776 synonym for long double to get the size and alignment right. */
3777 u = VT_LDOUBLE;
3778 goto basic_type;
3779 #endif
3780 case TOK_BOOL:
3781 u = VT_BOOL;
3782 goto basic_type;
3783 case TOK_FLOAT:
3784 u = VT_FLOAT;
3785 goto basic_type;
3786 case TOK_DOUBLE:
3787 next();
3788 if ((t & VT_BTYPE) == VT_LONG) {
3789 #ifdef TCC_TARGET_PE
3790 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3791 #else
3792 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3793 #endif
3794 } else {
3795 u = VT_DOUBLE;
3796 goto basic_type1;
3798 break;
3799 case TOK_ENUM:
3800 struct_decl(&type1, ad, VT_ENUM);
3801 basic_type2:
3802 u = type1.t;
3803 type->ref = type1.ref;
3804 goto basic_type1;
3805 case TOK_STRUCT:
3806 case TOK_UNION:
3807 struct_decl(&type1, ad, VT_STRUCT);
3808 goto basic_type2;
3810 /* type modifiers */
3811 case TOK_CONST1:
3812 case TOK_CONST2:
3813 case TOK_CONST3:
3814 type->t = t;
3815 parse_btype_qualify(type, VT_CONSTANT);
3816 t = type->t;
3817 next();
3818 break;
3819 case TOK_VOLATILE1:
3820 case TOK_VOLATILE2:
3821 case TOK_VOLATILE3:
3822 type->t = t;
3823 parse_btype_qualify(type, VT_VOLATILE);
3824 t = type->t;
3825 next();
3826 break;
3827 case TOK_SIGNED1:
3828 case TOK_SIGNED2:
3829 case TOK_SIGNED3:
3830 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3831 tcc_error("signed and unsigned modifier");
3832 typespec_found = 1;
3833 t |= VT_DEFSIGN;
3834 next();
3835 break;
3836 case TOK_REGISTER:
3837 case TOK_AUTO:
3838 case TOK_RESTRICT1:
3839 case TOK_RESTRICT2:
3840 case TOK_RESTRICT3:
3841 next();
3842 break;
3843 case TOK_UNSIGNED:
3844 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3845 tcc_error("signed and unsigned modifier");
3846 t |= VT_DEFSIGN | VT_UNSIGNED;
3847 next();
3848 typespec_found = 1;
3849 break;
3851 /* storage */
3852 case TOK_EXTERN:
3853 g = VT_EXTERN;
3854 goto storage;
3855 case TOK_STATIC:
3856 g = VT_STATIC;
3857 goto storage;
3858 case TOK_TYPEDEF:
3859 g = VT_TYPEDEF;
3860 goto storage;
3861 storage:
3862 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3863 tcc_error("multiple storage classes");
3864 t |= g;
3865 next();
3866 break;
3867 case TOK_INLINE1:
3868 case TOK_INLINE2:
3869 case TOK_INLINE3:
3870 t |= VT_INLINE;
3871 next();
3872 break;
3874 /* GNUC attribute */
3875 case TOK_ATTRIBUTE1:
3876 case TOK_ATTRIBUTE2:
3877 parse_attribute(ad);
3878 if (ad->a.mode) {
3879 u = ad->a.mode -1;
3880 t = (t & ~VT_BTYPE) | u;
3882 break;
3883 /* GNUC typeof */
3884 case TOK_TYPEOF1:
3885 case TOK_TYPEOF2:
3886 case TOK_TYPEOF3:
3887 next();
3888 parse_expr_type(&type1);
3889 /* remove all storage modifiers except typedef */
3890 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3891 goto basic_type2;
3892 default:
3893 if (typespec_found)
3894 goto the_end;
3895 s = sym_find(tok);
3896 if (!s || !(s->type.t & VT_TYPEDEF))
3897 goto the_end;
3899 type->t = ((s->type.t & ~VT_TYPEDEF) |
3900 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3901 type->ref = s->type.ref;
3902 if (t & (VT_CONSTANT | VT_VOLATILE))
3903 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3904 t = type->t;
3906 if (s->r) {
3907 /* get attributes from typedef */
3908 if (0 == ad->a.aligned)
3909 ad->a.aligned = s->a.aligned;
3910 if (0 == ad->a.func_call)
3911 ad->a.func_call = s->a.func_call;
3912 ad->a.packed |= s->a.packed;
3914 next();
3915 typespec_found = 1;
3916 break;
3918 type_found = 1;
3920 the_end:
3921 if (tcc_state->char_is_unsigned) {
3922 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3923 t |= VT_UNSIGNED;
3926 /* plain 'long' is never kept as a type by itself: map it to int or long long */
3927 if ((t & VT_BTYPE) == VT_LONG)
3928 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3929 defined TCC_TARGET_PE
3930 t = (t & ~VT_BTYPE) | VT_INT;
3931 #else
3932 t = (t & ~VT_BTYPE) | VT_LLONG;
3933 #endif
3934 type->t = t;
3935 return type_found;
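/* Illustrative sketch (not part of tcc): how a few specifier sequences end up
   encoded by the loop above:

       "unsigned char"       -> VT_BYTE | VT_DEFSIGN | VT_UNSIGNED
       "long long"           -> VT_LLONG
       "long"                -> VT_LONG, remapped just above to VT_INT or
                                VT_LLONG depending on the target
       "const volatile int"  -> VT_INT with VT_CONSTANT | VT_VOLATILE set
*/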
3938 /* convert a function parameter type (array to pointer and function to
3939 function pointer) */
3940 static inline void convert_parameter_type(CType *pt)
3942 /* remove const and volatile qualifiers (XXX: const could be used
3943 to indicate a const function parameter) */
3944 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3945 /* array must be transformed to pointer according to ANSI C */
3946 pt->t &= ~VT_ARRAY;
3947 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3948 mk_pointer(pt);
3952 ST_FUNC void parse_asm_str(CString *astr)
3954 skip('(');
3955 parse_mult_str(astr, "string constant");
3958 /* Parse an asm label and return the token */
3959 static int asm_label_instr(void)
3961 int v;
3962 CString astr;
3964 next();
3965 parse_asm_str(&astr);
3966 skip(')');
3967 #ifdef ASM_DEBUG
3968 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3969 #endif
3970 v = tok_alloc(astr.data, astr.size - 1)->tok;
3971 cstr_free(&astr);
3972 return v;
3975 static void post_type(CType *type, AttributeDef *ad, int storage)
3977 int n, l, t1, arg_size, align;
3978 Sym **plast, *s, *first;
3979 AttributeDef ad1;
3980 CType pt;
3982 if (tok == '(') {
3983 /* function declaration */
3984 next();
3985 l = 0;
3986 first = NULL;
3987 plast = &first;
3988 arg_size = 0;
3989 if (tok != ')') {
3990 for(;;) {
3991 /* read param name and compute offset */
3992 if (l != FUNC_OLD) {
3993 if (!parse_btype(&pt, &ad1)) {
3994 if (l) {
3995 tcc_error("invalid type");
3996 } else {
3997 l = FUNC_OLD;
3998 goto old_proto;
4001 l = FUNC_NEW;
4002 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4003 break;
4004 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4005 if ((pt.t & VT_BTYPE) == VT_VOID)
4006 tcc_error("parameter declared as void");
4007 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4008 } else {
4009 old_proto:
4010 n = tok;
4011 if (n < TOK_UIDENT)
4012 expect("identifier");
4013 pt.t = VT_INT;
4014 next();
4016 convert_parameter_type(&pt);
4017 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4018 *plast = s;
4019 plast = &s->next;
4020 if (tok == ')')
4021 break;
4022 skip(',');
4023 if (l == FUNC_NEW && tok == TOK_DOTS) {
4024 l = FUNC_ELLIPSIS;
4025 next();
4026 break;
4030 /* if no parameters, then old type prototype */
4031 if (l == 0)
4032 l = FUNC_OLD;
4033 skip(')');
4034 /* NOTE: const is ignored in returned type as it has a special
4035 meaning in gcc / C++ */
4036 type->t &= ~VT_CONSTANT;
4037 /* some ancient pre-K&R C allows a function to return an array
4038 and the array brackets to be put after the arguments, such
4039 that "int c()[]" means something like "int[] c()" */
4040 if (tok == '[') {
4041 next();
4042 skip(']'); /* only handle simple "[]" */
4043 type->t |= VT_PTR;
4045 /* we push an anonymous symbol which will contain the function prototype */
4046 ad->a.func_args = arg_size;
4047 s = sym_push(SYM_FIELD, type, 0, l);
4048 s->a = ad->a;
4049 s->next = first;
4050 type->t = VT_FUNC;
4051 type->ref = s;
4052 } else if (tok == '[') {
4053 int saved_nocode_wanted = nocode_wanted;
4054 /* array definition */
4055 next();
4056 if (tok == TOK_RESTRICT1)
4057 next();
4058 n = -1;
4059 t1 = 0;
4060 if (tok != ']') {
4061 if (!local_stack || (storage & VT_STATIC))
4062 vpushi(expr_const());
4063 else {
4064 /* the length of VLAs (which can only happen with local_stack && !VT_STATIC)
4065 must always be evaluated, even under nocode_wanted,
4066 so that its size slot is initialized (e.g. under sizeof
4067 or typeof). */
4068 nocode_wanted = 0;
4069 gexpr();
4071 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4072 n = vtop->c.i;
4073 if (n < 0)
4074 tcc_error("invalid array size");
4075 } else {
4076 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4077 tcc_error("size of variable length array should be an integer");
4078 t1 = VT_VLA;
4081 skip(']');
4082 /* parse next post type */
4083 post_type(type, ad, storage);
4084 if (type->t == VT_FUNC)
4085 tcc_error("declaration of an array of functions");
4086 t1 |= type->t & VT_VLA;
4088 if (t1 & VT_VLA) {
4089 loc -= type_size(&int_type, &align);
4090 loc &= -align;
4091 n = loc;
4093 vla_runtime_type_size(type, &align);
4094 gen_op('*');
4095 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4096 vswap();
4097 vstore();
4099 if (n != -1)
4100 vpop();
4101 nocode_wanted = saved_nocode_wanted;
4103 /* we push an anonymous symbol which will contain the array
4104 element type */
4105 s = sym_push(SYM_FIELD, type, 0, n);
4106 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4107 type->ref = s;
4111 /* Parse a type declaration (except basic type), and return the type
4112 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4113 expected. 'type' should contain the basic type. 'ad' is the
4114 attribute definition of the basic type. It can be modified by
4115 type_decl().
4117 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4119 Sym *s;
4120 CType type1, *type2;
4121 int qualifiers, storage;
4123 while (tok == '*') {
4124 qualifiers = 0;
4125 redo:
4126 next();
4127 switch(tok) {
4128 case TOK_CONST1:
4129 case TOK_CONST2:
4130 case TOK_CONST3:
4131 qualifiers |= VT_CONSTANT;
4132 goto redo;
4133 case TOK_VOLATILE1:
4134 case TOK_VOLATILE2:
4135 case TOK_VOLATILE3:
4136 qualifiers |= VT_VOLATILE;
4137 goto redo;
4138 case TOK_RESTRICT1:
4139 case TOK_RESTRICT2:
4140 case TOK_RESTRICT3:
4141 goto redo;
4142 /* XXX: clarify attribute handling */
4143 case TOK_ATTRIBUTE1:
4144 case TOK_ATTRIBUTE2:
4145 parse_attribute(ad);
4146 break;
4148 mk_pointer(type);
4149 type->t |= qualifiers;
4152 /* recursive type */
4153 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4154 type1.t = 0; /* XXX: same as int */
4155 if (tok == '(') {
4156 next();
4157 /* XXX: this is not correct to modify 'ad' at this point, but
4158 the syntax is not clear */
4159 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4160 parse_attribute(ad);
4161 type_decl(&type1, ad, v, td);
4162 skip(')');
4163 } else {
4164 /* type identifier */
4165 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4166 *v = tok;
4167 next();
4168 } else {
4169 if (!(td & TYPE_ABSTRACT))
4170 expect("identifier");
4171 *v = 0;
4174 storage = type->t & VT_STORAGE;
4175 type->t &= ~VT_STORAGE;
4176 post_type(type, ad, storage);
4177 type->t |= storage;
4178 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4179 parse_attribute(ad);
4181 if (!type1.t)
4182 return;
4183 /* append type at the end of type1 */
4184 type2 = &type1;
4185 for(;;) {
4186 s = type2->ref;
4187 type2 = &s->type;
4188 if (!type2->t) {
4189 *type2 = *type;
4190 break;
4193 *type = type1;
4194 type->t |= storage;
4197 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4198 ST_FUNC int lvalue_type(int t)
4200 int bt, r;
4201 r = VT_LVAL;
4202 bt = t & VT_BTYPE;
4203 if (bt == VT_BYTE || bt == VT_BOOL)
4204 r |= VT_LVAL_BYTE;
4205 else if (bt == VT_SHORT)
4206 r |= VT_LVAL_SHORT;
4207 else
4208 return r;
4209 if (t & VT_UNSIGNED)
4210 r |= VT_LVAL_UNSIGNED;
4211 return r;
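/* Illustrative sketch (not part of tcc): results that follow directly from
   the branches above:

       lvalue_type(VT_BYTE)                == VT_LVAL | VT_LVAL_BYTE
       lvalue_type(VT_SHORT | VT_UNSIGNED) == VT_LVAL | VT_LVAL_SHORT | VT_LVAL_UNSIGNED
       lvalue_type(VT_INT)                 == VT_LVAL
*/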
4214 /* indirection with full error checking and bound check */
4215 ST_FUNC void indir(void)
4217 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4218 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4219 return;
4220 expect("pointer");
4222 if (vtop->r & VT_LVAL)
4223 gv(RC_INT);
4224 vtop->type = *pointed_type(&vtop->type);
4225 /* Arrays and functions are never lvalues */
4226 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4227 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4228 vtop->r |= lvalue_type(vtop->type.t);
4229 /* if bound checking, the referenced pointer must be checked */
4230 #ifdef CONFIG_TCC_BCHECK
4231 if (tcc_state->do_bounds_check)
4232 vtop->r |= VT_MUSTBOUND;
4233 #endif
4237 /* pass a parameter to a function and do type checking and casting */
4238 static void gfunc_param_typed(Sym *func, Sym *arg)
4240 int func_type;
4241 CType type;
4243 func_type = func->c;
4244 if (func_type == FUNC_OLD ||
4245 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4246 /* default casting : only need to convert float to double */
4247 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4248 type.t = VT_DOUBLE;
4249 gen_cast(&type);
4250 } else if (vtop->type.t & VT_BITFIELD) {
4251 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4252 type.ref = vtop->type.ref;
4253 gen_cast(&type);
4255 } else if (arg == NULL) {
4256 tcc_error("too many arguments to function");
4257 } else {
4258 type = arg->type;
4259 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4260 gen_assign_cast(&type);
4264 /* parse an expression of the form '(type)' or '(expr)' and return its
4265 type */
4266 static void parse_expr_type(CType *type)
4268 int n;
4269 AttributeDef ad;
4271 skip('(');
4272 if (parse_btype(type, &ad)) {
4273 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4274 } else {
4275 expr_type(type);
4277 skip(')');
4280 static void parse_type(CType *type)
4282 AttributeDef ad;
4283 int n;
4285 if (!parse_btype(type, &ad)) {
4286 expect("type");
4288 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4291 static void vpush_tokc(int t)
4293 CType type;
4294 type.t = t;
4295 type.ref = 0;
4296 vsetc(&type, VT_CONST, &tokc);
4299 ST_FUNC void unary(void)
4301 int n, t, align, size, r, sizeof_caller;
4302 CType type;
4303 Sym *s;
4304 AttributeDef ad;
4306 sizeof_caller = in_sizeof;
4307 in_sizeof = 0;
4308 /* XXX: GCC 2.95.3 does not generate a jump table although it would be
4309 better here */
4310 tok_next:
4311 switch(tok) {
4312 case TOK_EXTENSION:
4313 next();
4314 goto tok_next;
4315 case TOK_CINT:
4316 case TOK_CCHAR:
4317 case TOK_LCHAR:
4318 vpushi(tokc.i);
4319 next();
4320 break;
4321 case TOK_CUINT:
4322 vpush_tokc(VT_INT | VT_UNSIGNED);
4323 next();
4324 break;
4325 case TOK_CLLONG:
4326 vpush_tokc(VT_LLONG);
4327 next();
4328 break;
4329 case TOK_CULLONG:
4330 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4331 next();
4332 break;
4333 case TOK_CFLOAT:
4334 vpush_tokc(VT_FLOAT);
4335 next();
4336 break;
4337 case TOK_CDOUBLE:
4338 vpush_tokc(VT_DOUBLE);
4339 next();
4340 break;
4341 case TOK_CLDOUBLE:
4342 vpush_tokc(VT_LDOUBLE);
4343 next();
4344 break;
4345 case TOK___FUNCTION__:
4346 if (!gnu_ext)
4347 goto tok_identifier;
4348 /* fall thru */
4349 case TOK___FUNC__:
4351 void *ptr;
4352 int len;
4353 /* special function name identifier */
4354 len = strlen(funcname) + 1;
4355 /* generate char[len] type */
4356 type.t = VT_BYTE;
4357 mk_pointer(&type);
4358 type.t |= VT_ARRAY;
4359 type.ref->c = len;
4360 vpush_ref(&type, data_section, data_section->data_offset, len);
4361 ptr = section_ptr_add(data_section, len);
4362 memcpy(ptr, funcname, len);
4363 next();
4365 break;
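/* Illustrative example (sketch, not from this file): the code above lays the
   current function name out as a static char array, so that

       #include <stdio.h>
       void f(void) { printf("%s\n", __func__); }   // prints "f"

   works; the GNU spelling __FUNCTION__ is only accepted when gnu_ext is set,
   as checked above. */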
4366 case TOK_LSTR:
4367 #ifdef TCC_TARGET_PE
4368 t = VT_SHORT | VT_UNSIGNED;
4369 #else
4370 t = VT_INT;
4371 #endif
4372 goto str_init;
4373 case TOK_STR:
4374 /* string parsing */
4375 t = VT_BYTE;
4376 str_init:
4377 if (tcc_state->warn_write_strings)
4378 t |= VT_CONSTANT;
4379 type.t = t;
4380 mk_pointer(&type);
4381 type.t |= VT_ARRAY;
4382 memset(&ad, 0, sizeof(AttributeDef));
4383 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4384 break;
4385 case '(':
4386 next();
4387 /* cast ? */
4388 if (parse_btype(&type, &ad)) {
4389 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4390 skip(')');
4391 /* check ISOC99 compound literal */
4392 if (tok == '{') {
4393 /* data is allocated locally by default */
4394 if (global_expr)
4395 r = VT_CONST;
4396 else
4397 r = VT_LOCAL;
4398 /* all except arrays are lvalues */
4399 if (!(type.t & VT_ARRAY))
4400 r |= lvalue_type(type.t);
4401 memset(&ad, 0, sizeof(AttributeDef));
4402 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4403 } else {
4404 if (sizeof_caller) {
4405 vpush(&type);
4406 return;
4408 unary();
4409 gen_cast(&type);
4411 } else if (tok == '{') {
4412 int saved_nocode_wanted = nocode_wanted;
4413 if (const_wanted)
4414 tcc_error("expected constant");
4415 /* save all registers */
4416 save_regs(0);
4417 /* statement expression : we do not accept break/continue
4418 inside as GCC does. We do retain the nocode_wanted state,
4419 as statement expressions can't ever be entered from the
4420 outside, so any reactivation of code emission (from labels
4421 or loop heads) can be disabled again after the end of it. */
4422 block(NULL, NULL, 1);
4423 nocode_wanted = saved_nocode_wanted;
4424 skip(')');
4425 } else {
4426 gexpr();
4427 skip(')');
4429 break;
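/* Illustrative examples (sketch, not from this file) of the three '(' forms
   distinguished above, written inside a function body:

       double d = (double)1;              // cast
       int *p = (int[]){1, 2, 3};         // C99 compound literal
       int n = ({ int t = 3; t * t; });   // GNU statement expression, n == 9

   Compound literals are allocated locally unless global_expr forces static
   allocation, as above. */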
4430 case '*':
4431 next();
4432 unary();
4433 indir();
4434 break;
4435 case '&':
4436 next();
4437 unary();
4438 /* function names must be treated as function pointers,
4439 except for unary '&' and sizeof. Since we consider that
4440 functions are not lvalues, we only have to handle it
4441 there and in function calls. */
4442 /* arrays can also be used although they are not lvalues */
4443 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4444 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4445 test_lvalue();
4446 mk_pointer(&vtop->type);
4447 gaddrof();
4448 break;
4449 case '!':
4450 next();
4451 unary();
4452 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4453 CType boolean;
4454 boolean.t = VT_BOOL;
4455 gen_cast(&boolean);
4456 vtop->c.i = !vtop->c.i;
4457 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4458 vtop->c.i ^= 1;
4459 else {
4460 save_regs(1);
4461 vseti(VT_JMP, gvtst(1, 0));
4463 break;
4464 case '~':
4465 next();
4466 unary();
4467 vpushi(-1);
4468 gen_op('^');
4469 break;
4470 case '+':
4471 next();
4472 unary();
4473 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4474 tcc_error("pointer not accepted for unary plus");
4475 /* In order to force a cast, we add zero, except for floating point
4476 where we really need a no-op (otherwise -0.0 would be transformed
4477 into +0.0). */
4478 if (!is_float(vtop->type.t)) {
4479 vpushi(0);
4480 gen_op('+');
4482 break;
4483 case TOK_SIZEOF:
4484 case TOK_ALIGNOF1:
4485 case TOK_ALIGNOF2:
4486 t = tok;
4487 next();
4488 in_sizeof++;
4489 unary_type(&type); // this call resets in_sizeof to 0 (see top of unary())
4490 size = type_size(&type, &align);
4491 if (t == TOK_SIZEOF) {
4492 if (!(type.t & VT_VLA)) {
4493 if (size < 0)
4494 tcc_error("sizeof applied to an incomplete type");
4495 vpushs(size);
4496 } else {
4497 vla_runtime_type_size(&type, &align);
4499 } else {
4500 vpushs(align);
4502 vtop->type.t |= VT_UNSIGNED;
4503 break;
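/* Illustrative example (sketch, not from this file): sizeof/alignof results
   are pushed as unsigned sizes; for a VLA the size is produced at run time
   via vla_runtime_type_size(), e.g.

       #include <stddef.h>
       void f(int n) {
           int a[n];
           size_t s = sizeof a;      // n * sizeof(int), computed at run time
           size_t w = sizeof(int);   // integer constant (typically 4)
       }
*/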
4505 case TOK_builtin_expect:
4507 /* __builtin_expect is a no-op for now */
4508 next();
4509 skip('(');
4510 expr_eq();
4511 skip(',');
4512 nocode_wanted++;
4513 expr_lor_const();
4514 vpop();
4515 nocode_wanted--;
4516 skip(')');
4518 break;
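/* Illustrative note (sketch, not from this file): __builtin_expect(e, c) is
   parsed but acts as a no-op, i.e. it simply yields e; the hint c is read
   with code generation suppressed:

       if (__builtin_expect(x == 0, 0)) { }   // behaves exactly like (x == 0)
*/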
4519 case TOK_builtin_types_compatible_p:
4521 CType type1, type2;
4522 next();
4523 skip('(');
4524 parse_type(&type1);
4525 skip(',');
4526 parse_type(&type2);
4527 skip(')');
4528 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4529 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4530 vpushi(is_compatible_types(&type1, &type2));
4532 break;
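/* Illustrative example (sketch, not from this file): top-level qualifiers are
   stripped before comparing, matching the usual GCC semantics:

       int a = __builtin_types_compatible_p(int, const int);      // 1
       int b = __builtin_types_compatible_p(int, unsigned int);   // 0
*/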
4533 case TOK_builtin_choose_expr:
4535 int64_t c;
4536 next();
4537 skip('(');
4538 c = expr_const64();
4539 skip(',');
4540 if (!c) {
4541 nocode_wanted++;
4543 expr_eq();
4544 if (!c) {
4545 vpop();
4546 nocode_wanted--;
4548 skip(',');
4549 if (c) {
4550 nocode_wanted++;
4552 expr_eq();
4553 if (c) {
4554 vpop();
4555 nocode_wanted--;
4557 skip(')');
4559 break;
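/* Illustrative example (sketch, not from this file): the controlling value
   must be an integer constant (expr_const64 above) and only the selected
   operand generates code:

       long w = __builtin_choose_expr(sizeof(void *) == 8, 8L, 4L);
*/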
4560 case TOK_builtin_constant_p:
4562 int res;
4563 next();
4564 skip('(');
4565 nocode_wanted++;
4566 gexpr();
4567 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4568 vpop();
4569 nocode_wanted--;
4570 skip(')');
4571 vpushi(res);
4573 break;
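/* Illustrative example (sketch, not from this file): the check above tests
   whether the parsed expression folded to a plain constant, and no code is
   emitted for it:

       int a = __builtin_constant_p(3 * 4);              // 1
       int f(int x) { return __builtin_constant_p(x); }  // 0
*/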
4574 case TOK_builtin_frame_address:
4575 case TOK_builtin_return_address:
4577 int tok1 = tok;
4578 int level;
4579 CType type;
4580 next();
4581 skip('(');
4582 if (tok != TOK_CINT) {
4583 tcc_error("%s only takes positive integers",
4584 tok1 == TOK_builtin_return_address ?
4585 "__builtin_return_address" :
4586 "__builtin_frame_address");
4588 level = (uint32_t)tokc.i;
4589 next();
4590 skip(')');
4591 type.t = VT_VOID;
4592 mk_pointer(&type);
4593 vset(&type, VT_LOCAL, 0); /* local frame */
4594 while (level--) {
4595 mk_pointer(&vtop->type);
4596 indir(); /* -> parent frame */
4598 if (tok1 == TOK_builtin_return_address) {
4599 // assume return address is just above frame pointer on stack
4600 vpushi(PTR_SIZE);
4601 gen_op('+');
4602 mk_pointer(&vtop->type);
4603 indir();
4606 break;
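/* Illustrative example (sketch, not from this file): the level must be a
   plain integer literal; each level follows one saved frame pointer, and the
   return address is assumed to sit just above it, as noted above:

       void *pc = __builtin_return_address(0);   // caller's return address
       void *fp = __builtin_frame_address(0);    // current frame
*/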
4607 #ifdef TCC_TARGET_X86_64
4608 #ifdef TCC_TARGET_PE
4609 case TOK_builtin_va_start:
4611 next();
4612 skip('(');
4613 expr_eq();
4614 skip(',');
4615 expr_eq();
4616 skip(')');
4617 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4618 tcc_error("__builtin_va_start expects a local variable");
4619 vtop->r &= ~(VT_LVAL | VT_REF);
4620 vtop->type = char_pointer_type;
4621 vtop->c.i += 8;
4622 vstore();
4624 break;
4625 #else
4626 case TOK_builtin_va_arg_types:
4628 CType type;
4629 next();
4630 skip('(');
4631 parse_type(&type);
4632 skip(')');
4633 vpushi(classify_x86_64_va_arg(&type));
4635 break;
4636 #endif
4637 #endif
4639 #ifdef TCC_TARGET_ARM64
4640 case TOK___va_start: {
4641 next();
4642 skip('(');
4643 expr_eq();
4644 skip(',');
4645 expr_eq();
4646 skip(')');
4647 //xx check types
4648 gen_va_start();
4649 vpushi(0);
4650 vtop->type.t = VT_VOID;
4651 break;
4653 case TOK___va_arg: {
4654 CType type;
4655 next();
4656 skip('(');
4657 expr_eq();
4658 skip(',');
4659 parse_type(&type);
4660 skip(')');
4661 //xx check types
4662 gen_va_arg(&type);
4663 vtop->type = type;
4664 break;
4666 case TOK___arm64_clear_cache: {
4667 next();
4668 skip('(');
4669 expr_eq();
4670 skip(',');
4671 expr_eq();
4672 skip(')');
4673 gen_clear_cache();
4674 vpushi(0);
4675 vtop->type.t = VT_VOID;
4676 break;
4678 #endif
4679 /* pre operations */
4680 case TOK_INC:
4681 case TOK_DEC:
4682 t = tok;
4683 next();
4684 unary();
4685 inc(0, t);
4686 break;
4687 case '-':
4688 next();
4689 unary();
4690 t = vtop->type.t & VT_BTYPE;
4691 if (is_float(t)) {
4692 /* In IEEE negate(x) isn't subtract(0,x), but rather
4693 subtract(-0, x). */
4694 vpush(&vtop->type);
4695 if (t == VT_FLOAT)
4696 vtop->c.f = -1.0 * 0.0;
4697 else if (t == VT_DOUBLE)
4698 vtop->c.d = -1.0 * 0.0;
4699 else
4700 vtop->c.ld = -1.0 * 0.0;
4701 } else
4702 vpushi(0);
4703 vswap();
4704 gen_op('-');
4705 break;
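/* Illustrative note (sketch, not from this file): floating-point negation is
   built as (-0.0) - x so that the sign of zero survives:

       double a = -0.0;        // stays negative zero
       double b = 0.0 - 0.0;   // would be +0.0, hence the -0.0 operand above

   Integer negation uses a plain 0 - x. */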
4706 case TOK_LAND:
4707 if (!gnu_ext)
4708 goto tok_identifier;
4709 next();
4710 /* allow to take the address of a label */
4711 if (tok < TOK_UIDENT)
4712 expect("label identifier");
4713 s = label_find(tok);
4714 if (!s) {
4715 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4716 } else {
4717 if (s->r == LABEL_DECLARED)
4718 s->r = LABEL_FORWARD;
4720 if (!s->type.t) {
4721 s->type.t = VT_VOID;
4722 mk_pointer(&s->type);
4723 s->type.t |= VT_STATIC;
4725 vpushsym(&s->type, s);
4726 next();
4727 break;
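/* Illustrative example (sketch, not from this file) of the GNU
   address-of-label extension handled above, typically paired with computed
   goto (see the TOK_GOTO handling in block() below):

       int f(int i) {
           void *tbl[] = { &&even, &&odd };
           goto *tbl[i & 1];
       even: return 0;
       odd:  return 1;
       }
*/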
4729 // special qNaN, sNaN and infinity values
4730 case TOK___NAN__:
4731 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4732 next();
4733 break;
4734 case TOK___SNAN__:
4735 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4736 next();
4737 break;
4738 case TOK___INF__:
4739 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4740 next();
4741 break;
4743 default:
4744 tok_identifier:
4745 t = tok;
4746 next();
4747 if (t < TOK_UIDENT)
4748 expect("identifier");
4749 s = sym_find(t);
4750 if (!s) {
4751 const char *name = get_tok_str(t, NULL);
4752 if (tok != '(')
4753 tcc_error("'%s' undeclared", name);
4754 /* for simple function calls, we tolerate an undeclared
4755 external reference to an int() function */
4756 if (tcc_state->warn_implicit_function_declaration
4757 #ifdef TCC_TARGET_PE
4758 /* people must be warned about using undeclared WINAPI functions
4759 (which usually start with uppercase letter) */
4760 || (name[0] >= 'A' && name[0] <= 'Z')
4761 #endif
4763 tcc_warning("implicit declaration of function '%s'", name);
4764 s = external_global_sym(t, &func_old_type, 0);
4767 r = s->r;
4768 /* A symbol that has a register is a local register variable,
4769 which starts out as VT_LOCAL value. */
4770 if ((r & VT_VALMASK) < VT_CONST)
4771 r = (r & ~VT_VALMASK) | VT_LOCAL;
4773 vset(&s->type, r, s->c);
4774 /* Point to s as backpointer (even without r&VT_SYM).
4775 Will be used by at least the x86 inline asm parser for
4776 regvars. */
4777 vtop->sym = s;
4778 if (vtop->r & VT_SYM) {
4779 vtop->c.i = 0;
4781 break;
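/* Illustrative example (sketch, not from this file): an unknown identifier is
   only tolerated when it is being called, in which case it gets the old-style
   int() type with a warning:

       int f(void) { return bar(1); }   // warning: implicit declaration of 'bar'
       int g(void) { return baz + 1; }  // error: 'baz' undeclared

   ('bar' and 'baz' are just placeholder names.)  On PE targets the warning is
   forced for capitalized, WINAPI-looking names, as above. */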
4784 /* post operations */
4785 while (1) {
4786 if (tok == TOK_INC || tok == TOK_DEC) {
4787 inc(1, tok);
4788 next();
4789 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4790 int qualifiers;
4791 /* field */
4792 if (tok == TOK_ARROW)
4793 indir();
4794 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4795 test_lvalue();
4796 gaddrof();
4797 /* expect a pointer to a struct or union */
4798 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4799 expect("struct or union");
4800 if (tok == TOK_CDOUBLE)
4801 expect("field name");
4802 next();
4803 if (tok == TOK_CINT || tok == TOK_CUINT)
4804 expect("field name");
4805 s = find_field(&vtop->type, tok);
4806 if (!s)
4807 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4808 /* add field offset to pointer */
4809 vtop->type = char_pointer_type; /* change type to 'char *' */
4810 vpushi(s->c);
4811 gen_op('+');
4812 /* change type to field type, and set to lvalue */
4813 vtop->type = s->type;
4814 vtop->type.t |= qualifiers;
4815 /* an array is never an lvalue */
4816 if (!(vtop->type.t & VT_ARRAY)) {
4817 vtop->r |= lvalue_type(vtop->type.t);
4818 #ifdef CONFIG_TCC_BCHECK
4819 /* if bound checking, the referenced pointer must be checked */
4820 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4821 vtop->r |= VT_MUSTBOUND;
4822 #endif
4824 next();
4825 } else if (tok == '[') {
4826 next();
4827 gexpr();
4828 gen_op('+');
4829 indir();
4830 skip(']');
4831 } else if (tok == '(') {
4832 SValue ret;
4833 Sym *sa;
4834 int nb_args, ret_nregs, ret_align, regsize, variadic;
4836 /* function call */
4837 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4838 /* pointer test (no array accepted) */
4839 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4840 vtop->type = *pointed_type(&vtop->type);
4841 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4842 goto error_func;
4843 } else {
4844 error_func:
4845 expect("function pointer");
4847 } else {
4848 vtop->r &= ~VT_LVAL; /* no lvalue */
4850 /* get return type */
4851 s = vtop->type.ref;
4852 next();
4853 sa = s->next; /* first parameter */
4854 nb_args = regsize = 0;
4855 ret.r2 = VT_CONST;
4856 /* compute first implicit argument if a structure is returned */
4857 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4858 variadic = (s->c == FUNC_ELLIPSIS);
4859 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4860 &ret_align, &regsize);
4861 if (!ret_nregs) {
4862 /* get some space for the returned structure */
4863 size = type_size(&s->type, &align);
4864 #ifdef TCC_TARGET_ARM64
4865 /* On arm64, a small struct is returned in registers.
4866 It is much easier to write it to memory if we know
4867 that we are allowed to write some extra bytes, so
4868 round the allocated space up to a power of 2: */
4869 if (size < 16)
4870 while (size & (size - 1))
4871 size = (size | (size - 1)) + 1;
4872 #endif
4873 loc = (loc - size) & -align;
4874 ret.type = s->type;
4875 ret.r = VT_LOCAL | VT_LVAL;
4876 /* pass it as 'int' to avoid structure arg passing
4877 problems */
4878 vseti(VT_LOCAL, loc);
4879 ret.c = vtop->c;
4880 nb_args++;
4882 } else {
4883 ret_nregs = 1;
4884 ret.type = s->type;
4887 if (ret_nregs) {
4888 /* return in register */
4889 if (is_float(ret.type.t)) {
4890 ret.r = reg_fret(ret.type.t);
4891 #ifdef TCC_TARGET_X86_64
4892 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4893 ret.r2 = REG_QRET;
4894 #endif
4895 } else {
4896 #ifndef TCC_TARGET_ARM64
4897 #ifdef TCC_TARGET_X86_64
4898 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4899 #else
4900 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4901 #endif
4902 ret.r2 = REG_LRET;
4903 #endif
4904 ret.r = REG_IRET;
4906 ret.c.i = 0;
4908 if (tok != ')') {
4909 for(;;) {
4910 expr_eq();
4911 gfunc_param_typed(s, sa);
4912 nb_args++;
4913 if (sa)
4914 sa = sa->next;
4915 if (tok == ')')
4916 break;
4917 skip(',');
4920 if (sa)
4921 tcc_error("too few arguments to function");
4922 skip(')');
4923 gfunc_call(nb_args);
4925 /* return value */
4926 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4927 vsetc(&ret.type, r, &ret.c);
4928 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4931 /* handle packed struct return */
4932 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4933 int addr, offset;
4935 size = type_size(&s->type, &align);
4936 /* We often write whole registers, so make sure there's enough
4937 space. Assume the register size is a power of 2. */
4938 if (regsize > align)
4939 align = regsize;
4940 loc = (loc - size) & -align;
4941 addr = loc;
4942 offset = 0;
4943 for (;;) {
4944 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4945 vswap();
4946 vstore();
4947 vtop--;
4948 if (--ret_nregs == 0)
4949 break;
4950 offset += regsize;
4952 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4954 } else {
4955 break;
4960 ST_FUNC void expr_prod(void)
4962 int t;
4964 unary();
4965 while (tok == '*' || tok == '/' || tok == '%') {
4966 t = tok;
4967 next();
4968 unary();
4969 gen_op(t);
4973 ST_FUNC void expr_sum(void)
4975 int t;
4977 expr_prod();
4978 while (tok == '+' || tok == '-') {
4979 t = tok;
4980 next();
4981 expr_prod();
4982 gen_op(t);
4986 static void expr_shift(void)
4988 int t;
4990 expr_sum();
4991 while (tok == TOK_SHL || tok == TOK_SAR) {
4992 t = tok;
4993 next();
4994 expr_sum();
4995 gen_op(t);
4999 static void expr_cmp(void)
5001 int t;
5003 expr_shift();
5004 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5005 tok == TOK_ULT || tok == TOK_UGE) {
5006 t = tok;
5007 next();
5008 expr_shift();
5009 gen_op(t);
5013 static void expr_cmpeq(void)
5015 int t;
5017 expr_cmp();
5018 while (tok == TOK_EQ || tok == TOK_NE) {
5019 t = tok;
5020 next();
5021 expr_cmp();
5022 gen_op(t);
5026 static void expr_and(void)
5028 expr_cmpeq();
5029 while (tok == '&') {
5030 next();
5031 expr_cmpeq();
5032 gen_op('&');
5036 static void expr_xor(void)
5038 expr_and();
5039 while (tok == '^') {
5040 next();
5041 expr_and();
5042 gen_op('^');
5046 static void expr_or(void)
5048 expr_xor();
5049 while (tok == '|') {
5050 next();
5051 expr_xor();
5052 gen_op('|');
5056 /* XXX: fix this mess */
5057 static void expr_land_const(void)
5059 expr_or();
5060 while (tok == TOK_LAND) {
5061 next();
5062 expr_or();
5063 gen_op(TOK_LAND);
5066 static void expr_lor_const(void)
5068 expr_land_const();
5069 while (tok == TOK_LOR) {
5070 next();
5071 expr_land_const();
5072 gen_op(TOK_LOR);
5076 static void expr_land(void)
5078 expr_or();
5079 if (tok == TOK_LAND) {
5080 int t = 0;
5081 for(;;) {
5082 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5083 CType ctb;
5084 ctb.t = VT_BOOL;
5085 gen_cast(&ctb);
5086 if (vtop->c.i) {
5087 vpop();
5088 } else {
5089 nocode_wanted++;
5090 while (tok == TOK_LAND) {
5091 next();
5092 expr_or();
5093 vpop();
5095 nocode_wanted--;
5096 if (t)
5097 gsym(t);
5098 gen_cast(&int_type);
5099 break;
5101 } else {
5102 if (!t)
5103 save_regs(1);
5104 t = gvtst(1, t);
5106 if (tok != TOK_LAND) {
5107 if (t)
5108 vseti(VT_JMPI, t);
5109 else
5110 vpushi(1);
5111 break;
5113 next();
5114 expr_or();
5119 static void expr_lor(void)
5121 expr_land();
5122 if (tok == TOK_LOR) {
5123 int t = 0;
5124 for(;;) {
5125 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5126 CType ctb;
5127 ctb.t = VT_BOOL;
5128 gen_cast(&ctb);
5129 if (!vtop->c.i) {
5130 vpop();
5131 } else {
5132 nocode_wanted++;
5133 while (tok == TOK_LOR) {
5134 next();
5135 expr_land();
5136 vpop();
5138 nocode_wanted--;
5139 if (t)
5140 gsym(t);
5141 gen_cast(&int_type);
5142 break;
5144 } else {
5145 if (!t)
5146 save_regs(1);
5147 t = gvtst(0, t);
5149 if (tok != TOK_LOR) {
5150 if (t)
5151 vseti(VT_JMP, t);
5152 else
5153 vpushi(0);
5154 break;
5156 next();
5157 expr_land();
5162 /* Assuming vtop is a value used in a conditional context
5163 (i.e. compared with zero) return 0 if it's false, 1 if
5164 true and -1 if it can't be statically determined. */
5165 static int condition_3way(void)
5167 int c = -1;
5168 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5169 (!(vtop->r & VT_SYM) ||
5170 !(vtop->sym->type.t & VT_WEAK))) {
5171 CType boolean;
5172 boolean.t = VT_BOOL;
5173 vdup();
5174 gen_cast(&boolean);
5175 c = vtop->c.i;
5176 vpop();
5178 return c;
5181 static void expr_cond(void)
5183 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5184 SValue sv;
5185 CType type, type1, type2;
5187 expr_lor();
5188 if (tok == '?') {
5189 next();
5190 c = condition_3way();
5191 g = (tok == ':' && gnu_ext);
5192 if (c < 0) {
5193 /* needed to avoid having different registers saved in
5194 each branch */
5195 if (is_float(vtop->type.t)) {
5196 rc = RC_FLOAT;
5197 #ifdef TCC_TARGET_X86_64
5198 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5199 rc = RC_ST0;
5201 #endif
5202 } else
5203 rc = RC_INT;
5204 gv(rc);
5205 save_regs(1);
5206 if (g)
5207 gv_dup();
5208 tt = gvtst(1, 0);
5210 } else {
5211 if (!g)
5212 vpop();
5213 tt = 0;
5216 if (1) {
5217 if (c == 0)
5218 nocode_wanted++;
5219 if (!g)
5220 gexpr();
5222 type1 = vtop->type;
5223 sv = *vtop; /* save value to handle it later */
5224 vtop--; /* no vpop so that FP stack is not flushed */
5225 skip(':');
5227 u = 0;
5228 if (c < 0)
5229 u = gjmp(0);
5230 gsym(tt);
5232 if (c == 0)
5233 nocode_wanted--;
5234 if (c == 1)
5235 nocode_wanted++;
5236 expr_cond();
5237 if (c == 1)
5238 nocode_wanted--;
5240 type2 = vtop->type;
5241 t1 = type1.t;
5242 bt1 = t1 & VT_BTYPE;
5243 t2 = type2.t;
5244 bt2 = t2 & VT_BTYPE;
5245 /* cast operands to correct type according to ISOC rules */
5246 if (is_float(bt1) || is_float(bt2)) {
5247 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5248 type.t = VT_LDOUBLE;
5250 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5251 type.t = VT_DOUBLE;
5252 } else {
5253 type.t = VT_FLOAT;
5255 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5256 /* cast to biggest op */
5257 type.t = VT_LLONG;
5258 /* convert to unsigned if it does not fit in a long long */
5259 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5260 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5261 type.t |= VT_UNSIGNED;
5262 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5263 /* If one is a null ptr constant the result type
5264 is the other. */
5265 if (is_null_pointer (vtop))
5266 type = type1;
5267 else if (is_null_pointer (&sv))
5268 type = type2;
5269 /* XXX: test pointer compatibility, C99 has more elaborate
5270 rules here. */
5271 else
5272 type = type1;
5273 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5274 /* XXX: test function pointer compatibility */
5275 type = bt1 == VT_FUNC ? type1 : type2;
5276 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5277 /* XXX: test structure compatibility */
5278 type = bt1 == VT_STRUCT ? type1 : type2;
5279 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5280 /* NOTE: as an extension, we accept void on only one side */
5281 type.t = VT_VOID;
5282 } else {
5283 /* integer operations */
5284 type.t = VT_INT;
5285 /* convert to unsigned if it does not fit in an integer */
5286 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5287 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5288 type.t |= VT_UNSIGNED;
5290 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5291 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5292 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5293 islv &= c < 0;
5295 /* now we convert second operand */
5296 if (c != 1) {
5297 gen_cast(&type);
5298 if (islv) {
5299 mk_pointer(&vtop->type);
5300 gaddrof();
5301 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5302 gaddrof();
5305 rc = RC_INT;
5306 if (is_float(type.t)) {
5307 rc = RC_FLOAT;
5308 #ifdef TCC_TARGET_X86_64
5309 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5310 rc = RC_ST0;
5312 #endif
5313 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5314 /* for long longs, we use fixed registers to avoid having
5315 to handle a complicated move */
5316 rc = RC_IRET;
5319 tt = r2 = 0;
5320 if (c < 0) {
5321 r2 = gv(rc);
5322 tt = gjmp(0);
5324 gsym(u);
5326 /* this is horrible, but we must also convert first
5327 operand */
5328 if (c != 0) {
5329 *vtop = sv;
5330 gen_cast(&type);
5331 if (islv) {
5332 mk_pointer(&vtop->type);
5333 gaddrof();
5334 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5335 gaddrof();
5338 if (c < 0) {
5339 r1 = gv(rc);
5340 move_reg(r2, r1, type.t);
5341 vtop->r = r2;
5342 gsym(tt);
5343 if (islv)
5344 indir();
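/* Illustrative example (sketch, not from this file): besides the plain
   ternary, the GNU short form with the middle operand omitted is accepted
   when gnu_ext is set:

       int n = x ?: 1;   // like x ? x : 1, but x is evaluated only once

   Struct results are kept lvalue-compatible by the address/deref trick
   described in the comment above. */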
5350 static void expr_eq(void)
5352 int t;
5354 expr_cond();
5355 if (tok == '=' ||
5356 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5357 tok == TOK_A_XOR || tok == TOK_A_OR ||
5358 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5359 test_lvalue();
5360 t = tok;
5361 next();
5362 if (t == '=') {
5363 expr_eq();
5364 } else {
5365 vdup();
5366 expr_eq();
5367 gen_op(t & 0x7f);
5369 vstore();
5373 ST_FUNC void gexpr(void)
5375 while (1) {
5376 expr_eq();
5377 if (tok != ',')
5378 break;
5379 vpop();
5380 next();
5384 /* parse an expression and return its type without any side effect. */
5385 static void expr_type(CType *type)
5388 nocode_wanted++;
5389 gexpr();
5390 *type = vtop->type;
5391 vpop();
5392 nocode_wanted--;
5395 /* parse a unary expression and return its type without any side
5396 effect. */
5397 static void unary_type(CType *type)
5399 nocode_wanted++;
5400 unary();
5401 *type = vtop->type;
5402 vpop();
5403 nocode_wanted--;
5406 /* parse a constant expression and return value in vtop. */
5407 static void expr_const1(void)
5409 const_wanted++;
5410 expr_cond();
5411 const_wanted--;
5414 /* parse an integer constant and return its value. */
5415 static inline int64_t expr_const64(void)
5417 int64_t c;
5418 expr_const1();
5419 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5420 expect("constant expression");
5421 c = vtop->c.i;
5422 vpop();
5423 return c;
5426 /* parse an integer constant and return its value.
5427 Complain if it doesn't fit 32bit (signed or unsigned). */
5428 ST_FUNC int expr_const(void)
5430 int c;
5431 int64_t wc = expr_const64();
5432 c = wc;
5433 if (c != wc && (unsigned)c != wc)
5434 tcc_error("constant exceeds 32 bit");
5435 return c;
5438 /* return the label token if current token is a label, otherwise
5439 return zero */
5440 static int is_label(void)
5442 int last_tok;
5444 /* fast test first */
5445 if (tok < TOK_UIDENT)
5446 return 0;
5447 /* no need to save tokc because tok is an identifier */
5448 last_tok = tok;
5449 next();
5450 if (tok == ':') {
5451 next();
5452 return last_tok;
5453 } else {
5454 unget_tok(last_tok);
5455 return 0;
5459 static void label_or_decl(int l)
5461 int last_tok;
5463 /* fast test first */
5464 if (tok >= TOK_UIDENT)
5466 /* no need to save tokc because tok is an identifier */
5467 last_tok = tok;
5468 next();
5469 if (tok == ':') {
5470 unget_tok(last_tok);
5471 return;
5473 unget_tok(last_tok);
5475 decl(l);
5478 #ifndef TCC_TARGET_ARM64
5479 static void gfunc_return(CType *func_type)
5481 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5482 CType type, ret_type;
5483 int ret_align, ret_nregs, regsize;
5484 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5485 &ret_align, &regsize);
5486 if (0 == ret_nregs) {
5487 /* if returning structure, must copy it to implicit
5488 first pointer arg location */
5489 type = *func_type;
5490 mk_pointer(&type);
5491 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5492 indir();
5493 vswap();
5494 /* copy structure value to pointer */
5495 vstore();
5496 } else {
5497 /* returning structure packed into registers */
5498 int r, size, addr, align;
5499 size = type_size(func_type,&align);
5500 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5501 (vtop->c.i & (ret_align-1)))
5502 && (align & (ret_align-1))) {
5503 loc = (loc - size) & -ret_align;
5504 addr = loc;
5505 type = *func_type;
5506 vset(&type, VT_LOCAL | VT_LVAL, addr);
5507 vswap();
5508 vstore();
5509 vpop();
5510 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5512 vtop->type = ret_type;
5513 if (is_float(ret_type.t))
5514 r = rc_fret(ret_type.t);
5515 else
5516 r = RC_IRET;
5518 if (ret_nregs == 1)
5519 gv(r);
5520 else {
5521 for (;;) {
5522 vdup();
5523 gv(r);
5524 vpop();
5525 if (--ret_nregs == 0)
5526 break;
5527 /* We assume that when a structure is returned in multiple
5528 registers, their classes are consecutive values of the
5529 sequence s(n) = 2^n */
5530 r <<= 1;
5531 vtop->c.i += regsize;
5535 } else if (is_float(func_type->t)) {
5536 gv(rc_fret(func_type->t));
5537 } else {
5538 gv(RC_IRET);
5540 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5542 #endif
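/* Illustrative example (sketch, not from this file): whether a returned
   struct travels in registers or through the hidden pointer argument is
   decided by gfunc_sret() above and depends on the target ABI:

       struct P { int x, y; };
       struct P make(void) { struct P p = {1, 2}; return p; }
*/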
5544 static int case_cmp(const void *pa, const void *pb)
5546 int64_t a = (*(struct case_t**) pa)->v1;
5547 int64_t b = (*(struct case_t**) pb)->v1;
5548 return a < b ? -1 : a > b;
5551 static void gcase(struct case_t **base, int len, int *bsym)
5553 struct case_t *p;
5554 int e;
5555 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5556 gv(RC_INT);
5557 while (len > 4) {
5558 /* binary search */
5559 p = base[len/2];
5560 vdup();
5561 if (ll)
5562 vpushll(p->v2);
5563 else
5564 vpushi(p->v2);
5565 gen_op(TOK_LE);
5566 e = gtst(1, 0);
5567 vdup();
5568 if (ll)
5569 vpushll(p->v1);
5570 else
5571 vpushi(p->v1);
5572 gen_op(TOK_GE);
5573 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5574 /* x < v1 */
5575 gcase(base, len/2, bsym);
5576 if (cur_switch->def_sym)
5577 gjmp_addr(cur_switch->def_sym);
5578 else
5579 *bsym = gjmp(*bsym);
5580 /* x > v2 */
5581 gsym(e);
5582 e = len/2 + 1;
5583 base += e; len -= e;
5585 /* linear scan */
5586 while (len--) {
5587 p = *base++;
5588 vdup();
5589 if (ll)
5590 vpushll(p->v2);
5591 else
5592 vpushi(p->v2);
5593 if (p->v1 == p->v2) {
5594 gen_op(TOK_EQ);
5595 gtst_addr(0, p->sym);
5596 } else {
5597 gen_op(TOK_LE);
5598 e = gtst(1, 0);
5599 vdup();
5600 if (ll)
5601 vpushll(p->v1);
5602 else
5603 vpushi(p->v1);
5604 gen_op(TOK_GE);
5605 gtst_addr(0, p->sym);
5606 gsym(e);
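/* Illustrative example (sketch, not from this file): gcase() dispatches over
   a sorted array of case ranges -- binary search while more than 4 entries
   remain, then a linear scan.  The ranges come from the GNU
   "case lo ... hi:" syntax accepted in block() below:

       switch (c) {
       case '0' ... '9': return 1;
       case 'a' ... 'f': return 1;
       default:          return 0;
       }
*/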
5611 static void block(int *bsym, int *csym, int is_expr)
5613 int a, b, c, d, cond;
5614 Sym *s;
5616 /* generate line number info */
5617 if (tcc_state->do_debug)
5618 tcc_debug_line(tcc_state);
5620 if (is_expr) {
5621 /* default return value is (void) */
5622 vpushi(0);
5623 vtop->type.t = VT_VOID;
5626 if (tok == TOK_IF) {
5627 /* if test */
5628 int saved_nocode_wanted = nocode_wanted;
5629 next();
5630 skip('(');
5631 gexpr();
5632 skip(')');
5633 cond = condition_3way();
5634 if (cond == 1)
5635 a = 0, vpop();
5636 else
5637 a = gvtst(1, 0);
5638 if (cond == 0)
5639 nocode_wanted |= 0x20000000;
5640 block(bsym, csym, 0);
5641 if (cond != 1)
5642 nocode_wanted = saved_nocode_wanted;
5643 c = tok;
5644 if (c == TOK_ELSE) {
5645 next();
5646 d = gjmp(0);
5647 gsym(a);
5648 if (cond == 1)
5649 nocode_wanted |= 0x20000000;
5650 block(bsym, csym, 0);
5651 gsym(d); /* patch else jmp */
5652 if (cond != 0)
5653 nocode_wanted = saved_nocode_wanted;
5654 } else
5655 gsym(a);
5656 } else if (tok == TOK_WHILE) {
5657 int saved_nocode_wanted;
5658 nocode_wanted &= ~0x20000000;
5659 next();
5660 d = ind;
5661 vla_sp_restore();
5662 skip('(');
5663 gexpr();
5664 skip(')');
5665 a = gvtst(1, 0);
5666 b = 0;
5667 ++local_scope;
5668 saved_nocode_wanted = nocode_wanted;
5669 block(&a, &b, 0);
5670 nocode_wanted = saved_nocode_wanted;
5671 --local_scope;
5672 gjmp_addr(d);
5673 gsym(a);
5674 gsym_addr(b, d);
5675 } else if (tok == '{') {
5676 Sym *llabel;
5677 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5679 next();
5680 /* record local declaration stack position */
5681 s = local_stack;
5682 llabel = local_label_stack;
5683 ++local_scope;
5685 /* handle local labels declarations */
5686 if (tok == TOK_LABEL) {
5687 next();
5688 for(;;) {
5689 if (tok < TOK_UIDENT)
5690 expect("label identifier");
5691 label_push(&local_label_stack, tok, LABEL_DECLARED);
5692 next();
5693 if (tok == ',') {
5694 next();
5695 } else {
5696 skip(';');
5697 break;
5701 while (tok != '}') {
5702 label_or_decl(VT_LOCAL);
5703 if (tok != '}') {
5704 if (is_expr)
5705 vpop();
5706 block(bsym, csym, is_expr);
5709 /* pop locally defined labels */
5710 label_pop(&local_label_stack, llabel);
5711 /* pop locally defined symbols */
5712 --local_scope;
5713 /* In the is_expr case (a statement expression is finished here),
5714 vtop might refer to symbols on the local_stack. Either via the
5715 type or via vtop->sym. We can't pop those nor any that in turn
5716 might be referred to. To make it easier we don't roll back
5717 any symbols in that case; some upper level call to block() will
5718 do that. We do have to remove such symbols from the lookup
5719 tables, though. sym_pop will do that. */
5720 sym_pop(&local_stack, s, is_expr);
5722 /* Pop VLA frames and restore stack pointer if required */
5723 if (vlas_in_scope > saved_vlas_in_scope) {
5724 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5725 vla_sp_restore();
5727 vlas_in_scope = saved_vlas_in_scope;
5729 next();
5730 } else if (tok == TOK_RETURN) {
5731 next();
5732 if (tok != ';') {
5733 gexpr();
5734 gen_assign_cast(&func_vt);
5735 gfunc_return(&func_vt);
5737 skip(';');
5738 /* jump unless last stmt in top-level block */
5739 if (tok != '}' || local_scope != 1)
5740 rsym = gjmp(rsym);
5741 nocode_wanted |= 0x20000000;
5742 } else if (tok == TOK_BREAK) {
5743 /* compute jump */
5744 if (!bsym)
5745 tcc_error("cannot break");
5746 *bsym = gjmp(*bsym);
5747 next();
5748 skip(';');
5749 nocode_wanted |= 0x20000000;
5750 } else if (tok == TOK_CONTINUE) {
5751 /* compute jump */
5752 if (!csym)
5753 tcc_error("cannot continue");
5754 vla_sp_restore_root();
5755 *csym = gjmp(*csym);
5756 next();
5757 skip(';');
5758 } else if (tok == TOK_FOR) {
5759 int e;
5760 int saved_nocode_wanted;
5761 nocode_wanted &= ~0x20000000;
5762 next();
5763 skip('(');
5764 s = local_stack;
5765 ++local_scope;
5766 if (tok != ';') {
5767 /* c99 for-loop init decl? */
5768 if (!decl0(VT_LOCAL, 1)) {
5769 /* no, regular for-loop init expr */
5770 gexpr();
5771 vpop();
5774 skip(';');
5775 d = ind;
5776 c = ind;
5777 vla_sp_restore();
5778 a = 0;
5779 b = 0;
5780 if (tok != ';') {
5781 gexpr();
5782 a = gvtst(1, 0);
5784 skip(';');
5785 if (tok != ')') {
5786 e = gjmp(0);
5787 c = ind;
5788 vla_sp_restore();
5789 gexpr();
5790 vpop();
5791 gjmp_addr(d);
5792 gsym(e);
5794 skip(')');
5795 saved_nocode_wanted = nocode_wanted;
5796 block(&a, &b, 0);
5797 nocode_wanted = saved_nocode_wanted;
5798 gjmp_addr(c);
5799 gsym(a);
5800 gsym_addr(b, c);
5801 --local_scope;
5802 sym_pop(&local_stack, s, 0);
5804 } else
5805 if (tok == TOK_DO) {
5806 int saved_nocode_wanted;
5807 nocode_wanted &= ~0x20000000;
5808 next();
5809 a = 0;
5810 b = 0;
5811 d = ind;
5812 vla_sp_restore();
5813 saved_nocode_wanted = nocode_wanted;
5814 block(&a, &b, 0);
5815 skip(TOK_WHILE);
5816 skip('(');
5817 gsym(b);
5818 gexpr();
5819 c = gvtst(0, 0);
5820 gsym_addr(c, d);
5821 nocode_wanted = saved_nocode_wanted;
5822 skip(')');
5823 gsym(a);
5824 skip(';');
5825 } else
5826 if (tok == TOK_SWITCH) {
5827 struct switch_t *saved, sw;
5828 int saved_nocode_wanted = nocode_wanted;
5829 SValue switchval;
5830 next();
5831 skip('(');
5832 gexpr();
5833 skip(')');
5834 switchval = *vtop--;
5835 a = 0;
5836 b = gjmp(0); /* jump to first case */
5837 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5838 saved = cur_switch;
5839 cur_switch = &sw;
5840 block(&a, csym, 0);
5841 nocode_wanted = saved_nocode_wanted;
5842 a = gjmp(a); /* add implicit break */
5843 /* case lookup */
5844 gsym(b);
5845 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5846 for (b = 1; b < sw.n; b++)
5847 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5848 tcc_error("duplicate case value");
5849 /* Our switch table sorting is signed, so the compared
5850 value needs to be as well when it's 64bit. */
5851 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5852 switchval.type.t &= ~VT_UNSIGNED;
5853 vpushv(&switchval);
5854 gcase(sw.p, sw.n, &a);
5855 vpop();
5856 if (sw.def_sym)
5857 gjmp_addr(sw.def_sym);
5858 dynarray_reset(&sw.p, &sw.n);
5859 cur_switch = saved;
5860 /* break label */
5861 gsym(a);
5862 } else
5863 if (tok == TOK_CASE) {
5864 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5865 if (!cur_switch)
5866 expect("switch");
5867 nocode_wanted &= ~0x20000000;
5868 next();
5869 cr->v1 = cr->v2 = expr_const64();
5870 if (gnu_ext && tok == TOK_DOTS) {
5871 next();
5872 cr->v2 = expr_const64();
5873 if (cr->v2 < cr->v1)
5874 tcc_warning("empty case range");
5876 cr->sym = ind;
5877 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
5878 skip(':');
5879 is_expr = 0;
5880 goto block_after_label;
5881 } else
5882 if (tok == TOK_DEFAULT) {
5883 next();
5884 skip(':');
5885 if (!cur_switch)
5886 expect("switch");
5887 if (cur_switch->def_sym)
5888 tcc_error("too many 'default'");
5889 cur_switch->def_sym = ind;
5890 is_expr = 0;
5891 goto block_after_label;
5892 } else
5893 if (tok == TOK_GOTO) {
5894 next();
5895 if (tok == '*' && gnu_ext) {
5896 /* computed goto */
5897 next();
5898 gexpr();
5899 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5900 expect("pointer");
5901 ggoto();
5902 } else if (tok >= TOK_UIDENT) {
5903 s = label_find(tok);
5904 /* put forward definition if needed */
5905 if (!s) {
5906 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5907 } else {
5908 if (s->r == LABEL_DECLARED)
5909 s->r = LABEL_FORWARD;
5911 vla_sp_restore_root();
5912 if (s->r & LABEL_FORWARD)
5913 s->jnext = gjmp(s->jnext);
5914 else
5915 gjmp_addr(s->jnext);
5916 next();
5917 } else {
5918 expect("label identifier");
5920 skip(';');
5921 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5922 asm_instr();
5923 } else {
5924 b = is_label();
5925 if (b) {
5926 /* label case */
5927 s = label_find(b);
5928 if (s) {
5929 if (s->r == LABEL_DEFINED)
5930 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5931 gsym(s->jnext);
5932 s->r = LABEL_DEFINED;
5933 } else {
5934 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5936 s->jnext = ind;
5937 vla_sp_restore();
5938 /* we accept this, but it is a mistake */
5939 block_after_label:
5940 nocode_wanted &= ~0x20000000;
5941 if (tok == '}') {
5942 tcc_warning("deprecated use of label at end of compound statement");
5943 } else {
5944 if (is_expr)
5945 vpop();
5946 block(bsym, csym, is_expr);
5948 } else {
5949 /* expression case */
5950 if (tok != ';') {
5951 if (is_expr) {
5952 vpop();
5953 gexpr();
5954 } else {
5955 gexpr();
5956 vpop();
5959 skip(';');
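/* Illustrative example (sketch, not from this file): block() above also
   accepts the GNU __label__ declaration, which restricts a label to the
   enclosing block (handy in macros that expand to a braced block):

       {
           __label__ done;
           if (err) goto done;
           work();
       done:;
       }

   Such labels live on local_label_stack and are popped when the block ends.
   ('err' and 'work' are placeholders.) */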
5964 #define EXPR_CONST 1
5965 #define EXPR_ANY 2
5967 static void parse_init_elem(int expr_type)
5969 int saved_global_expr;
5970 switch(expr_type) {
5971 case EXPR_CONST:
5972 /* compound literals must be allocated globally in this case */
5973 saved_global_expr = global_expr;
5974 global_expr = 1;
5975 expr_const1();
5976 global_expr = saved_global_expr;
5977 /* NOTE: symbols are accepted */
5978 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5979 #ifdef TCC_TARGET_PE
5980 || (vtop->type.t & VT_IMPORT)
5981 #endif
5983 tcc_error("initializer element is not constant");
5984 break;
5985 case EXPR_ANY:
5986 expr_eq();
5987 break;
5991 /* t is the array or struct type. c is the array or struct
5992 address. cur_field is the pointer to the current
5993 value, for arrays the 'c' member contains the current start
5994 index and the 'r' contains the end index (in case of range init).
5995 'size_only' is true if only size info is needed (only used
5996 in arrays) */
5997 static void decl_designator(CType *type, Section *sec, unsigned long c,
5998 Sym **cur_field, int size_only)
6000 Sym *s, *f;
6001 int notfirst, index, index_last, align, l, nb_elems, elem_size;
6002 CType type1;
6004 notfirst = 0;
6005 elem_size = 0;
6006 nb_elems = 1;
6007 if (gnu_ext && (l = is_label()) != 0)
6008 goto struct_field;
6009 while (tok == '[' || tok == '.') {
6010 if (tok == '[') {
6011 if (!(type->t & VT_ARRAY))
6012 expect("array type");
6013 s = type->ref;
6014 next();
6015 index = expr_const();
6016 if (index < 0 || (s->c >= 0 && index >= s->c))
6017 tcc_error("invalid index");
6018 if (tok == TOK_DOTS && gnu_ext) {
6019 next();
6020 index_last = expr_const();
6021 if (index_last < 0 ||
6022 (s->c >= 0 && index_last >= s->c) ||
6023 index_last < index)
6024 tcc_error("invalid index");
6025 } else {
6026 index_last = index;
6028 skip(']');
6029 if (!notfirst) {
6030 (*cur_field)->c = index;
6031 (*cur_field)->r = index_last;
6033 type = pointed_type(type);
6034 elem_size = type_size(type, &align);
6035 c += index * elem_size;
6036 /* NOTE: we only support ranges for last designator */
6037 nb_elems = index_last - index + 1;
6038 if (nb_elems != 1) {
6039 notfirst = 1;
6040 break;
6042 } else {
6043 next();
6044 l = tok;
6045 next();
6046 struct_field:
6047 if ((type->t & VT_BTYPE) != VT_STRUCT)
6048 expect("struct/union type");
6049 f = find_field(type, l);
6050 if (!f)
6051 expect("field");
6052 if (!notfirst)
6053 *cur_field = f;
6054 /* XXX: fix this mess by using explicit storage field */
6055 type1 = f->type;
6056 type1.t |= (type->t & ~VT_TYPE);
6057 type = &type1;
6058 c += f->c;
6060 notfirst = 1;
6062 if (notfirst) {
6063 if (tok == '=') {
6064 next();
6065 } else {
6066 if (!gnu_ext)
6067 expect("=");
6069 } else {
6070 if (type->t & VT_ARRAY) {
6071 index = (*cur_field)->c;
6072 if (type->ref->c >= 0 && index >= type->ref->c)
6073 tcc_error("index too large");
6074 type = pointed_type(type);
6075 c += index * type_size(type, &align);
6076 } else {
6077 f = *cur_field;
6078 while (f && (f->v & SYM_FIRST_ANOM))
6079 *cur_field = f = f->next;
6080 if (!f)
6081 tcc_error("too many field init");
6082 /* XXX: fix this mess by using explicit storage field */
6083 type1 = f->type;
6084 type1.t |= (type->t & ~VT_TYPE);
6085 type = &type1;
6086 c += f->c;
6089 decl_initializer(type, sec, c, 0, size_only);
6091 /* XXX: make it more general */
6092 if (!size_only && nb_elems > 1) {
6093 unsigned long c_end;
6094 uint8_t *src, *dst;
6095 int i;
6097 if (!sec) {
6098 vset(type, VT_LOCAL|VT_LVAL, c);
6099 for (i = 1; i < nb_elems; i++) {
6100 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6101 vswap();
6102 vstore();
6104 vpop();
6105 } else {
6106 c_end = c + nb_elems * elem_size;
6107 if (c_end > sec->data_allocated)
6108 section_realloc(sec, c_end);
6109 src = sec->data + c;
6110 dst = src;
6111 for(i = 1; i < nb_elems; i++) {
6112 dst += elem_size;
6113 memcpy(dst, src, elem_size);
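/* Illustrative example (sketch, not from this file) of the designators parsed
   above, including the GNU range form (supported only for the last
   designator, see the note above):

       struct T { int x, y; };
       int a[10]  = { [2] = 7, [4 ... 6] = 1 };
       struct T t = { .y = 2, .x = 1 };

   For a range, the first element is initialized and then copied over the
   remaining slots, as done just above. */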
6119 /* store a value or an expression directly in global data or in local array */
6120 static void init_putv(CType *type, Section *sec, unsigned long c)
6122 int bt, bit_pos, bit_size;
6123 void *ptr;
6124 unsigned long long bit_mask;
6125 CType dtype;
6127 dtype = *type;
6128 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6130 if (sec) {
6131 int size, align;
6132 /* XXX: not portable */
6133 /* XXX: generate error if incorrect relocation */
6134 gen_assign_cast(&dtype);
6135 bt = type->t & VT_BTYPE;
6136 size = type_size(type, &align);
6137 if (c + size > sec->data_allocated) {
6138 section_realloc(sec, c + size);
6140 ptr = sec->data + c;
6141 /* XXX: make code faster ? */
6142 if (!(type->t & VT_BITFIELD)) {
6143 bit_pos = 0;
6144 bit_size = PTR_SIZE * 8;
6145 bit_mask = -1LL;
6146 } else {
6147 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6148 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6149 bit_mask = (1LL << bit_size) - 1;
6151 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6152 vtop->sym->v >= SYM_FIRST_ANOM &&
6153 /* XXX This rejects compound literals like
6154 '(void *){ptr}'. The problem is that '&sym' is
6155 represented the same way, which would be ruled out
6156 by the SYM_FIRST_ANOM check above, but also '"string"'
6157 in 'char *p = "string"' is represented the same
6158 with the type being VT_PTR and the symbol being an
6159 anonymous one. That is, there's no difference in vtop
6160 between '(void *){x}' and '&(void *){x}'. Ignore
6161 pointer typed entities here. Hopefully no real code
6162 will ever use compound literals with scalar type. */
6163 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6164 /* These come from compound literals, memcpy stuff over. */
6165 Section *ssec;
6166 ElfW(Sym) *esym;
6167 ElfW_Rel *rel;
6168 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6169 ssec = tcc_state->sections[esym->st_shndx];
6170 memmove (ptr, ssec->data + esym->st_value, size);
6171 if (ssec->reloc) {
6172 /* We need to copy over all memory contents, and that
6173 includes relocations. Use the fact that relocs are
6174 created in order, so look from the end of relocs
6175 until we hit one before the copied region. */
6176 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6177 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6178 while (num_relocs--) {
6179 rel--;
6180 if (rel->r_offset >= esym->st_value + size)
6181 continue;
6182 if (rel->r_offset < esym->st_value)
6183 break;
6184 /* Note: if the same fields are initialized multiple
6185 times (possible with designators) then we possibly
6186 add multiple relocations for the same offset here.
6187 That would lead to wrong code, the last reloc needs
6188 to win. We clean this up later after the whole
6189 initializer is parsed. */
6190 put_elf_reloca(symtab_section, sec,
6191 c + rel->r_offset - esym->st_value,
6192 ELFW(R_TYPE)(rel->r_info),
6193 ELFW(R_SYM)(rel->r_info),
6194 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6195 rel->r_addend
6196 #else
6198 #endif
6202 } else {
6203 if ((vtop->r & VT_SYM) &&
6204 (bt == VT_BYTE ||
6205 bt == VT_SHORT ||
6206 bt == VT_DOUBLE ||
6207 bt == VT_LDOUBLE ||
6208 #if PTR_SIZE == 8
6209 (bt == VT_LLONG && bit_size != 64) ||
6210 bt == VT_INT
6211 #else
6212 bt == VT_LLONG ||
6213 (bt == VT_INT && bit_size != 32)
6214 #endif
6216 tcc_error("initializer element is not computable at load time");
6217 switch(bt) {
6218 /* XXX: when cross-compiling we assume that each type has the
6219 same representation on host and target, which is likely to
6220 be wrong in the case of long double */
6221 case VT_BOOL:
6222 vtop->c.i = (vtop->c.i != 0);
6223 case VT_BYTE:
6224 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6225 break;
6226 case VT_SHORT:
6227 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6228 break;
6229 case VT_DOUBLE:
6230 *(double *)ptr = vtop->c.d;
6231 break;
6232 case VT_LDOUBLE:
6233 if (sizeof(long double) == LDOUBLE_SIZE)
6234 *(long double *)ptr = vtop->c.ld;
6235 else if (sizeof(double) == LDOUBLE_SIZE)
6236 *(double *)ptr = vtop->c.ld;
6237 else
6238 tcc_error("can't cross compile long double constants");
6239 break;
6240 #if PTR_SIZE != 8
6241 case VT_LLONG:
6242 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6243 break;
6244 #else
6245 case VT_LLONG:
6246 #endif
6247 case VT_PTR:
6249 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6250 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6251 if (vtop->r & VT_SYM)
6252 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6253 else
6254 *(addr_t *)ptr |= val;
6255 #else
6256 if (vtop->r & VT_SYM)
6257 greloc(sec, vtop->sym, c, R_DATA_PTR);
6258 *(addr_t *)ptr |= val;
6259 #endif
6260 break;
6262 default:
6264 int val = (vtop->c.i & bit_mask) << bit_pos;
6265 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6266 if (vtop->r & VT_SYM)
6267 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6268 else
6269 *(int *)ptr |= val;
6270 #else
6271 if (vtop->r & VT_SYM)
6272 greloc(sec, vtop->sym, c, R_DATA_PTR);
6273 *(int *)ptr |= val;
6274 #endif
6275 break;
6279 vtop--;
6280 } else {
6281 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6282 vswap();
6283 vstore();
6284 vpop();
6288 /* put zeros for variable based init */
6289 static void init_putz(Section *sec, unsigned long c, int size)
6291 if (sec) {
6292 /* nothing to do because globals are already set to zero */
6293 } else {
6294 vpush_global_sym(&func_old_type, TOK_memset);
6295 vseti(VT_LOCAL, c);
6296 #ifdef TCC_TARGET_ARM
6297 vpushs(size);
6298 vpushi(0);
6299 #else
6300 vpushi(0);
6301 vpushs(size);
6302 #endif
6303 gfunc_call(3);
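/* Illustrative note (sketch, not from this file): for static storage the
   zero fill is free because the data is already zeroed; for locals
   init_putz() emits a memset() call, e.g. the trailing elements of

       int a[5] = { 1, 2, 3 };   // a[3] and a[4] are zero-filled

   are cleared that way when 'a' is a local variable. */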
6307 /* 't' contains the type and storage info. 'c' is the offset of the
6308 object in section 'sec'. If 'sec' is NULL, it means stack based
6309 allocation. 'first' is true if array '{' must be read (multi
6310 dimension implicit array init handling). 'size_only' is true if
6311 size only evaluation is wanted (only for arrays). */
6312 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6313 int first, int size_only)
6315 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6316 int size1, align1;
6317 int have_elem;
6318 Sym *s, *f;
6319 Sym indexsym;
6320 CType *t1;
6322 /* If we currently are at an '}' or ',' we have read an initializer
6323 element in one of our callers, and not yet consumed it. */
6324 have_elem = tok == '}' || tok == ',';
6325 if (!have_elem && tok != '{' &&
6326 /* In case of strings we have special handling for arrays, so
6327 don't consume them as initializer value (which would commit them
6328 to some anonymous symbol). */
6329 tok != TOK_LSTR && tok != TOK_STR &&
6330 !size_only) {
6331 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6332 have_elem = 1;
6335 if (have_elem &&
6336 !(type->t & VT_ARRAY) &&
6337 /* Use is_compatible_parameter_types() to strip toplevel qualifiers.
6338 The source type might have VT_CONSTANT set, which is
6339 of course assignable to non-const elements. */
6340 is_compatible_parameter_types(type, &vtop->type)) {
6341 init_putv(type, sec, c);
6342 } else if (type->t & VT_ARRAY) {
6343 s = type->ref;
6344 n = s->c;
6345 array_length = 0;
6346 t1 = pointed_type(type);
6347 size1 = type_size(t1, &align1);
6349 no_oblock = 1;
6350 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6351 tok == '{') {
6352 if (tok != '{')
6353 tcc_error("character array initializer must be a literal,"
6354 " optionally enclosed in braces");
6355 skip('{');
6356 no_oblock = 0;
6359 /* only parse strings here if correct type (otherwise: handle
6360 them as ((w)char *) expressions) */
6361 if ((tok == TOK_LSTR &&
6362 #ifdef TCC_TARGET_PE
6363 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6364 #else
6365 (t1->t & VT_BTYPE) == VT_INT
6366 #endif
6367 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6368 while (tok == TOK_STR || tok == TOK_LSTR) {
6369 int cstr_len, ch;
6371 /* compute maximum number of chars wanted */
6372 if (tok == TOK_STR)
6373 cstr_len = tokc.str.size;
6374 else
6375 cstr_len = tokc.str.size / sizeof(nwchar_t);
6376 cstr_len--;
6377 nb = cstr_len;
6378 if (n >= 0 && nb > (n - array_length))
6379 nb = n - array_length;
6380 if (!size_only) {
6381 if (cstr_len > nb)
6382 tcc_warning("initializer-string for array is too long");
6383 /* in order to go faster for the common case (char
6384 string in a global variable), we handle it
6385 specifically */
6386 if (sec && tok == TOK_STR && size1 == 1) {
6387 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6388 } else {
6389 for(i=0;i<nb;i++) {
6390 if (tok == TOK_STR)
6391 ch = ((unsigned char *)tokc.str.data)[i];
6392 else
6393 ch = ((nwchar_t *)tokc.str.data)[i];
6394 vpushi(ch);
6395 init_putv(t1, sec, c + (array_length + i) * size1);
6399 array_length += nb;
6400 next();
6402 /* only add trailing zero if enough storage (no
6403 warning in this case since it is standard) */
6404 if (n < 0 || array_length < n) {
6405 if (!size_only) {
6406 vpushi(0);
6407 init_putv(t1, sec, c + (array_length * size1));
6409 array_length++;
6411 } else {
6412 indexsym.c = 0;
6413 indexsym.r = 0;
6414 f = &indexsym;
6416 do_init_list:
6417 while (tok != '}' || have_elem) {
6418 decl_designator(type, sec, c, &f, size_only);
6419 have_elem = 0;
6420 index = f->c;
6421 /* must put zero in holes (note that doing it that way
6422 ensures that it even works with designators) */
6423 if (!size_only && array_length < index) {
6424 init_putz(sec, c + array_length * size1,
6425 (index - array_length) * size1);
6427 if (type->t & VT_ARRAY) {
6428 index = indexsym.c = ++indexsym.r;
6429 } else {
6430 index = index + type_size(&f->type, &align1);
6431 if (s->type.t == TOK_UNION)
6432 f = NULL;
6433 else
6434 f = f->next;
6436 if (index > array_length)
6437 array_length = index;
6439 if (type->t & VT_ARRAY) {
6440 /* special test for multi dimensional arrays (may not
6441 be strictly correct if designators are used at the
6442 same time) */
6443 if (no_oblock && index >= n)
6444 break;
6445 } else {
6446 if (no_oblock && f == NULL)
6447 break;
6449 if (tok == '}')
6450 break;
6451 skip(',');
6454 /* put zeros at the end */
6455 if (!size_only && array_length < n) {
6456 init_putz(sec, c + array_length * size1,
6457 (n - array_length) * size1);
6459 if (!no_oblock)
6460 skip('}');
6461 /* patch type size if needed, which happens only for array types */
6462 if (n < 0)
6463 s->c = array_length;
6464 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6465 size1 = 1;
6466 no_oblock = 1;
6467 if (first || tok == '{') {
6468 skip('{');
6469 no_oblock = 0;
6471 s = type->ref;
6472 f = s->next;
6473 array_length = 0;
6474 n = s->c;
6475 goto do_init_list;
6476 } else if (tok == '{') {
6477 next();
6478 decl_initializer(type, sec, c, first, size_only);
6479 skip('}');
6480 } else if (size_only) {
6481 /* If we supported only ISO C we wouldn't have to accept calling
6482 this on anything other than an array with size_only==1 (and even then
6483 only on the outermost level, so no recursion would be needed),
6484 because initializing a flex array member isn't supported.
6485 But GNU C supports it, so we need to recurse even into
6486 subfields of structs and arrays when size_only is set. */
6487 /* just skip expression */
6488 parlevel = parlevel1 = 0;
6489 while ((parlevel > 0 || parlevel1 > 0 ||
6490 (tok != '}' && tok != ',')) && tok != -1) {
6491 if (tok == '(')
6492 parlevel++;
6493 else if (tok == ')') {
6494 if (parlevel == 0 && parlevel1 == 0)
6495 break;
6496 parlevel--;
6498 else if (tok == '{')
6499 parlevel1++;
6500 else if (tok == '}') {
6501 if (parlevel == 0 && parlevel1 == 0)
6502 break;
6503 parlevel1--;
6505 next();
6507 } else {
6508 if (!have_elem) {
6509 /* This should happen only when we haven't parsed
6510 the init element above for fear of committing a
6511 string constant to memory too early. */
6512 if (tok != TOK_STR && tok != TOK_LSTR)
6513 expect("string constant");
6514 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6516 init_putv(type, sec, c);
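/* Illustrative examples (sketch, not from this file) of the character-array
   forms handled above, where the braces are optional and a terminating zero
   is added only if there is room:

       char s[4] = "abc";      // exactly fits, including the '\0'
       char t[]  = { "hi" };   // braced form, size deduced as 3
       char u[3] = "abc";      // no room for the '\0', and that is standard
*/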
6520 /* parse an initializer for type 't' if 'has_init' is non zero, and
6521 allocate space in local or global data space ('r' is either
6522 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6523 variable 'v' of scope 'scope' is declared before initializers
6524 are parsed. If 'v' is zero, then a reference to the new object
6525 is put in the value stack. If 'has_init' is 2, a special parsing
6526 is done to handle string constants. */
6527 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6528 int has_init, int v, int scope)
6530 int size, align, addr, data_offset;
6531 int level;
6532 ParseState saved_parse_state = {0};
6533 TokenString *init_str = NULL;
6534 Section *sec;
6535 Sym *flexible_array;
6537 flexible_array = NULL;
6538 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6539 Sym *field = type->ref->next;
6540 if (field) {
6541 while (field->next)
6542 field = field->next;
6543 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6544 flexible_array = field;
6548 size = type_size(type, &align);
6549 /* If unknown size, we must evaluate it before
6550 evaluating initializers because
6551 initializers can generate global data too
6552 (e.g. string pointers or ISOC99 compound
6553 literals). It also simplifies the handling
6554 of local initializers */
6555 if (size < 0 || (flexible_array && has_init)) {
6556 if (!has_init)
6557 tcc_error("unknown type size");
6558 /* get all init string */
6559 init_str = tok_str_alloc();
6560 if (has_init == 2) {
6561 /* only get strings */
6562 while (tok == TOK_STR || tok == TOK_LSTR) {
6563 tok_str_add_tok(init_str);
6564 next();
6566 } else {
6567 level = 0;
6568 while (level > 0 || (tok != ',' && tok != ';')) {
6569 if (tok < 0)
6570 tcc_error("unexpected end of file in initializer");
6571 tok_str_add_tok(init_str);
6572 if (tok == '{')
6573 level++;
6574 else if (tok == '}') {
6575 level--;
6576 if (level <= 0) {
6577 next();
6578 break;
6581 next();
6584 tok_str_add(init_str, -1);
6585 tok_str_add(init_str, 0);
6587 /* compute size */
6588 save_parse_state(&saved_parse_state);
6590 begin_macro(init_str, 1);
6591 next();
6592 decl_initializer(type, NULL, 0, 1, 1);
6593 /* prepare second initializer parsing */
6594 macro_ptr = init_str->str;
6595 next();
6597 /* if still unknown size, error */
6598 size = type_size(type, &align);
6599 if (size < 0)
6600 tcc_error("unknown type size");
6602 /* If there's a flexible array member and it was used in the
6603 initializer, adjust the size accordingly. */
6604 if (flexible_array &&
6605 flexible_array->type.ref->c > 0)
6606 size += flexible_array->type.ref->c
6607 * pointed_size(&flexible_array->type);
6608 /* take into account specified alignment if bigger */
6609 if (ad->a.aligned) {
6610 int speca = 1 << (ad->a.aligned - 1);
6611 if (speca > align)
6612 align = speca;
6613 } else if (ad->a.packed) {
6614 align = 1;
6616 if ((r & VT_VALMASK) == VT_LOCAL) {
6617 sec = NULL;
6618 #ifdef CONFIG_TCC_BCHECK
6619 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6620 loc--;
6622 #endif
6623 loc = (loc - size) & -align;
6624 addr = loc;
#ifdef CONFIG_TCC_BCHECK
        /* handle bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
            addr_t *bounds_ptr;
            /* add padding between regions */
            loc--;
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym_push(v, type, r, addr);
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        Sym *sym = NULL;
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                patch_storage(sym, type);
                if (sym->type.t & VT_EXTERN) {
                    /* if the variable is extern, it was not allocated */
                    sym->type.t &= ~VT_EXTERN;
                    /* set the array size if it was omitted in the extern
                       declaration */
                    if ((sym->type.t & VT_ARRAY) &&
                        sym->type.ref->c < 0 &&
                        type->ref->c >= 0)
                        sym->type.ref->c = type->ref->c;
                } else if (!has_init) {
                    /* We accept several definitions of the same global
                       variable. This is tricky, because we must play with
                       the SHN_COMMON type of the symbol. */
                    /* no init data, we won't add more to the symbol */
                    update_storage(sym);
                    goto no_alloc;
                } else if (sym->c) {
                    ElfW(Sym) *esym;
                    esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
                    if (esym->st_shndx == data_section->sh_num)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                }
            }
        }
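        /* Example of the extern handling above: after
               extern int x[];
           a later
               int x[10];
           reuses the existing symbol, clears VT_EXTERN and fills in the
           array size that the extern declaration omitted. */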
        /* allocate the symbol in the corresponding section */
        sec = ad->section;
        if (!sec) {
            if (has_init)
                sec = data_section;
            else if (tcc_state->nocommon)
                sec = bss_section;
        }

        if (sec) {
            data_offset = sec->data_offset;
            data_offset = (data_offset + align - 1) & -align;
            addr = data_offset;
            /* very important to increment the global pointer at this point,
               because initializers themselves can create new initializers */
            data_offset += size;
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (tcc_state->do_bounds_check)
                data_offset++;
#endif
            sec->data_offset = data_offset;
            /* allocate section space to put the data */
            if (sec->sh_type != SHT_NOBITS &&
                data_offset > sec->data_allocated)
                section_realloc(sec, data_offset);
            /* align the section if needed */
            if (align > sec->sh_addralign)
                sec->sh_addralign = align;
        } else {
            addr = 0; /* avoid warning */
        }

        if (v) {
            if (scope != VT_CONST || !sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                sym->asm_label = ad->asm_label;
            }
            /* update symbol definition */
            if (sec) {
                put_extern_sym(sym, sec, addr, size);
            } else {
                put_extern_sym(sym, SECTION_COMMON, align, size);
            }
        } else {
            /* push global reference */
            sym = get_sym_ref(type, sec, addr, size);
            vpushsym(type, sym);
        }

#ifdef CONFIG_TCC_BCHECK
        /* handle bounds now, because the symbol must be defined
           before the relocation can be emitted */
        if (tcc_state->do_bounds_check) {
            addr_t *bounds_ptr;

            greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }

    if (type->t & VT_VLA) {
        int a;

        /* save the current stack pointer */
        if (vlas_in_scope == 0) {
            if (vla_sp_root_loc == -1)
                vla_sp_root_loc = (loc -= PTR_SIZE);
            gen_vla_sp_save(vla_sp_root_loc);
        }

        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
        gen_vla_sp_save(addr);
        vla_sp_loc = addr;
        vlas_in_scope++;
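        /* For example, a block-scope declaration
               int a[n];
           saves the stack pointer before the first VLA of the scope
           (vla_sp_root_loc), lets gen_vla_alloc() grow the stack by the
           runtime size, and remembers the new stack pointer in 'addr'. */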
    } else if (has_init) {
        size_t oldreloc_offset = 0;
        if (sec && sec->reloc)
            oldreloc_offset = sec->reloc->data_offset;
        decl_initializer(type, sec, addr, 1, 0);
        if (sec && sec->reloc)
            squeeze_multi_relocs(sec, oldreloc_offset);
        /* patch the flexible array member size back to -1
           for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        restore_parse_state(&saved_parse_state);
    }
}

/* parse an old style function declaration list */
/* XXX: check multiple parameter */
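/* For example, an old-style (K&R) definition
       int add(a, b)
       int a;
       int b;
       { return a + b; }
   is handled here: each declaration between ')' and '{' is matched
   against the parameter list recorded in 'func_sym'. */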
static void func_decl_list(Sym *func_sym)
{
    AttributeDef ad;
    int v;
    Sym *s;
    CType btype, type;

    /* parse each declaration */
    while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
           tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
        if (!parse_btype(&btype, &ad))
            expect("declaration list");
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            tok == ';') {
            /* we accept an enum/struct declaration with no declarator after it */
        } else {
            for(;;) {
                type = btype;
                type_decl(&type, &ad, &v, TYPE_DIRECT);
                /* find the parameter in the function parameter list */
                s = func_sym->next;
                while (s != NULL) {
                    if ((s->v & ~SYM_FIELD) == v)
                        goto found;
                    s = s->next;
                }
                tcc_error("declaration for parameter '%s' but no such parameter",
                          get_tok_str(v, NULL));
            found:
                /* check that no storage specifier except 'register' was given */
                if (type.t & VT_STORAGE)
                    tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
                convert_parameter_type(&type);
                /* we can add the type (NOTE: it could be local to the function) */
                s->type = type;
                /* accept other parameters */
                if (tok == ',')
                    next();
                else
                    break;
            }
        }
        skip(';');
    }
}

/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    nocode_wanted = 0;
    ind = cur_text_section->data_offset;
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    /* Initialize VLA state */
    vla_sp_loc = -1;
    vla_sp_root_loc = -1;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(&sym->type);
    local_scope = 0;
    rsym = 0;
    block(NULL, NULL, 0);
    nocode_wanted = 0;
    gsym(rsym);
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL);
    /* reset local stack */
    local_scope = 0;
    sym_pop(&local_stack, NULL, 0);
    /* end of function */
    /* patch symbol size */
    ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
        ind - func_ind;
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    nocode_wanted = 1;
    check_vstack();
}

static void gen_inline_functions(TCCState *s)
{
    Sym *sym;
    int inline_generated, i, ln;
    struct InlineFunc *fn;

    ln = file->line_num;
    /* iterate while inline functions are referenced */
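    /* Generating one used inline function can itself reference another
       static inline (e.g. a helper called only from the first one), so
       the passes are repeated until a pass generates nothing new. */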
    for(;;) {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            if (sym && sym->c) {
                /* the function was used: generate its code and
                   convert it to a normal function */
                fn->sym = NULL;
                if (file)
                    pstrcpy(file->filename, sizeof file->filename, fn->filename);
                sym->type.t &= ~VT_INLINE;

                begin_macro(fn->func_str, 1);
                next();
                cur_text_section = text_section;
                gen_function(sym);
                end_macro();

                inline_generated = 1;
            }
        }
        if (!inline_generated)
            break;
    }
    file->line_num = ln;
}

ST_FUNC void free_inline_functions(TCCState *s)
{
    int i;
    /* free the tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        if (fn->sym)
            tok_str_free(fn->func_str);
    }
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
}

/* 'l' is VT_LOCAL or VT_CONST to define the default storage type */
static int decl0(int l, int is_for_loop_init)
{
    int v, has_init, r;
    CType type, btype;
    Sym *sym;
    AttributeDef ad;

    while (1) {
        if (!parse_btype(&btype, &ad)) {
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' */
            /* XXX: find a more elegant solution */
            if (tok == ';') {
                next();
                continue;
            }
            if (l == VT_CONST &&
                (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                /* global asm block */
                asm_global_instr();
                continue;
            }
            /* special test for old K&R protos without an explicit int
               type. Only accepted when defining global data. */
            if (l == VT_LOCAL || tok < TOK_UIDENT)
                break;
            btype.t = VT_INT;
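            /* For example, an old-style file-scope definition such as
                   main() { ... }
               has no type specifier; the base type defaults to int here. */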
        }
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            tok == ';') {
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                int v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
            }
            next();
            continue;
        }
        while (1) { /* iterate thru each declaration */
            type = btype;
            /* If the base type itself was an array type of unspecified
               size (like in 'typedef int arr[]; arr x = {1};') then
               we will overwrite the unknown size by the real one for
               this decl. We need to unshare the ref symbol holding
               that size. */
            if ((type.t & VT_ARRAY) && type.ref->c < 0) {
                type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
            }
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
                    tcc_error("function without file scope cannot be static");
                }
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->c == FUNC_OLD)
                    func_decl_list(sym);
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after the asm label */
                parse_attribute(&ad);
                if (tok == '{')
                    expect(";");
            }
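            /* For example, the GNU extension
                   int foo(void) __asm__("bar");
               keeps 'foo' as the C-level name but makes the symbol appear
               as 'bar' at the assembler level. */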
            if (ad.a.weak)
                type.t |= VT_WEAK;
#ifdef TCC_TARGET_PE
            if (ad.a.func_import || ad.a.func_export) {
                if (type.t & (VT_STATIC|VT_TYPEDEF))
                    tcc_error("cannot have dll linkage with static or typedef");
                if (ad.a.func_export)
                    type.t |= VT_EXPORT;
                else if ((type.t & VT_BTYPE) != VT_FUNC)
                    type.t |= VT_IMPORT|VT_EXTERN;
            }
#endif
            type.t |= ad.a.visibility << VT_VIS_SHIFT;

            if (tok == '{') {
                if (l == VT_LOCAL)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition */
                sym = type.ref;
                while ((sym = sym->next) != NULL)
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");

                /* XXX: cannot do better now: convert extern inline to static inline */
                if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
                    type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                sym = sym_find(v);
                if (sym) {
                    Sym *ref;
                    if ((sym->type.t & VT_BTYPE) != VT_FUNC)
                        goto func_error1;

                    ref = sym->type.ref;

                    /* use func_call from the prototype if not defined */
                    if (ref->a.func_call != FUNC_CDECL
                        && type.ref->a.func_call == FUNC_CDECL)
                        type.ref->a.func_call = ref->a.func_call;

                    /* use static from the prototype */
                    if (sym->type.t & VT_STATIC)
                        type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                    /* if the definition has no visibility, use the
                       one from the prototype */
                    if (!(type.t & VT_VIS_MASK))
                        type.t |= sym->type.t & VT_VIS_MASK;

                    /* apply other storage attributes from the prototype */
                    type.t |= sym->type.t & (VT_EXPORT|VT_WEAK);

                    if (!is_compatible_types(&sym->type, &type)) {
                    func_error1:
                        tcc_error("incompatible types for redefinition of '%s'",
                                  get_tok_str(v, NULL));
                    }
                    if (ref->a.func_body)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                    /* the symbol is already declared: store the complete type */
                    sym->type = type;

                } else {
                    /* put function symbol */
                    sym = global_identifier_push(v, type.t, 0);
                    sym->type.ref = type.ref;
                }

                sym->type.ref->a.func_body = 1;
                sym->r = VT_SYM | VT_CONST;

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
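                /* For example,
                       static inline int sq(int x) { return x * x; }
                   is only tokenized and saved here; gen_inline_functions()
                   emits its code later, and only if it is referenced. */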
                if ((type.t & (VT_INLINE | VT_STATIC)) ==
                    (VT_INLINE | VT_STATIC)) {
                    int block_level;
                    struct InlineFunc *fn;
                    const char *filename;

                    filename = file ? file->filename : "";
                    fn = tcc_malloc(sizeof *fn + strlen(filename));
                    strcpy(fn->filename, filename);
                    fn->sym = sym;
                    fn->func_str = tok_str_alloc();

                    block_level = 0;
                    for(;;) {
                        int t;
                        if (tok == TOK_EOF)
                            tcc_error("unexpected end of file");
                        tok_str_add_tok(fn->func_str);
                        t = tok;
                        next();
                        if (t == '{') {
                            block_level++;
                        } else if (t == '}') {
                            block_level--;
                            if (block_level == 0)
                                break;
                        }
                    }
                    tok_str_add(fn->func_str, -1);
                    tok_str_add(fn->func_str, 0);
                    dynarray_add(&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                      get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->a = ad.a;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= lvalue_type(type.t);
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if ((type.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
                        ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
                         !has_init && l == VT_CONST && type.ref->c < 0)) {
                        /* external variable or function */
                        /* NOTE: as with GCC, uninitialized static global
                           arrays of unspecified size are treated as
                           extern declarations */
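                        /* For example, at file scope
                               static int tab[];
                           has neither a size nor an initializer, so it is
                           treated like an extern declaration here. */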
                        sym = external_sym(v, &type, r);
                        sym->asm_label = ad.asm_label;
                        if (ad.alias_target) {
                            Section tsec;
                            ElfW(Sym) *esym;
                            Sym *alias_target;

                            alias_target = sym_find(ad.alias_target);
                            if (!alias_target || !alias_target->c)
                                tcc_error("unsupported forward __alias__ attribute");
                            esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
                            tsec.sh_num = esym->st_shndx;
                            put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
            ad.a.aligned = 0;
        }
    }
    return 0;
}

ST_FUNC void decl(int l)
{
    decl0(l, 0);
}

/* ------------------------------------------------------------------------- */