Fix more bitfield corner cases
[tinycc.git] / tccgen.c
blob 1efbc1f03e51c144aa06862320a1a2feb7fee3f4
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* location of the on-stack variable that holds the saved stack pointer whenever the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
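/* Illustration (not part of tcc): the test above relies on the IEEE 754
   double layout. On a little-endian host p[1] holds the sign and the 11
   exponent bits; OR-ing in every other bit and adding 1 clears bit 31
   only when the exponent is all ones (Inf/NaN), otherwise the carry
   stops inside the exponent and bit 31 stays set. A hypothetical
   stand-alone sketch of the same trick: */
#if 0
#include <assert.h>
#include <math.h>
#include <string.h>
static int finite_hi_word(double d)
{
    unsigned hi;
    memcpy(&hi, (char *)&d + 4, 4);   /* high word, little-endian host assumed */
    return ((hi | 0x800fffffu) + 1) >> 31;
}
static void ieee_finite_demo(void)
{
    assert(finite_hi_word(1.0) == 1);
    assert(finite_hi_word(HUGE_VAL) == 0);            /* +Inf */
    assert(finite_hi_word(HUGE_VAL - HUGE_VAL) == 0); /* NaN  */
}
#endif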
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
142 /* ------------------------------------------------------------------------- */
143 /* start of translation unit info */
144 ST_FUNC void tcc_debug_start(TCCState *s1)
146 if (s1->do_debug) {
147 char buf[512];
149 /* file info: full path + filename */
150 section_sym = put_elf_sym(symtab_section, 0, 0,
151 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
152 text_section->sh_num, NULL);
153 getcwd(buf, sizeof(buf));
154 #ifdef _WIN32
155 normalize_slashes(buf);
156 #endif
157 pstrcat(buf, sizeof(buf), "/");
158 put_stabs_r(buf, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 put_stabs_r(file->filename, N_SO, 0, 0,
161 text_section->data_offset, text_section, section_sym);
162 last_ind = 0;
163 last_line_num = 0;
166 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
167 symbols can be safely used */
168 put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
170 SHN_ABS, file->filename);
173 /* put end of translation unit info */
174 ST_FUNC void tcc_debug_end(TCCState *s1)
176 if (!s1->do_debug)
177 return;
178 put_stabs_r(NULL, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
183 /* generate line number info */
184 ST_FUNC void tcc_debug_line(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 if ((last_line_num != file->line_num || last_ind != ind)) {
189 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
190 last_ind = ind;
191 last_line_num = file->line_num;
195 /* put function symbol */
196 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
198 char buf[512];
200 if (!s1->do_debug)
201 return;
203 /* stabs info */
204 /* XXX: we put here a dummy type */
205 snprintf(buf, sizeof(buf), "%s:%c1",
206 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
207 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
208 cur_text_section, sym->c);
209 /* //gr gdb wants a line at the function */
210 put_stabn(N_SLINE, 0, file->line_num, 0);
212 last_ind = 0;
213 last_line_num = 0;
216 /* put function size */
217 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
219 if (!s1->do_debug)
220 return;
221 put_stabn(N_FUN, 0, 0, size);
224 /* ------------------------------------------------------------------------- */
225 ST_FUNC void tccgen_start(TCCState *s1)
227 cur_text_section = NULL;
228 funcname = "";
229 anon_sym = SYM_FIRST_ANOM;
230 section_sym = 0;
231 const_wanted = 0;
232 nocode_wanted = 1;
234 /* define some often used types */
235 int_type.t = VT_INT;
236 char_pointer_type.t = VT_BYTE;
237 mk_pointer(&char_pointer_type);
238 #if PTR_SIZE == 4
239 size_type.t = VT_INT;
240 #else
241 size_type.t = VT_LLONG;
242 #endif
243 func_old_type.t = VT_FUNC;
244 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
246 tcc_debug_start(s1);
248 #ifdef TCC_TARGET_ARM
249 arm_init(s1);
250 #endif
253 ST_FUNC void tccgen_end(TCCState *s1)
255 gen_inline_functions(s1);
256 check_vstack();
257 /* end of translation unit info */
258 tcc_debug_end(s1);
261 /* ------------------------------------------------------------------------- */
262 /* apply storage attributes to the ELF symbol */
264 static void update_storage(Sym *sym)
266 int t;
267 ElfW(Sym) *esym;
269 if (0 == sym->c)
270 return;
272 t = sym->type.t;
273 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
275 if (t & VT_VIS_MASK)
276 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
277 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
279 if (t & VT_WEAK)
280 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
282 #ifdef TCC_TARGET_PE
283 if (t & VT_EXPORT)
284 esym->st_other |= ST_PE_EXPORT;
285 #endif
288 /* ------------------------------------------------------------------------- */
289 /* update sym->c so that it points to an external symbol in section
290 'section' with value 'value' */
292 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
293 addr_t value, unsigned long size,
294 int can_add_underscore)
296 int sym_type, sym_bind, sh_num, info, other, t;
297 ElfW(Sym) *esym;
298 const char *name;
299 char buf1[256];
300 #ifdef CONFIG_TCC_BCHECK
301 char buf[32];
302 #endif
304 if (section == NULL)
305 sh_num = SHN_UNDEF;
306 else if (section == SECTION_ABS)
307 sh_num = SHN_ABS;
308 else if (section == SECTION_COMMON)
309 sh_num = SHN_COMMON;
310 else
311 sh_num = section->sh_num;
313 if (!sym->c) {
314 name = get_tok_str(sym->v, NULL);
315 #ifdef CONFIG_TCC_BCHECK
316 if (tcc_state->do_bounds_check) {
317 /* XXX: avoid doing that for statics ? */
318 /* if bound checking is activated, we change some function
319 names by adding the "__bound" prefix */
320 switch(sym->v) {
321 #ifdef TCC_TARGET_PE
322 /* XXX: we rely only on malloc hooks */
323 case TOK_malloc:
324 case TOK_free:
325 case TOK_realloc:
326 case TOK_memalign:
327 case TOK_calloc:
328 #endif
329 case TOK_memcpy:
330 case TOK_memmove:
331 case TOK_memset:
332 case TOK_strlen:
333 case TOK_strcpy:
334 case TOK_alloca:
335 strcpy(buf, "__bound_");
336 strcat(buf, name);
337 name = buf;
338 break;
341 #endif
342 t = sym->type.t;
343 if ((t & VT_BTYPE) == VT_FUNC) {
344 sym_type = STT_FUNC;
345 } else if ((t & VT_BTYPE) == VT_VOID) {
346 sym_type = STT_NOTYPE;
347 } else {
348 sym_type = STT_OBJECT;
350 if (t & VT_STATIC)
351 sym_bind = STB_LOCAL;
352 else
353 sym_bind = STB_GLOBAL;
354 other = 0;
355 #ifdef TCC_TARGET_PE
356 if (sym_type == STT_FUNC && sym->type.ref) {
357 Sym *ref = sym->type.ref;
358 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
359 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
360 name = buf1;
361 other |= ST_PE_STDCALL;
362 can_add_underscore = 0;
365 if (t & VT_IMPORT)
366 other |= ST_PE_IMPORT;
367 #endif
368 if (tcc_state->leading_underscore && can_add_underscore) {
369 buf1[0] = '_';
370 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
371 name = buf1;
373 if (sym->asm_label)
374 name = get_tok_str(sym->asm_label, NULL);
375 info = ELFW(ST_INFO)(sym_bind, sym_type);
376 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
377 } else {
378 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
379 esym->st_value = value;
380 esym->st_size = size;
381 esym->st_shndx = sh_num;
383 update_storage(sym);
386 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
387 addr_t value, unsigned long size)
389 put_extern_sym2(sym, section, value, size, 1);
392 /* add a new relocation entry to symbol 'sym' in section 's' */
393 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
394 addr_t addend)
396 int c = 0;
398 if (nocode_wanted && s == cur_text_section)
399 return;
401 if (sym) {
402 if (0 == sym->c)
403 put_extern_sym(sym, NULL, 0, 0);
404 c = sym->c;
407 /* now we can add ELF relocation info */
408 put_elf_reloca(symtab_section, s, offset, type, c, addend);
411 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
413 greloca(s, sym, offset, type, 0);
416 /* ------------------------------------------------------------------------- */
417 /* symbol allocator */
418 static Sym *__sym_malloc(void)
420 Sym *sym_pool, *sym, *last_sym;
421 int i;
423 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
424 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
426 last_sym = sym_free_first;
427 sym = sym_pool;
428 for(i = 0; i < SYM_POOL_NB; i++) {
429 sym->next = last_sym;
430 last_sym = sym;
431 sym++;
433 sym_free_first = last_sym;
434 return last_sym;
437 static inline Sym *sym_malloc(void)
439 Sym *sym;
440 #ifndef SYM_DEBUG
441 sym = sym_free_first;
442 if (!sym)
443 sym = __sym_malloc();
444 sym_free_first = sym->next;
445 return sym;
446 #else
447 sym = tcc_malloc(sizeof(Sym));
448 return sym;
449 #endif
452 ST_INLN void sym_free(Sym *sym)
454 #ifndef SYM_DEBUG
455 sym->next = sym_free_first;
456 sym_free_first = sym;
457 #else
458 tcc_free(sym);
459 #endif
462 /* push, without hashing */
463 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
465 Sym *s;
467 s = sym_malloc();
468 s->scope = 0;
469 s->v = v;
470 s->type.t = t;
471 s->type.ref = NULL;
472 #ifdef _WIN64
473 s->d = NULL;
474 #endif
475 s->c = c;
476 s->next = NULL;
477 /* add in stack */
478 s->prev = *ps;
479 *ps = s;
480 return s;
483 /* find a symbol and return its associated structure. 's' is the top
484 of the symbol stack */
485 ST_FUNC Sym *sym_find2(Sym *s, int v)
487 while (s) {
488 if (s->v == v)
489 return s;
490 else if (s->v == -1)
491 return NULL;
492 s = s->prev;
494 return NULL;
497 /* structure lookup */
498 ST_INLN Sym *struct_find(int v)
500 v -= TOK_IDENT;
501 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
502 return NULL;
503 return table_ident[v]->sym_struct;
506 /* find an identifier */
507 ST_INLN Sym *sym_find(int v)
509 v -= TOK_IDENT;
510 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
511 return NULL;
512 return table_ident[v]->sym_identifier;
515 /* push a given symbol on the symbol stack */
516 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
518 Sym *s, **ps;
519 TokenSym *ts;
521 if (local_stack)
522 ps = &local_stack;
523 else
524 ps = &global_stack;
525 s = sym_push2(ps, v, type->t, c);
526 s->type.ref = type->ref;
527 s->r = r;
528 /* don't record fields or anonymous symbols */
529 /* XXX: simplify */
530 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
531 /* record symbol in token array */
532 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
533 if (v & SYM_STRUCT)
534 ps = &ts->sym_struct;
535 else
536 ps = &ts->sym_identifier;
537 s->prev_tok = *ps;
538 *ps = s;
539 s->scope = local_scope;
540 if (s->prev_tok && s->prev_tok->scope == s->scope)
541 tcc_error("redeclaration of '%s'",
542 get_tok_str(v & ~SYM_STRUCT, NULL));
544 return s;
547 /* push a global identifier */
548 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
550 Sym *s, **ps;
551 s = sym_push2(&global_stack, v, t, c);
552 /* don't record anonymous symbol */
553 if (v < SYM_FIRST_ANOM) {
554 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
555 /* modify the topmost local identifier, so that
556 sym_identifier will point to 's' when popped */
557 while (*ps != NULL)
558 ps = &(*ps)->prev_tok;
559 s->prev_tok = NULL;
560 *ps = s;
562 return s;
565 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
566 pop them yet from the list, but do remove them from the token array. */
567 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
569 Sym *s, *ss, **ps;
570 TokenSym *ts;
571 int v;
573 s = *ptop;
574 while(s != b) {
575 ss = s->prev;
576 v = s->v;
577 /* remove symbol in token array */
578 /* XXX: simplify */
579 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
580 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
581 if (v & SYM_STRUCT)
582 ps = &ts->sym_struct;
583 else
584 ps = &ts->sym_identifier;
585 *ps = s->prev_tok;
587 if (!keep)
588 sym_free(s);
589 s = ss;
591 if (!keep)
592 *ptop = b;
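/* Hypothetical sketch (not called anywhere) of how a block scope uses
   these helpers: remember the current top of local_stack, push the
   block's declarations, then pop back to the saved top when the block
   ends; this roughly mirrors what block() does further down in this
   file. The token value 'v' is a placeholder. */
#if 0
static void scope_sketch(int v /* some identifier token */, CType *type)
{
    Sym *saved_top = local_stack;             /* mark the current scope top */
    ++local_scope;
    sym_push(v, type, VT_LOCAL | VT_LVAL, 0); /* declare a block-local symbol */
    /* ... parse the declarations and statements of the block ... */
    --local_scope;
    sym_pop(&local_stack, saved_top, 0);      /* forget the block's locals */
}
#endif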
595 /* ------------------------------------------------------------------------- */
597 static void vsetc(CType *type, int r, CValue *vc)
599 int v;
601 if (vtop >= vstack + (VSTACK_SIZE - 1))
602 tcc_error("memory full (vstack)");
603 /* cannot leave cpu flags live if other instructions are generated. Also
604 avoid leaving VT_JMP anywhere except on the top of the stack
605 because it would complicate the code generator.
607 Don't do this when nocode_wanted. vtop might come from
608 !nocode_wanted regions (see 88_codeopt.c) and transforming
609 it to a register without actually generating code is wrong
610 as its value might still be used for real. All values
611 we push under nocode_wanted will eventually be popped
612 again, so that the VT_CMP/VT_JMP value will be in vtop
613 when code is unsuppressed again.
615 Same logic below in vswap(); */
616 if (vtop >= vstack && !nocode_wanted) {
617 v = vtop->r & VT_VALMASK;
618 if (v == VT_CMP || (v & ~1) == VT_JMP)
619 gv(RC_INT);
622 vtop++;
623 vtop->type = *type;
624 vtop->r = r;
625 vtop->r2 = VT_CONST;
626 vtop->c = *vc;
627 vtop->sym = NULL;
630 ST_FUNC void vswap(void)
632 SValue tmp;
633 /* cannot vswap cpu flags. See comment at vsetc() above */
634 if (vtop >= vstack && !nocode_wanted) {
635 int v = vtop->r & VT_VALMASK;
636 if (v == VT_CMP || (v & ~1) == VT_JMP)
637 gv(RC_INT);
639 tmp = vtop[0];
640 vtop[0] = vtop[-1];
641 vtop[-1] = tmp;
644 /* pop stack value */
645 ST_FUNC void vpop(void)
647 int v;
648 v = vtop->r & VT_VALMASK;
649 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
650 /* for x86, we need to pop the FP stack */
651 if (v == TREG_ST0) {
652 o(0xd8dd); /* fstp %st(0) */
653 } else
654 #endif
655 if (v == VT_JMP || v == VT_JMPI) {
656 /* need to put correct jump if && or || without test */
657 gsym(vtop->c.i);
659 vtop--;
662 /* push constant of type "type" with useless value */
663 ST_FUNC void vpush(CType *type)
665 CValue cval;
666 vsetc(type, VT_CONST, &cval);
669 /* push integer constant */
670 ST_FUNC void vpushi(int v)
672 CValue cval;
673 cval.i = v;
674 vsetc(&int_type, VT_CONST, &cval);
677 /* push a pointer sized constant */
678 static void vpushs(addr_t v)
680 CValue cval;
681 cval.i = v;
682 vsetc(&size_type, VT_CONST, &cval);
685 /* push arbitrary 64bit constant */
686 ST_FUNC void vpush64(int ty, unsigned long long v)
688 CValue cval;
689 CType ctype;
690 ctype.t = ty;
691 ctype.ref = NULL;
692 cval.i = v;
693 vsetc(&ctype, VT_CONST, &cval);
696 /* push long long constant */
697 static inline void vpushll(long long v)
699 vpush64(VT_LLONG, v);
702 ST_FUNC void vset(CType *type, int r, long v)
704 CValue cval;
706 cval.i = v;
707 vsetc(type, r, &cval);
710 static void vseti(int r, int v)
712 CType type;
713 type.t = VT_INT;
714 type.ref = 0;
715 vset(&type, r, v);
718 ST_FUNC void vpushv(SValue *v)
720 if (vtop >= vstack + (VSTACK_SIZE - 1))
721 tcc_error("memory full (vstack)");
722 vtop++;
723 *vtop = *v;
726 static void vdup(void)
728 vpushv(vtop);
731 /* rotate n first stack elements to the bottom
732 I1 ... In -> I2 ... In I1 [top is right] */
734 ST_FUNC void vrotb(int n)
736 int i;
737 SValue tmp;
739 tmp = vtop[-n + 1];
740 for(i=-n+1;i!=0;i++)
741 vtop[i] = vtop[i+1];
742 vtop[0] = tmp;
745 /* rotate the n elements before entry e towards the top
746 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
748 ST_FUNC void vrote(SValue *e, int n)
750 int i;
751 SValue tmp;
753 tmp = *e;
754 for(i = 0;i < n - 1; i++)
755 e[-i] = e[-i - 1];
756 e[-n + 1] = tmp;
759 /* rotate n first stack elements to the top
760 I1 ... In -> In I1 ... I(n-1) [top is right] */
762 ST_FUNC void vrott(int n)
764 vrote(vtop, n);
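/* Hypothetical stand-alone model of the rotation (not part of tcc):
   the same loop as vrotb() applied to a plain int array, with the last
   element playing the role of vtop. */
#if 0
#include <assert.h>
static void rotb_model(int *top, int n)    /* I1..In -> I2..In I1 */
{
    int i, tmp = top[-n + 1];
    for (i = -n + 1; i != 0; i++)
        top[i] = top[i + 1];
    top[0] = tmp;
}
static void rot_demo(void)
{
    int v[3] = { 1, 2, 3 };                /* "stack": 1 2 3, top is v[2] */
    rotb_model(&v[2], 3);                  /* -> 2 3 1 */
    assert(v[0] == 2 && v[1] == 3 && v[2] == 1);
}
#endif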
767 /* push a symbol value of TYPE */
768 static inline void vpushsym(CType *type, Sym *sym)
770 CValue cval;
771 cval.i = 0;
772 vsetc(type, VT_CONST | VT_SYM, &cval);
773 vtop->sym = sym;
776 /* Return a static symbol pointing to a section */
777 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
779 int v;
780 Sym *sym;
782 v = anon_sym++;
783 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
784 sym->type.ref = type->ref;
785 sym->r = VT_CONST | VT_SYM;
786 put_extern_sym(sym, sec, offset, size);
787 return sym;
790 /* push a reference to a section offset by adding a dummy symbol */
791 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
793 vpushsym(type, get_sym_ref(type, sec, offset, size));
796 /* define a new external reference to a symbol 'v' of type 'u' */
797 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
799 Sym *s;
801 s = sym_find(v);
802 if (!s) {
803 /* push forward reference */
804 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
805 s->type.ref = type->ref;
806 s->r = r | VT_CONST | VT_SYM;
808 return s;
811 /* Merge some storage attributes. */
812 static void patch_storage(Sym *sym, CType *type)
814 int t;
815 if (!is_compatible_types(&sym->type, type))
816 tcc_error("incompatible types for redefinition of '%s'",
817 get_tok_str(sym->v, NULL));
818 t = type->t;
819 #ifdef TCC_TARGET_PE
820 if ((sym->type.t ^ t) & VT_IMPORT)
821 tcc_error("incompatible dll linkage for redefinition of '%s'",
822 get_tok_str(sym->v, NULL));
823 #endif
824 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
825 if (t & VT_VIS_MASK) {
826 int vis = sym->type.t & VT_VIS_MASK;
827 int vis2 = t & VT_VIS_MASK;
828 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
829 vis = vis2;
830 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
831 vis = (vis < vis2) ? vis : vis2;
832 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
836 /* define a new external reference to a symbol 'v' */
837 static Sym *external_sym(int v, CType *type, int r)
839 Sym *s;
840 s = sym_find(v);
841 if (!s) {
842 /* push forward reference */
843 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
844 s->type.t |= VT_EXTERN;
845 } else {
846 if (s->type.ref == func_old_type.ref) {
847 s->type.ref = type->ref;
848 s->r = r | VT_CONST | VT_SYM;
849 s->type.t |= VT_EXTERN;
851 patch_storage(s, type);
852 update_storage(s);
854 return s;
857 /* push a reference to global symbol v */
858 ST_FUNC void vpush_global_sym(CType *type, int v)
860 vpushsym(type, external_global_sym(v, type, 0));
863 /* save registers up to (vtop - n) stack entry */
864 ST_FUNC void save_regs(int n)
866 SValue *p, *p1;
867 for(p = vstack, p1 = vtop - n; p <= p1; p++)
868 save_reg(p->r);
871 /* save r to the memory stack, and mark it as being free */
872 ST_FUNC void save_reg(int r)
874 save_reg_upstack(r, 0);
877 /* save r to the memory stack, and mark it as being free,
878 if seen up to (vtop - n) stack entry */
879 ST_FUNC void save_reg_upstack(int r, int n)
881 int l, saved, size, align;
882 SValue *p, *p1, sv;
883 CType *type;
885 if ((r &= VT_VALMASK) >= VT_CONST)
886 return;
887 if (nocode_wanted)
888 return;
890 /* modify all stack values */
891 saved = 0;
892 l = 0;
893 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
894 if ((p->r & VT_VALMASK) == r ||
895 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
896 /* must save value on stack if not already done */
897 if (!saved) {
898 /* NOTE: must reload 'r' because r might be equal to r2 */
899 r = p->r & VT_VALMASK;
900 /* store register in the stack */
901 type = &p->type;
902 if ((p->r & VT_LVAL) ||
903 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
904 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
905 type = &char_pointer_type;
906 #else
907 type = &int_type;
908 #endif
909 size = type_size(type, &align);
910 loc = (loc - size) & -align;
911 sv.type.t = type->t;
912 sv.r = VT_LOCAL | VT_LVAL;
913 sv.c.i = loc;
914 store(r, &sv);
915 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
916 /* x86 specific: need to pop fp register ST0 if saved */
917 if (r == TREG_ST0) {
918 o(0xd8dd); /* fstp %st(0) */
920 #endif
921 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
922 /* special long long case */
923 if ((type->t & VT_BTYPE) == VT_LLONG) {
924 sv.c.i += 4;
925 store(p->r2, &sv);
927 #endif
928 l = loc;
929 saved = 1;
931 /* mark that stack entry as being saved on the stack */
932 if (p->r & VT_LVAL) {
933 /* also clear the bounded flag because the
934 relocation address of the function was stored in
935 p->c.i */
936 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
937 } else {
938 p->r = lvalue_type(p->type.t) | VT_LOCAL;
940 p->r2 = VT_CONST;
941 p->c.i = l;
946 #ifdef TCC_TARGET_ARM
947 /* find a register of class 'rc2' with at most one reference on stack.
948 * If none, call get_reg(rc) */
949 ST_FUNC int get_reg_ex(int rc, int rc2)
951 int r;
952 SValue *p;
954 for(r=0;r<NB_REGS;r++) {
955 if (reg_classes[r] & rc2) {
956 int n;
957 n=0;
958 for(p = vstack; p <= vtop; p++) {
959 if ((p->r & VT_VALMASK) == r ||
960 (p->r2 & VT_VALMASK) == r)
961 n++;
963 if (n <= 1)
964 return r;
967 return get_reg(rc);
969 #endif
971 /* find a free register of class 'rc'. If none, save one register */
972 ST_FUNC int get_reg(int rc)
974 int r;
975 SValue *p;
977 /* find a free register */
978 for(r=0;r<NB_REGS;r++) {
979 if (reg_classes[r] & rc) {
980 if (nocode_wanted)
981 return r;
982 for(p=vstack;p<=vtop;p++) {
983 if ((p->r & VT_VALMASK) == r ||
984 (p->r2 & VT_VALMASK) == r)
985 goto notfound;
987 return r;
989 notfound: ;
992 /* no register left : free the first one on the stack (VERY
993 IMPORTANT to start from the bottom to ensure that we don't
994 spill registers used in gen_opi()) */
995 for(p=vstack;p<=vtop;p++) {
996 /* look at second register (if long long) */
997 r = p->r2 & VT_VALMASK;
998 if (r < VT_CONST && (reg_classes[r] & rc))
999 goto save_found;
1000 r = p->r & VT_VALMASK;
1001 if (r < VT_CONST && (reg_classes[r] & rc)) {
1002 save_found:
1003 save_reg(r);
1004 return r;
1007 /* Should never get here */
1008 return -1;
1011 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1012 if needed */
1013 static void move_reg(int r, int s, int t)
1015 SValue sv;
1017 if (r != s) {
1018 save_reg(r);
1019 sv.type.t = t;
1020 sv.type.ref = NULL;
1021 sv.r = s;
1022 sv.c.i = 0;
1023 load(r, &sv);
1027 /* get address of vtop (vtop MUST BE an lvalue) */
1028 ST_FUNC void gaddrof(void)
1030 if (vtop->r & VT_REF)
1031 gv(RC_INT);
1032 vtop->r &= ~VT_LVAL;
1033 /* tricky: if saved lvalue, then we can go back to lvalue */
1034 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1035 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1040 #ifdef CONFIG_TCC_BCHECK
1041 /* generate lvalue bound code */
1042 static void gbound(void)
1044 int lval_type;
1045 CType type1;
1047 vtop->r &= ~VT_MUSTBOUND;
1048 /* if lvalue, then use checking code before dereferencing */
1049 if (vtop->r & VT_LVAL) {
1050 /* if not VT_BOUNDED value, then make one */
1051 if (!(vtop->r & VT_BOUNDED)) {
1052 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1053 /* must save type because we must set it to int to get pointer */
1054 type1 = vtop->type;
1055 vtop->type.t = VT_PTR;
1056 gaddrof();
1057 vpushi(0);
1058 gen_bounded_ptr_add();
1059 vtop->r |= lval_type;
1060 vtop->type = type1;
1062 /* then check for dereferencing */
1063 gen_bounded_ptr_deref();
1066 #endif
1068 /* store vtop in a register belonging to class 'rc'. lvalues are
1069 converted to values. Cannot be used if the value cannot be converted
1070 to a register value (such as structures). */
1071 ST_FUNC int gv(int rc)
1073 int r, bit_pos, bit_size, size, align, i;
1074 int rc2;
1076 /* NOTE: get_reg can modify vstack[] */
1077 if (vtop->type.t & VT_BITFIELD) {
1078 CType type;
1079 int bits = 32;
1080 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1081 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1082 /* remove bit field info to avoid loops */
1083 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1084 /* cast to int to propagate signedness in following ops */
1085 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1086 type.t = VT_LLONG;
1087 bits = 64;
1088 } else
1089 type.t = VT_INT;
1090 if((vtop->type.t & VT_UNSIGNED) ||
1091 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1092 type.t |= VT_UNSIGNED;
1093 gen_cast(&type);
1094 /* generate shifts */
1095 vpushi(bits - (bit_pos + bit_size));
1096 gen_op(TOK_SHL);
1097 vpushi(bits - bit_size);
1098 /* NOTE: transformed to SHR if unsigned */
1099 gen_op(TOK_SAR);
1100 r = gv(rc);
1101 } else {
1102 if (is_float(vtop->type.t) &&
1103 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1104 Sym *sym;
1105 int *ptr;
1106 unsigned long offset;
1107 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1108 CValue check;
1109 #endif
1111 /* XXX: unify with initializers handling ? */
1112 /* CPUs usually cannot use float constants, so we store them
1113 generically in data segment */
1114 size = type_size(&vtop->type, &align);
1115 offset = (data_section->data_offset + align - 1) & -align;
1116 data_section->data_offset = offset;
1117 /* XXX: not portable yet */
1118 #if defined(__i386__) || defined(__x86_64__)
1119 /* Zero pad x87 tenbyte long doubles */
1120 if (size == LDOUBLE_SIZE) {
1121 vtop->c.tab[2] &= 0xffff;
1122 #if LDOUBLE_SIZE == 16
1123 vtop->c.tab[3] = 0;
1124 #endif
1126 #endif
1127 ptr = section_ptr_add(data_section, size);
1128 size = size >> 2;
1129 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1130 check.d = 1;
1131 if(check.tab[0])
1132 for(i=0;i<size;i++)
1133 ptr[i] = vtop->c.tab[size-1-i];
1134 else
1135 #endif
1136 for(i=0;i<size;i++)
1137 ptr[i] = vtop->c.tab[i];
1138 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1139 vtop->r |= VT_LVAL | VT_SYM;
1140 vtop->sym = sym;
1141 vtop->c.i = 0;
1143 #ifdef CONFIG_TCC_BCHECK
1144 if (vtop->r & VT_MUSTBOUND)
1145 gbound();
1146 #endif
1148 r = vtop->r & VT_VALMASK;
1149 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1150 #ifndef TCC_TARGET_ARM64
1151 if (rc == RC_IRET)
1152 rc2 = RC_LRET;
1153 #ifdef TCC_TARGET_X86_64
1154 else if (rc == RC_FRET)
1155 rc2 = RC_QRET;
1156 #endif
1157 #endif
1158 /* need to reload if:
1159 - constant
1160 - lvalue (need to dereference pointer)
1161 - already a register, but not in the right class */
1162 if (r >= VT_CONST
1163 || (vtop->r & VT_LVAL)
1164 || !(reg_classes[r] & rc)
1165 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1166 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1167 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1168 #else
1169 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1170 #endif
1173 r = get_reg(rc);
1174 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1175 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1176 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1177 #else
1178 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1179 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1180 unsigned long long ll;
1181 #endif
1182 int r2, original_type;
1183 original_type = vtop->type.t;
1184 /* two register type load : expand to two words
1185 temporarily */
1186 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1187 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1188 /* load constant */
1189 ll = vtop->c.i;
1190 vtop->c.i = ll; /* first word */
1191 load(r, vtop);
1192 vtop->r = r; /* save register value */
1193 vpushi(ll >> 32); /* second word */
1194 } else
1195 #endif
1196 if (vtop->r & VT_LVAL) {
1197 /* We do not want to modify the long long
1198 pointer here, so the safest (and least
1199 efficient) way is to save all the other registers
1200 on the stack. XXX: totally inefficient. */
1201 #if 0
1202 save_regs(1);
1203 #else
1204 /* lvalue_save: save only if used further down the stack */
1205 save_reg_upstack(vtop->r, 1);
1206 #endif
1207 /* load from memory */
1208 vtop->type.t = load_type;
1209 load(r, vtop);
1210 vdup();
1211 vtop[-1].r = r; /* save register value */
1212 /* increment pointer to get second word */
1213 vtop->type.t = addr_type;
1214 gaddrof();
1215 vpushi(load_size);
1216 gen_op('+');
1217 vtop->r |= VT_LVAL;
1218 vtop->type.t = load_type;
1219 } else {
1220 /* move registers */
1221 load(r, vtop);
1222 vdup();
1223 vtop[-1].r = r; /* save register value */
1224 vtop->r = vtop[-1].r2;
1226 /* Allocate second register. Here we rely on the fact that
1227 get_reg() tries first to free r2 of an SValue. */
1228 r2 = get_reg(rc2);
1229 load(r2, vtop);
1230 vpop();
1231 /* write second register */
1232 vtop->r2 = r2;
1233 vtop->type.t = original_type;
1234 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1235 int t1, t;
1236 /* lvalue of scalar type : need to use lvalue type
1237 because of possible cast */
1238 t = vtop->type.t;
1239 t1 = t;
1240 /* compute memory access type */
1241 if (vtop->r & VT_REF)
1242 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1243 t = VT_PTR;
1244 #else
1245 t = VT_INT;
1246 #endif
1247 else if (vtop->r & VT_LVAL_BYTE)
1248 t = VT_BYTE;
1249 else if (vtop->r & VT_LVAL_SHORT)
1250 t = VT_SHORT;
1251 if (vtop->r & VT_LVAL_UNSIGNED)
1252 t |= VT_UNSIGNED;
1253 vtop->type.t = t;
1254 load(r, vtop);
1255 /* restore wanted type */
1256 vtop->type.t = t1;
1257 } else {
1258 /* one register type load */
1259 load(r, vtop);
1262 vtop->r = r;
1263 #ifdef TCC_TARGET_C67
1264 /* uses register pairs for doubles */
1265 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1266 vtop->r2 = r+1;
1267 #endif
1269 return r;
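/* The bitfield branch at the top of gv() extracts a field by shifting it
   up to the most significant end of the word and then shifting it back
   down (SAR for signed fields, SHR for unsigned ones). A hypothetical
   stand-alone illustration for bit_pos 3, bit_size 5 in a 32-bit word
   (not part of tcc; assumes two's complement and an arithmetic right
   shift of signed values, as tcc's code generators do): */
#if 0
#include <assert.h>
static unsigned extract_unsigned(unsigned w)
{
    return (w << (32 - (3 + 5))) >> (32 - 5);
}
static int extract_signed(unsigned w)
{
    return (int)(w << (32 - (3 + 5))) >> (32 - 5);
}
static void bitfield_demo(void)
{
    unsigned w = 0x1fu << 3;              /* 5-bit field at bit 3, all ones */
    assert(extract_unsigned(w) == 31u);   /* unsigned field: zero-extended  */
    assert(extract_signed(w) == -1);      /* signed field: sign-extended    */
}
#endif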
1272 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1273 ST_FUNC void gv2(int rc1, int rc2)
1275 int v;
1277 /* generate more generic register first. But VT_JMP or VT_CMP
1278 values must be generated first in all cases to avoid possible
1279 reload errors */
1280 v = vtop[0].r & VT_VALMASK;
1281 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1282 vswap();
1283 gv(rc1);
1284 vswap();
1285 gv(rc2);
1286 /* test if reload is needed for first register */
1287 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1288 vswap();
1289 gv(rc1);
1290 vswap();
1292 } else {
1293 gv(rc2);
1294 vswap();
1295 gv(rc1);
1296 vswap();
1297 /* test if reload is needed for first register */
1298 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1299 gv(rc2);
1304 #ifndef TCC_TARGET_ARM64
1305 /* wrapper around RC_FRET to return a register by type */
1306 static int rc_fret(int t)
1308 #ifdef TCC_TARGET_X86_64
1309 if (t == VT_LDOUBLE) {
1310 return RC_ST0;
1312 #endif
1313 return RC_FRET;
1315 #endif
1317 /* wrapper around REG_FRET to return a register by type */
1318 static int reg_fret(int t)
1320 #ifdef TCC_TARGET_X86_64
1321 if (t == VT_LDOUBLE) {
1322 return TREG_ST0;
1324 #endif
1325 return REG_FRET;
1328 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1329 /* expand 64bit on stack in two ints */
1330 static void lexpand(void)
1332 int u, v;
1333 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1334 v = vtop->r & (VT_VALMASK | VT_LVAL);
1335 if (v == VT_CONST) {
1336 vdup();
1337 vtop[0].c.i >>= 32;
1338 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1339 vdup();
1340 vtop[0].c.i += 4;
1341 } else {
1342 gv(RC_INT);
1343 vdup();
1344 vtop[0].r = vtop[-1].r2;
1345 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1347 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1349 #endif
1351 #ifdef TCC_TARGET_ARM
1352 /* expand long long on stack */
1353 ST_FUNC void lexpand_nr(void)
1355 int u,v;
1357 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1358 vdup();
1359 vtop->r2 = VT_CONST;
1360 vtop->type.t = VT_INT | u;
1361 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1362 if (v == VT_CONST) {
1363 vtop[-1].c.i = vtop->c.i;
1364 vtop->c.i = vtop->c.i >> 32;
1365 vtop->r = VT_CONST;
1366 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1367 vtop->c.i += 4;
1368 vtop->r = vtop[-1].r;
1369 } else if (v > VT_CONST) {
1370 vtop--;
1371 lexpand();
1372 } else
1373 vtop->r = vtop[-1].r2;
1374 vtop[-1].r2 = VT_CONST;
1375 vtop[-1].type.t = VT_INT | u;
1377 #endif
1379 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1380 /* build a long long from two ints */
1381 static void lbuild(int t)
1383 gv2(RC_INT, RC_INT);
1384 vtop[-1].r2 = vtop[0].r;
1385 vtop[-1].type.t = t;
1386 vpop();
1388 #endif
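/* Hypothetical model (not part of tcc) of what lexpand()/lbuild() do
   with a constant on a 32-bit target: a 64-bit value is split into a
   low and a high word and reassembled from them. */
#if 0
#include <assert.h>
#include <stdint.h>
static void lexpand_model(uint64_t v, uint32_t *lo, uint32_t *hi)
{
    *lo = (uint32_t)v;
    *hi = (uint32_t)(v >> 32);
}
static uint64_t lbuild_model(uint32_t lo, uint32_t hi)
{
    return (uint64_t)hi << 32 | lo;
}
static void llong_demo(void)
{
    uint32_t lo, hi;
    lexpand_model(0x123456789abcdef0ull, &lo, &hi);
    assert(lo == 0x9abcdef0u && hi == 0x12345678u);
    assert(lbuild_model(lo, hi) == 0x123456789abcdef0ull);
}
#endif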
1390 /* convert stack entry to register and duplicate its value in another
1391 register */
1392 static void gv_dup(void)
1394 int rc, t, r, r1;
1395 SValue sv;
1397 t = vtop->type.t;
1398 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1399 if ((t & VT_BTYPE) == VT_LLONG) {
1400 lexpand();
1401 gv_dup();
1402 vswap();
1403 vrotb(3);
1404 gv_dup();
1405 vrotb(4);
1406 /* stack: H L L1 H1 */
1407 lbuild(t);
1408 vrotb(3);
1409 vrotb(3);
1410 vswap();
1411 lbuild(t);
1412 vswap();
1413 } else
1414 #endif
1416 /* duplicate value */
1417 rc = RC_INT;
1418 sv.type.t = VT_INT;
1419 if (is_float(t)) {
1420 rc = RC_FLOAT;
1421 #ifdef TCC_TARGET_X86_64
1422 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1423 rc = RC_ST0;
1425 #endif
1426 sv.type.t = t;
1428 r = gv(rc);
1429 r1 = get_reg(rc);
1430 sv.r = r;
1431 sv.c.i = 0;
1432 load(r1, &sv); /* move r to r1 */
1433 vdup();
1434 /* duplicates value */
1435 if (r != r1)
1436 vtop->r = r1;
1440 /* Generate value test
1442 * Generate a test for any value (jump, comparison and integers) */
1443 ST_FUNC int gvtst(int inv, int t)
1445 int v = vtop->r & VT_VALMASK;
1446 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1447 vpushi(0);
1448 gen_op(TOK_NE);
1450 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1451 /* constant jmp optimization */
1452 if ((vtop->c.i != 0) != inv)
1453 t = gjmp(t);
1454 vtop--;
1455 return t;
1457 return gtst(inv, t);
1460 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1461 /* generate CPU independent (unsigned) long long operations */
1462 static void gen_opl(int op)
1464 int t, a, b, op1, c, i;
1465 int func;
1466 unsigned short reg_iret = REG_IRET;
1467 unsigned short reg_lret = REG_LRET;
1468 SValue tmp;
1470 switch(op) {
1471 case '/':
1472 case TOK_PDIV:
1473 func = TOK___divdi3;
1474 goto gen_func;
1475 case TOK_UDIV:
1476 func = TOK___udivdi3;
1477 goto gen_func;
1478 case '%':
1479 func = TOK___moddi3;
1480 goto gen_mod_func;
1481 case TOK_UMOD:
1482 func = TOK___umoddi3;
1483 gen_mod_func:
1484 #ifdef TCC_ARM_EABI
1485 reg_iret = TREG_R2;
1486 reg_lret = TREG_R3;
1487 #endif
1488 gen_func:
1489 /* call generic long long function */
1490 vpush_global_sym(&func_old_type, func);
1491 vrott(3);
1492 gfunc_call(2);
1493 vpushi(0);
1494 vtop->r = reg_iret;
1495 vtop->r2 = reg_lret;
1496 break;
1497 case '^':
1498 case '&':
1499 case '|':
1500 case '*':
1501 case '+':
1502 case '-':
1503 //pv("gen_opl A",0,2);
1504 t = vtop->type.t;
1505 vswap();
1506 lexpand();
1507 vrotb(3);
1508 lexpand();
1509 /* stack: L1 H1 L2 H2 */
1510 tmp = vtop[0];
1511 vtop[0] = vtop[-3];
1512 vtop[-3] = tmp;
1513 tmp = vtop[-2];
1514 vtop[-2] = vtop[-3];
1515 vtop[-3] = tmp;
1516 vswap();
1517 /* stack: H1 H2 L1 L2 */
1518 //pv("gen_opl B",0,4);
1519 if (op == '*') {
1520 vpushv(vtop - 1);
1521 vpushv(vtop - 1);
1522 gen_op(TOK_UMULL);
1523 lexpand();
1524 /* stack: H1 H2 L1 L2 ML MH */
1525 for(i=0;i<4;i++)
1526 vrotb(6);
1527 /* stack: ML MH H1 H2 L1 L2 */
1528 tmp = vtop[0];
1529 vtop[0] = vtop[-2];
1530 vtop[-2] = tmp;
1531 /* stack: ML MH H1 L2 H2 L1 */
1532 gen_op('*');
1533 vrotb(3);
1534 vrotb(3);
1535 gen_op('*');
1536 /* stack: ML MH M1 M2 */
1537 gen_op('+');
1538 gen_op('+');
1539 } else if (op == '+' || op == '-') {
1540 /* XXX: add non carry method too (for MIPS or alpha) */
1541 if (op == '+')
1542 op1 = TOK_ADDC1;
1543 else
1544 op1 = TOK_SUBC1;
1545 gen_op(op1);
1546 /* stack: H1 H2 (L1 op L2) */
1547 vrotb(3);
1548 vrotb(3);
1549 gen_op(op1 + 1); /* TOK_xxxC2 */
1550 } else {
1551 gen_op(op);
1552 /* stack: H1 H2 (L1 op L2) */
1553 vrotb(3);
1554 vrotb(3);
1555 /* stack: (L1 op L2) H1 H2 */
1556 gen_op(op);
1557 /* stack: (L1 op L2) (H1 op H2) */
1559 /* stack: L H */
1560 lbuild(t);
1561 break;
1562 case TOK_SAR:
1563 case TOK_SHR:
1564 case TOK_SHL:
1565 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1566 t = vtop[-1].type.t;
1567 vswap();
1568 lexpand();
1569 vrotb(3);
1570 /* stack: L H shift */
1571 c = (int)vtop->c.i;
1572 /* constant: simpler */
1573 /* NOTE: all comments are for SHL. The other cases are
1574 done by swapping words */
1575 vpop();
1576 if (op != TOK_SHL)
1577 vswap();
1578 if (c >= 32) {
1579 /* stack: L H */
1580 vpop();
1581 if (c > 32) {
1582 vpushi(c - 32);
1583 gen_op(op);
1585 if (op != TOK_SAR) {
1586 vpushi(0);
1587 } else {
1588 gv_dup();
1589 vpushi(31);
1590 gen_op(TOK_SAR);
1592 vswap();
1593 } else {
1594 vswap();
1595 gv_dup();
1596 /* stack: H L L */
1597 vpushi(c);
1598 gen_op(op);
1599 vswap();
1600 vpushi(32 - c);
1601 if (op == TOK_SHL)
1602 gen_op(TOK_SHR);
1603 else
1604 gen_op(TOK_SHL);
1605 vrotb(3);
1606 /* stack: L L H */
1607 vpushi(c);
1608 if (op == TOK_SHL)
1609 gen_op(TOK_SHL);
1610 else
1611 gen_op(TOK_SHR);
1612 gen_op('|');
1614 if (op != TOK_SHL)
1615 vswap();
1616 lbuild(t);
1617 } else {
1618 /* XXX: should provide a faster fallback on x86 ? */
1619 switch(op) {
1620 case TOK_SAR:
1621 func = TOK___ashrdi3;
1622 goto gen_func;
1623 case TOK_SHR:
1624 func = TOK___lshrdi3;
1625 goto gen_func;
1626 case TOK_SHL:
1627 func = TOK___ashldi3;
1628 goto gen_func;
1631 break;
1632 default:
1633 /* compare operations */
1634 t = vtop->type.t;
1635 vswap();
1636 lexpand();
1637 vrotb(3);
1638 lexpand();
1639 /* stack: L1 H1 L2 H2 */
1640 tmp = vtop[-1];
1641 vtop[-1] = vtop[-2];
1642 vtop[-2] = tmp;
1643 /* stack: L1 L2 H1 H2 */
1644 /* compare high */
1645 op1 = op;
1646 /* when values are equal, we need to compare low words. since
1647 the jump is inverted, we invert the test too. */
1648 if (op1 == TOK_LT)
1649 op1 = TOK_LE;
1650 else if (op1 == TOK_GT)
1651 op1 = TOK_GE;
1652 else if (op1 == TOK_ULT)
1653 op1 = TOK_ULE;
1654 else if (op1 == TOK_UGT)
1655 op1 = TOK_UGE;
1656 a = 0;
1657 b = 0;
1658 gen_op(op1);
1659 if (op == TOK_NE) {
1660 b = gvtst(0, 0);
1661 } else {
1662 a = gvtst(1, 0);
1663 if (op != TOK_EQ) {
1664 /* generate non equal test */
1665 vpushi(TOK_NE);
1666 vtop->r = VT_CMP;
1667 b = gvtst(0, 0);
1670 /* compare low. Always unsigned */
1671 op1 = op;
1672 if (op1 == TOK_LT)
1673 op1 = TOK_ULT;
1674 else if (op1 == TOK_LE)
1675 op1 = TOK_ULE;
1676 else if (op1 == TOK_GT)
1677 op1 = TOK_UGT;
1678 else if (op1 == TOK_GE)
1679 op1 = TOK_UGE;
1680 gen_op(op1);
1681 a = gvtst(1, a);
1682 gsym(b);
1683 vseti(VT_JMPI, a);
1684 break;
1687 #endif
1689 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1691 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1692 return (a ^ b) >> 63 ? -x : x;
1695 static int gen_opic_lt(uint64_t a, uint64_t b)
1697 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
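/* gen_opic_lt() implements a signed 64-bit '<' with unsigned arithmetic:
   flipping the sign bit of both operands maps the signed range onto the
   unsigned range while preserving order. A hypothetical stand-alone
   check (not part of tcc): */
#if 0
#include <assert.h>
#include <stdint.h>
static void opic_lt_demo(void)
{
    int64_t a = -1, b = 2;
    uint64_t ua = (uint64_t)a ^ ((uint64_t)1 << 63);
    uint64_t ub = (uint64_t)b ^ ((uint64_t)1 << 63);
    assert((ua < ub) == (a < b));          /* -1 < 2 holds either way */
}
#endif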
1700 /* handle integer constant optimizations and various machine-
1701 independent optimizations */
1702 static void gen_opic(int op)
1704 SValue *v1 = vtop - 1;
1705 SValue *v2 = vtop;
1706 int t1 = v1->type.t & VT_BTYPE;
1707 int t2 = v2->type.t & VT_BTYPE;
1708 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1709 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1710 uint64_t l1 = c1 ? v1->c.i : 0;
1711 uint64_t l2 = c2 ? v2->c.i : 0;
1712 int shm = (t1 == VT_LLONG) ? 63 : 31;
1714 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1715 l1 = ((uint32_t)l1 |
1716 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1717 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1718 l2 = ((uint32_t)l2 |
1719 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1721 if (c1 && c2) {
1722 switch(op) {
1723 case '+': l1 += l2; break;
1724 case '-': l1 -= l2; break;
1725 case '&': l1 &= l2; break;
1726 case '^': l1 ^= l2; break;
1727 case '|': l1 |= l2; break;
1728 case '*': l1 *= l2; break;
1730 case TOK_PDIV:
1731 case '/':
1732 case '%':
1733 case TOK_UDIV:
1734 case TOK_UMOD:
1735 /* if division by zero, generate explicit division */
1736 if (l2 == 0) {
1737 if (const_wanted)
1738 tcc_error("division by zero in constant");
1739 goto general_case;
1741 switch(op) {
1742 default: l1 = gen_opic_sdiv(l1, l2); break;
1743 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1744 case TOK_UDIV: l1 = l1 / l2; break;
1745 case TOK_UMOD: l1 = l1 % l2; break;
1747 break;
1748 case TOK_SHL: l1 <<= (l2 & shm); break;
1749 case TOK_SHR: l1 >>= (l2 & shm); break;
1750 case TOK_SAR:
1751 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1752 break;
1753 /* tests */
1754 case TOK_ULT: l1 = l1 < l2; break;
1755 case TOK_UGE: l1 = l1 >= l2; break;
1756 case TOK_EQ: l1 = l1 == l2; break;
1757 case TOK_NE: l1 = l1 != l2; break;
1758 case TOK_ULE: l1 = l1 <= l2; break;
1759 case TOK_UGT: l1 = l1 > l2; break;
1760 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1761 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1762 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1763 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1764 /* logical */
1765 case TOK_LAND: l1 = l1 && l2; break;
1766 case TOK_LOR: l1 = l1 || l2; break;
1767 default:
1768 goto general_case;
1770 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1771 l1 = ((uint32_t)l1 |
1772 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1773 v1->c.i = l1;
1774 vtop--;
1775 } else {
1776 /* if commutative ops, put c2 as constant */
1777 if (c1 && (op == '+' || op == '&' || op == '^' ||
1778 op == '|' || op == '*')) {
1779 vswap();
1780 c2 = c1; //c = c1, c1 = c2, c2 = c;
1781 l2 = l1; //l = l1, l1 = l2, l2 = l;
1783 if (!const_wanted &&
1784 c1 && ((l1 == 0 &&
1785 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1786 (l1 == -1 && op == TOK_SAR))) {
1787 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1788 vtop--;
1789 } else if (!const_wanted &&
1790 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1791 (l2 == -1 && op == '|') ||
1792 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1793 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1794 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1795 if (l2 == 1)
1796 vtop->c.i = 0;
1797 vswap();
1798 vtop--;
1799 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1800 op == TOK_PDIV) &&
1801 l2 == 1) ||
1802 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1803 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1804 l2 == 0) ||
1805 (op == '&' &&
1806 l2 == -1))) {
1807 /* filter out NOP operations like x*1, x-0, x&-1... */
1808 vtop--;
1809 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1810 /* try to use shifts instead of muls or divs */
1811 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1812 int n = -1;
1813 while (l2) {
1814 l2 >>= 1;
1815 n++;
1817 vtop->c.i = n;
1818 if (op == '*')
1819 op = TOK_SHL;
1820 else if (op == TOK_PDIV)
1821 op = TOK_SAR;
1822 else
1823 op = TOK_SHR;
1825 goto general_case;
1826 } else if (c2 && (op == '+' || op == '-') &&
1827 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1828 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1829 /* symbol + constant case */
1830 if (op == '-')
1831 l2 = -l2;
1832 l2 += vtop[-1].c.i;
1833 /* The backends can't always deal with addends to symbols
1834 larger than +-1<<31. Don't construct such. */
1835 if ((int)l2 != l2)
1836 goto general_case;
1837 vtop--;
1838 vtop->c.i = l2;
1839 } else {
1840 general_case:
1841 /* call low level op generator */
1842 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1843 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1844 gen_opl(op);
1845 else
1846 gen_opi(op);
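/* The power-of-two strength reduction above turns x*8 into x<<3 and
   x/8 (PDIV/UDIV) into a right shift by the same count, which is the
   log2 of the constant computed by the small while loop. A hypothetical
   stand-alone check of the equivalence (not part of tcc): */
#if 0
#include <assert.h>
static int log2_of_pow2(unsigned long long l2)
{
    int n = -1;
    while (l2) { l2 >>= 1; n++; }          /* same loop as above */
    return n;
}
static void strength_reduction_demo(void)
{
    unsigned x = 100;
    assert(log2_of_pow2(8) == 3);
    assert(x * 8 == x << 3);
    assert(x / 8 == x >> 3);
}
#endif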
1851 /* generate a floating point operation with constant propagation */
1852 static void gen_opif(int op)
1854 int c1, c2;
1855 SValue *v1, *v2;
1856 long double f1, f2;
1858 v1 = vtop - 1;
1859 v2 = vtop;
1860 /* currently, we cannot do computations with forward symbols */
1861 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1862 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1863 if (c1 && c2) {
1864 if (v1->type.t == VT_FLOAT) {
1865 f1 = v1->c.f;
1866 f2 = v2->c.f;
1867 } else if (v1->type.t == VT_DOUBLE) {
1868 f1 = v1->c.d;
1869 f2 = v2->c.d;
1870 } else {
1871 f1 = v1->c.ld;
1872 f2 = v2->c.ld;
1875 /* NOTE: we only do constant propagation on finite numbers (not
1876 NaN or infinity) (ANSI spec) */
1877 if (!ieee_finite(f1) || !ieee_finite(f2))
1878 goto general_case;
1880 switch(op) {
1881 case '+': f1 += f2; break;
1882 case '-': f1 -= f2; break;
1883 case '*': f1 *= f2; break;
1884 case '/':
1885 if (f2 == 0.0) {
1886 if (const_wanted)
1887 tcc_error("division by zero in constant");
1888 goto general_case;
1890 f1 /= f2;
1891 break;
1892 /* XXX: also handle tests? */
1893 default:
1894 goto general_case;
1896 /* XXX: overflow test ? */
1897 if (v1->type.t == VT_FLOAT) {
1898 v1->c.f = f1;
1899 } else if (v1->type.t == VT_DOUBLE) {
1900 v1->c.d = f1;
1901 } else {
1902 v1->c.ld = f1;
1904 vtop--;
1905 } else {
1906 general_case:
1907 gen_opf(op);
1911 static int pointed_size(CType *type)
1913 int align;
1914 return type_size(pointed_type(type), &align);
1917 static void vla_runtime_pointed_size(CType *type)
1919 int align;
1920 vla_runtime_type_size(pointed_type(type), &align);
1923 static inline int is_null_pointer(SValue *p)
1925 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1926 return 0;
1927 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1928 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1929 ((p->type.t & VT_BTYPE) == VT_PTR &&
1930 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1933 static inline int is_integer_btype(int bt)
1935 return (bt == VT_BYTE || bt == VT_SHORT ||
1936 bt == VT_INT || bt == VT_LLONG);
1939 /* check types for comparison or subtraction of pointers */
1940 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1942 CType *type1, *type2, tmp_type1, tmp_type2;
1943 int bt1, bt2;
1945 /* null pointers are accepted for all comparisons, as in gcc */
1946 if (is_null_pointer(p1) || is_null_pointer(p2))
1947 return;
1948 type1 = &p1->type;
1949 type2 = &p2->type;
1950 bt1 = type1->t & VT_BTYPE;
1951 bt2 = type2->t & VT_BTYPE;
1952 /* accept comparison between pointer and integer with a warning */
1953 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1954 if (op != TOK_LOR && op != TOK_LAND )
1955 tcc_warning("comparison between pointer and integer");
1956 return;
1959 /* both must be pointers or implicit function pointers */
1960 if (bt1 == VT_PTR) {
1961 type1 = pointed_type(type1);
1962 } else if (bt1 != VT_FUNC)
1963 goto invalid_operands;
1965 if (bt2 == VT_PTR) {
1966 type2 = pointed_type(type2);
1967 } else if (bt2 != VT_FUNC) {
1968 invalid_operands:
1969 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1971 if ((type1->t & VT_BTYPE) == VT_VOID ||
1972 (type2->t & VT_BTYPE) == VT_VOID)
1973 return;
1974 tmp_type1 = *type1;
1975 tmp_type2 = *type2;
1976 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1977 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1978 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1979 /* gcc-like error if '-' is used */
1980 if (op == '-')
1981 goto invalid_operands;
1982 else
1983 tcc_warning("comparison of distinct pointer types lacks a cast");
1987 /* generic gen_op: handles types problems */
1988 ST_FUNC void gen_op(int op)
1990 int u, t1, t2, bt1, bt2, t;
1991 CType type1;
1993 redo:
1994 t1 = vtop[-1].type.t;
1995 t2 = vtop[0].type.t;
1996 bt1 = t1 & VT_BTYPE;
1997 bt2 = t2 & VT_BTYPE;
1999 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2000 tcc_error("operation on a struct");
2001 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2002 if (bt2 == VT_FUNC) {
2003 mk_pointer(&vtop->type);
2004 gaddrof();
2006 if (bt1 == VT_FUNC) {
2007 vswap();
2008 mk_pointer(&vtop->type);
2009 gaddrof();
2010 vswap();
2012 goto redo;
2013 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2014 /* at least one operand is a pointer */
2015 /* relational op: both operands must be pointers */
2016 if (op >= TOK_ULT && op <= TOK_LOR) {
2017 check_comparison_pointer_types(vtop - 1, vtop, op);
2018 /* pointers are handled as unsigned */
2019 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2020 t = VT_LLONG | VT_UNSIGNED;
2021 #else
2022 t = VT_INT | VT_UNSIGNED;
2023 #endif
2024 goto std_op;
2026 /* if both pointers, then it must be the '-' op */
2027 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2028 if (op != '-')
2029 tcc_error("cannot use pointers here");
2030 check_comparison_pointer_types(vtop - 1, vtop, op);
2031 /* XXX: check that types are compatible */
2032 if (vtop[-1].type.t & VT_VLA) {
2033 vla_runtime_pointed_size(&vtop[-1].type);
2034 } else {
2035 vpushi(pointed_size(&vtop[-1].type));
2037 vrott(3);
2038 gen_opic(op);
2039 /* set to integer type */
2040 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2041 vtop->type.t = VT_LLONG;
2042 #else
2043 vtop->type.t = VT_INT;
2044 #endif
2045 vswap();
2046 gen_op(TOK_PDIV);
2047 } else {
2048 /* exactly one pointer : must be '+' or '-'. */
2049 if (op != '-' && op != '+')
2050 tcc_error("cannot use pointers here");
2051 /* Put pointer as first operand */
2052 if (bt2 == VT_PTR) {
2053 vswap();
2054 t = t1, t1 = t2, t2 = t;
2056 #if PTR_SIZE == 4
2057 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2058 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2059 gen_cast(&int_type);
2060 #endif
2061 type1 = vtop[-1].type;
2062 type1.t &= ~VT_ARRAY;
2063 if (vtop[-1].type.t & VT_VLA)
2064 vla_runtime_pointed_size(&vtop[-1].type);
2065 else {
2066 u = pointed_size(&vtop[-1].type);
2067 if (u < 0)
2068 tcc_error("unknown array element size");
2069 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2070 vpushll(u);
2071 #else
2072 /* XXX: cast to int ? (long long case) */
2073 vpushi(u);
2074 #endif
2076 gen_op('*');
2077 #if 0
2078 /* #ifdef CONFIG_TCC_BCHECK
2079 The main reason for removing this code:
2080 #include <stdio.h>
2081 int main ()
2083 int v[10];
2084 int i = 10;
2085 int j = 9;
2086 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2087 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2089 When this code is enabled, the output looks like
2090 v+i-j = 0xfffffffe
2091 v+(i-j) = 0xbff84000
2093 /* if evaluating constant expression, no code should be
2094 generated, so no bound check */
2095 if (tcc_state->do_bounds_check && !const_wanted) {
2096 /* if bounded pointers, we generate a special code to
2097 test bounds */
2098 if (op == '-') {
2099 vpushi(0);
2100 vswap();
2101 gen_op('-');
2103 gen_bounded_ptr_add();
2104 } else
2105 #endif
2107 gen_opic(op);
2109 /* restore the type in case gen_opic() swapped the operands */
2110 vtop->type = type1;
2112 } else if (is_float(bt1) || is_float(bt2)) {
2113 /* compute bigger type and do implicit casts */
2114 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2115 t = VT_LDOUBLE;
2116 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2117 t = VT_DOUBLE;
2118 } else {
2119 t = VT_FLOAT;
2121 /* floats can only be used for a few operations */
2122 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2123 (op < TOK_ULT || op > TOK_GT))
2124 tcc_error("invalid operands for binary operation");
2125 goto std_op;
2126 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2127 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2128 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2129 t |= VT_UNSIGNED;
2130 goto std_op;
2131 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2132 /* cast to biggest op */
2133 t = VT_LLONG;
2134 /* convert to unsigned if it does not fit in a long long */
2135 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2136 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2137 t |= VT_UNSIGNED;
2138 goto std_op;
2139 } else {
2140 /* integer operations */
2141 t = VT_INT;
2142 /* convert to unsigned if it does not fit in an integer */
2143 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2144 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2145 t |= VT_UNSIGNED;
2146 std_op:
2147 /* XXX: currently, some unsigned operations are explicit, so
2148 we modify them here */
2149 if (t & VT_UNSIGNED) {
2150 if (op == TOK_SAR)
2151 op = TOK_SHR;
2152 else if (op == '/')
2153 op = TOK_UDIV;
2154 else if (op == '%')
2155 op = TOK_UMOD;
2156 else if (op == TOK_LT)
2157 op = TOK_ULT;
2158 else if (op == TOK_GT)
2159 op = TOK_UGT;
2160 else if (op == TOK_LE)
2161 op = TOK_ULE;
2162 else if (op == TOK_GE)
2163 op = TOK_UGE;
2165 vswap();
2166 type1.t = t;
2167 gen_cast(&type1);
2168 vswap();
2169 /* special case for shifts and long long: we keep the shift as
2170 an integer */
2171 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2172 type1.t = VT_INT;
2173 gen_cast(&type1);
2174 if (is_float(t))
2175 gen_opif(op);
2176 else
2177 gen_opic(op);
2178 if (op >= TOK_ULT && op <= TOK_GT) {
2179 /* relational op: the result is an int */
2180 vtop->type.t = VT_INT;
2181 } else {
2182 vtop->type.t = t;
2185 // Make sure that we have converted to an rvalue:
2186 if (vtop->r & VT_LVAL)
2187 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2190 #ifndef TCC_TARGET_ARM
2191 /* generic itof for unsigned long long case */
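/* Illustrative note: on 32-bit targets an unsigned 64-bit integer cannot be
   converted with the plain signed itof path, so a call to a runtime helper
   (__floatundisf, __floatundidf or __floatundixf, matching the destination
   float type) is emitted instead; e.g. converting "unsigned long long x"
   to float becomes a call to __floatundisf. */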
2192 static void gen_cvt_itof1(int t)
2194 #ifdef TCC_TARGET_ARM64
2195 gen_cvt_itof(t);
2196 #else
2197 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2198 (VT_LLONG | VT_UNSIGNED)) {
2200 if (t == VT_FLOAT)
2201 vpush_global_sym(&func_old_type, TOK___floatundisf);
2202 #if LDOUBLE_SIZE != 8
2203 else if (t == VT_LDOUBLE)
2204 vpush_global_sym(&func_old_type, TOK___floatundixf);
2205 #endif
2206 else
2207 vpush_global_sym(&func_old_type, TOK___floatundidf);
2208 vrott(2);
2209 gfunc_call(1);
2210 vpushi(0);
2211 vtop->r = reg_fret(t);
2212 } else {
2213 gen_cvt_itof(t);
2215 #endif
2217 #endif
2219 /* generic ftoi for unsigned long long case */
2220 static void gen_cvt_ftoi1(int t)
2222 #ifdef TCC_TARGET_ARM64
2223 gen_cvt_ftoi(t);
2224 #else
2225 int st;
2227 if (t == (VT_LLONG | VT_UNSIGNED)) {
2228 /* not handled natively */
2229 st = vtop->type.t & VT_BTYPE;
2230 if (st == VT_FLOAT)
2231 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2232 #if LDOUBLE_SIZE != 8
2233 else if (st == VT_LDOUBLE)
2234 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2235 #endif
2236 else
2237 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2238 vrott(2);
2239 gfunc_call(1);
2240 vpushi(0);
2241 vtop->r = REG_IRET;
2242 vtop->r2 = REG_LRET;
2243 } else {
2244 gen_cvt_ftoi(t);
2246 #endif
2249 /* force char or short cast */
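/* Illustrative note: for an unsigned char the value is masked, e.g.
   "x & 0xff"; for a signed short the sign is re-extended by shifting,
   e.g. "(x << 16) >> 16" (or << 48 / >> 48 when the value on the stack
   is a long long). */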
2250 static void force_charshort_cast(int t)
2252 int bits, dbt;
2253 dbt = t & VT_BTYPE;
2254 /* XXX: add optimization if lvalue : just change type and offset */
2255 if (dbt == VT_BYTE)
2256 bits = 8;
2257 else
2258 bits = 16;
2259 if (t & VT_UNSIGNED) {
2260 vpushi((1 << bits) - 1);
2261 gen_op('&');
2262 } else {
2263 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2264 bits = 64 - bits;
2265 else
2266 bits = 32 - bits;
2267 vpushi(bits);
2268 gen_op(TOK_SHL);
2269 /* result must be signed or the SAR is converted to an SHL.
2270 This was not the case when "t" was a signed short
2271 and the last value on the stack was an unsigned int */
2272 vtop->type.t &= ~VT_UNSIGNED;
2273 vpushi(bits);
2274 gen_op(TOK_SAR);
2278 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2279 static void gen_cast(CType *type)
2281 int sbt, dbt, sf, df, c, p;
2283 /* special delayed cast for char/short */
2284 /* XXX: in some cases (multiple cascaded casts), it may still
2285 be incorrect */
2286 if (vtop->r & VT_MUSTCAST) {
2287 vtop->r &= ~VT_MUSTCAST;
2288 force_charshort_cast(vtop->type.t);
2291 /* bitfields first get cast to ints */
2292 if (vtop->type.t & VT_BITFIELD) {
2293 gv(RC_INT);
2296 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2297 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2299 if (sbt != dbt) {
2300 sf = is_float(sbt);
2301 df = is_float(dbt);
2302 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2303 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2304 if (c) {
2305 /* constant case: we can do it now */
2306 /* XXX: in ISOC, cannot do it if error in convert */
2307 if (sbt == VT_FLOAT)
2308 vtop->c.ld = vtop->c.f;
2309 else if (sbt == VT_DOUBLE)
2310 vtop->c.ld = vtop->c.d;
2312 if (df) {
2313 if ((sbt & VT_BTYPE) == VT_LLONG) {
2314 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2315 vtop->c.ld = vtop->c.i;
2316 else
2317 vtop->c.ld = -(long double)-vtop->c.i;
2318 } else if(!sf) {
2319 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2320 vtop->c.ld = (uint32_t)vtop->c.i;
2321 else
2322 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2325 if (dbt == VT_FLOAT)
2326 vtop->c.f = (float)vtop->c.ld;
2327 else if (dbt == VT_DOUBLE)
2328 vtop->c.d = (double)vtop->c.ld;
2329 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2330 vtop->c.i = vtop->c.ld;
2331 } else if (sf && dbt == VT_BOOL) {
2332 vtop->c.i = (vtop->c.ld != 0);
2333 } else {
2334 if(sf)
2335 vtop->c.i = vtop->c.ld;
2336 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2338 else if (sbt & VT_UNSIGNED)
2339 vtop->c.i = (uint32_t)vtop->c.i;
2340 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2341 else if (sbt == VT_PTR)
2343 #endif
2344 else if (sbt != VT_LLONG)
2345 vtop->c.i = ((uint32_t)vtop->c.i |
2346 -(vtop->c.i & 0x80000000));
2348 if (dbt == (VT_LLONG|VT_UNSIGNED))
2350 else if (dbt == VT_BOOL)
2351 vtop->c.i = (vtop->c.i != 0);
2352 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2353 else if (dbt == VT_PTR)
2355 #endif
2356 else if (dbt != VT_LLONG) {
2357 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2358 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2359 0xffffffff);
2360 vtop->c.i &= m;
2361 if (!(dbt & VT_UNSIGNED))
2362 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2365 } else if (p && dbt == VT_BOOL) {
2366 vtop->r = VT_CONST;
2367 vtop->c.i = 1;
2368 } else {
2369 /* non constant case: generate code */
2370 if (sf && df) {
2371 /* convert from fp to fp */
2372 gen_cvt_ftof(dbt);
2373 } else if (df) {
2374 /* convert int to fp */
2375 gen_cvt_itof1(dbt);
2376 } else if (sf) {
2377 /* convert fp to int */
2378 if (dbt == VT_BOOL) {
2379 vpushi(0);
2380 gen_op(TOK_NE);
2381 } else {
2382 /* we handle char/short/etc... with generic code */
2383 if (dbt != (VT_INT | VT_UNSIGNED) &&
2384 dbt != (VT_LLONG | VT_UNSIGNED) &&
2385 dbt != VT_LLONG)
2386 dbt = VT_INT;
2387 gen_cvt_ftoi1(dbt);
2388 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2389 /* additional cast for char/short... */
2390 vtop->type.t = dbt;
2391 gen_cast(type);
2394 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2395 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2396 if ((sbt & VT_BTYPE) != VT_LLONG) {
2397 /* scalar to long long */
2398 /* machine independent conversion */
2399 gv(RC_INT);
2400 /* generate high word */
2401 if (sbt == (VT_INT | VT_UNSIGNED)) {
2402 vpushi(0);
2403 gv(RC_INT);
2404 } else {
2405 if (sbt == VT_PTR) {
2406 /* cast from pointer to int before we apply
2407 shift operation, which pointers don't support */
2408 gen_cast(&int_type);
2410 gv_dup();
2411 vpushi(31);
2412 gen_op(TOK_SAR);
2414 /* patch second register */
2415 vtop[-1].r2 = vtop->r;
2416 vpop();
2418 #else
2419 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2420 (dbt & VT_BTYPE) == VT_PTR ||
2421 (dbt & VT_BTYPE) == VT_FUNC) {
2422 if ((sbt & VT_BTYPE) != VT_LLONG &&
2423 (sbt & VT_BTYPE) != VT_PTR &&
2424 (sbt & VT_BTYPE) != VT_FUNC) {
2425 /* need to convert from 32bit to 64bit */
2426 gv(RC_INT);
2427 if (sbt != (VT_INT | VT_UNSIGNED)) {
2428 #if defined(TCC_TARGET_ARM64)
2429 gen_cvt_sxtw();
2430 #elif defined(TCC_TARGET_X86_64)
2431 int r = gv(RC_INT);
2432 /* x86_64 specific: movslq */
2433 o(0x6348);
2434 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2435 #else
2436 #error
2437 #endif
2440 #endif
2441 } else if (dbt == VT_BOOL) {
2442 /* scalar to bool */
2443 vpushi(0);
2444 gen_op(TOK_NE);
2445 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2446 (dbt & VT_BTYPE) == VT_SHORT) {
2447 if (sbt == VT_PTR) {
2448 vtop->type.t = VT_INT;
2449 tcc_warning("nonportable conversion from pointer to char/short");
2451 force_charshort_cast(dbt);
2452 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2453 } else if ((dbt & VT_BTYPE) == VT_INT) {
2454 /* scalar to int */
2455 if ((sbt & VT_BTYPE) == VT_LLONG) {
2456 /* from long long: just take low order word */
2457 lexpand();
2458 vpop();
2460 /* if lvalue and single word type, nothing to do because
2461 the lvalue already contains the real type size (see
2462 VT_LVAL_xxx constants) */
2463 #endif
2466 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2467 /* if we are casting between pointer types,
2468 we must update the VT_LVAL_xxx size */
2469 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2470 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2472 vtop->type = *type;
2475 /* return type size as known at compile time. Put alignment at 'a' */
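/* Illustrative examples (not exhaustive): a struct returns the size and
   alignment recorded by struct_layout(); an array returns
   element_size * element_count (negative for an incomplete array such as
   "int a[]"); on i386 without PE, double and long long have size 8 but
   alignment 4. */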
2476 ST_FUNC int type_size(CType *type, int *a)
2478 Sym *s;
2479 int bt;
2481 bt = type->t & VT_BTYPE;
2482 if (bt == VT_STRUCT) {
2483 /* struct/union */
2484 s = type->ref;
2485 *a = s->r;
2486 return s->c;
2487 } else if (bt == VT_PTR) {
2488 if (type->t & VT_ARRAY) {
2489 int ts;
2491 s = type->ref;
2492 ts = type_size(&s->type, a);
2494 if (ts < 0 && s->c < 0)
2495 ts = -ts;
2497 return ts * s->c;
2498 } else {
2499 *a = PTR_SIZE;
2500 return PTR_SIZE;
2502 } else if (bt == VT_LDOUBLE) {
2503 *a = LDOUBLE_ALIGN;
2504 return LDOUBLE_SIZE;
2505 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2506 #ifdef TCC_TARGET_I386
2507 #ifdef TCC_TARGET_PE
2508 *a = 8;
2509 #else
2510 *a = 4;
2511 #endif
2512 #elif defined(TCC_TARGET_ARM)
2513 #ifdef TCC_ARM_EABI
2514 *a = 8;
2515 #else
2516 *a = 4;
2517 #endif
2518 #else
2519 *a = 8;
2520 #endif
2521 return 8;
2522 } else if (bt == VT_INT || bt == VT_FLOAT) {
2523 *a = 4;
2524 return 4;
2525 } else if (bt == VT_SHORT) {
2526 *a = 2;
2527 return 2;
2528 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2529 *a = 8;
2530 return 16;
2531 } else if (bt == VT_ENUM) {
2532 *a = 4;
2533 /* Enums might be incomplete, so don't just return '4' here. */
2534 return type->ref->c;
2535 } else {
2536 /* char, void, function, _Bool */
2537 *a = 1;
2538 return 1;
2542 /* push type size as known at run time on top of value stack. Put
2543 alignment at 'a' */
2544 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2546 if (type->t & VT_VLA) {
2547 type_size(&type->ref->type, a);
2548 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2549 } else {
2550 vpushi(type_size(type, a));
2554 static void vla_sp_restore(void) {
2555 if (vlas_in_scope) {
2556 gen_vla_sp_restore(vla_sp_loc);
2560 static void vla_sp_restore_root(void) {
2561 if (vlas_in_scope) {
2562 gen_vla_sp_restore(vla_sp_root_loc);
2566 /* return the pointed type of t */
2567 static inline CType *pointed_type(CType *type)
2569 return &type->ref->type;
2572 /* modify type so that it is a pointer to the original type. */
2573 ST_FUNC void mk_pointer(CType *type)
2575 Sym *s;
2576 s = sym_push(SYM_FIELD, type, 0, -1);
2577 type->t = VT_PTR | (type->t & ~VT_TYPE);
2578 type->ref = s;
2581 /* compare function types. OLD functions match any new functions */
2582 static int is_compatible_func(CType *type1, CType *type2)
2584 Sym *s1, *s2;
2586 s1 = type1->ref;
2587 s2 = type2->ref;
2588 if (!is_compatible_types(&s1->type, &s2->type))
2589 return 0;
2590 /* check func_call */
2591 if (s1->a.func_call != s2->a.func_call)
2592 return 0;
2593 /* XXX: not complete */
2594 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2595 return 1;
2596 if (s1->c != s2->c)
2597 return 0;
2598 while (s1 != NULL) {
2599 if (s2 == NULL)
2600 return 0;
2601 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2602 return 0;
2603 s1 = s1->next;
2604 s2 = s2->next;
2606 if (s2)
2607 return 0;
2608 return 1;
2611 /* return true if type1 and type2 are the same. If unqualified is
2612 true, qualifiers on the types are ignored.
2614 - enums are not checked as gcc __builtin_types_compatible_p ()
2616 static int compare_types(CType *type1, CType *type2, int unqualified)
2618 int bt1, t1, t2;
2620 t1 = type1->t & VT_TYPE;
2621 t2 = type2->t & VT_TYPE;
2622 if (unqualified) {
2623 /* strip qualifiers before comparing */
2624 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2625 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2627 /* Default vs. explicit signedness only matters for char */
2628 if ((t1 & VT_BTYPE) != VT_BYTE) {
2629 t1 &= ~VT_DEFSIGN;
2630 t2 &= ~VT_DEFSIGN;
2632 /* An enum is compatible with (unsigned) int. Ideally we would
2633 store the enums signedness in type->ref.a.<some_bit> and
2634 only accept unsigned enums with unsigned int and vice versa.
2635 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2636 from pointer target types, so we can't add it here either. */
2637 if ((t1 & VT_BTYPE) == VT_ENUM) {
2638 t1 = VT_INT;
2639 if (type1->ref->a.unsigned_enum)
2640 t1 |= VT_UNSIGNED;
2642 if ((t2 & VT_BTYPE) == VT_ENUM) {
2643 t2 = VT_INT;
2644 if (type2->ref->a.unsigned_enum)
2645 t2 |= VT_UNSIGNED;
2647 /* XXX: bitfields ? */
2648 if (t1 != t2)
2649 return 0;
2650 /* test more complicated cases */
2651 bt1 = t1 & VT_BTYPE;
2652 if (bt1 == VT_PTR) {
2653 type1 = pointed_type(type1);
2654 type2 = pointed_type(type2);
2655 return is_compatible_types(type1, type2);
2656 } else if (bt1 == VT_STRUCT) {
2657 return (type1->ref == type2->ref);
2658 } else if (bt1 == VT_FUNC) {
2659 return is_compatible_func(type1, type2);
2660 } else {
2661 return 1;
2665 /* return true if type1 and type2 are exactly the same (including
2666 qualifiers).
2668 static int is_compatible_types(CType *type1, CType *type2)
2670 return compare_types(type1,type2,0);
2673 /* return true if type1 and type2 are the same (ignoring qualifiers).
2675 static int is_compatible_parameter_types(CType *type1, CType *type2)
2677 return compare_types(type1,type2,1);
2680 /* print a type. If 'varstr' is not NULL, then the variable is also
2681 printed in the type */
2682 /* XXX: union */
2683 /* XXX: add array and function pointers */
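/* Illustrative example: for a pointer to const char with varstr "p" the
   output is "const char *p"; for an array the element count is appended,
   e.g. "int v[10]". */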
2684 static void type_to_str(char *buf, int buf_size,
2685 CType *type, const char *varstr)
2687 int bt, v, t;
2688 Sym *s, *sa;
2689 char buf1[256];
2690 const char *tstr;
2692 t = type->t & VT_TYPE;
2693 bt = t & VT_BTYPE;
2694 buf[0] = '\0';
2695 if (t & VT_CONSTANT)
2696 pstrcat(buf, buf_size, "const ");
2697 if (t & VT_VOLATILE)
2698 pstrcat(buf, buf_size, "volatile ");
2699 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2700 pstrcat(buf, buf_size, "unsigned ");
2701 else if (t & VT_DEFSIGN)
2702 pstrcat(buf, buf_size, "signed ");
2703 switch(bt) {
2704 case VT_VOID:
2705 tstr = "void";
2706 goto add_tstr;
2707 case VT_BOOL:
2708 tstr = "_Bool";
2709 goto add_tstr;
2710 case VT_BYTE:
2711 tstr = "char";
2712 goto add_tstr;
2713 case VT_SHORT:
2714 tstr = "short";
2715 goto add_tstr;
2716 case VT_INT:
2717 tstr = "int";
2718 goto add_tstr;
2719 case VT_LONG:
2720 tstr = "long";
2721 goto add_tstr;
2722 case VT_LLONG:
2723 tstr = "long long";
2724 goto add_tstr;
2725 case VT_FLOAT:
2726 tstr = "float";
2727 goto add_tstr;
2728 case VT_DOUBLE:
2729 tstr = "double";
2730 goto add_tstr;
2731 case VT_LDOUBLE:
2732 tstr = "long double";
2733 add_tstr:
2734 pstrcat(buf, buf_size, tstr);
2735 break;
2736 case VT_ENUM:
2737 case VT_STRUCT:
2738 if (bt == VT_STRUCT)
2739 tstr = "struct ";
2740 else
2741 tstr = "enum ";
2742 pstrcat(buf, buf_size, tstr);
2743 v = type->ref->v & ~SYM_STRUCT;
2744 if (v >= SYM_FIRST_ANOM)
2745 pstrcat(buf, buf_size, "<anonymous>");
2746 else
2747 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2748 break;
2749 case VT_FUNC:
2750 s = type->ref;
2751 type_to_str(buf, buf_size, &s->type, varstr);
2752 pstrcat(buf, buf_size, "(");
2753 sa = s->next;
2754 while (sa != NULL) {
2755 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2756 pstrcat(buf, buf_size, buf1);
2757 sa = sa->next;
2758 if (sa)
2759 pstrcat(buf, buf_size, ", ");
2761 pstrcat(buf, buf_size, ")");
2762 goto no_var;
2763 case VT_PTR:
2764 s = type->ref;
2765 if (t & VT_ARRAY) {
2766 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2767 type_to_str(buf, buf_size, &s->type, buf1);
2768 goto no_var;
2770 pstrcpy(buf1, sizeof(buf1), "*");
2771 if (t & VT_CONSTANT)
2772 pstrcat(buf1, buf_size, "const ");
2773 if (t & VT_VOLATILE)
2774 pstrcat(buf1, buf_size, "volatile ");
2775 if (varstr)
2776 pstrcat(buf1, sizeof(buf1), varstr);
2777 type_to_str(buf, buf_size, &s->type, buf1);
2778 goto no_var;
2780 if (varstr) {
2781 pstrcat(buf, buf_size, " ");
2782 pstrcat(buf, buf_size, varstr);
2784 no_var: ;
2787 /* verify type compatibility to store vtop in 'dt' type, and generate
2788 casts if needed. */
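/* Illustrative examples of the diagnostics below: assigning an int to a
   pointer gives "assignment makes pointer from integer without a cast";
   assigning a "const char *" to a "char *" gives "assignment discards
   qualifiers from pointer target type". */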
2789 static void gen_assign_cast(CType *dt)
2791 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2792 char buf1[256], buf2[256];
2793 int dbt, sbt;
2795 st = &vtop->type; /* source type */
2796 dbt = dt->t & VT_BTYPE;
2797 sbt = st->t & VT_BTYPE;
2798 if (sbt == VT_VOID || dbt == VT_VOID) {
2799 if (sbt == VT_VOID && dbt == VT_VOID)
2800 ; /*
2801 It is OK if both are void
2802 A test program:
2803 void func1() {}
2804 void func2() {
2805 return func1();
2807 gcc accepts this program
2809 else
2810 tcc_error("cannot cast from/to void");
2812 if (dt->t & VT_CONSTANT)
2813 tcc_warning("assignment of read-only location");
2814 switch(dbt) {
2815 case VT_PTR:
2816 /* special cases for pointers */
2817 /* '0' can also be a pointer */
2818 if (is_null_pointer(vtop))
2819 goto type_ok;
2820 /* accept implicit pointer to integer cast with warning */
2821 if (is_integer_btype(sbt)) {
2822 tcc_warning("assignment makes pointer from integer without a cast");
2823 goto type_ok;
2825 type1 = pointed_type(dt);
2826 /* a function is implicitly a function pointer */
2827 if (sbt == VT_FUNC) {
2828 if ((type1->t & VT_BTYPE) != VT_VOID &&
2829 !is_compatible_types(pointed_type(dt), st))
2830 tcc_warning("assignment from incompatible pointer type");
2831 goto type_ok;
2833 if (sbt != VT_PTR)
2834 goto error;
2835 type2 = pointed_type(st);
2836 if ((type1->t & VT_BTYPE) == VT_VOID ||
2837 (type2->t & VT_BTYPE) == VT_VOID) {
2838 /* void * can match anything */
2839 } else {
2840 /* exact type match, except for qualifiers */
2841 tmp_type1 = *type1;
2842 tmp_type2 = *type2;
2843 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2844 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2846 /* Like GCC, don't warn by default for mere changes
2847 in pointer target signedness. Do warn for different
2848 base types, though, in particular for unsigned enums
2849 and signed int targets. */
2850 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2851 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2852 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2854 else
2855 tcc_warning("assignment from incompatible pointer type");
2858 /* check const and volatile */
2859 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2860 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2861 tcc_warning("assignment discards qualifiers from pointer target type");
2862 break;
2863 case VT_BYTE:
2864 case VT_SHORT:
2865 case VT_INT:
2866 case VT_LLONG:
2867 if (sbt == VT_PTR || sbt == VT_FUNC) {
2868 tcc_warning("assignment makes integer from pointer without a cast");
2869 } else if (sbt == VT_STRUCT) {
2870 goto case_VT_STRUCT;
2872 /* XXX: more tests */
2873 break;
2874 case VT_STRUCT:
2875 case_VT_STRUCT:
2876 tmp_type1 = *dt;
2877 tmp_type2 = *st;
2878 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2879 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2880 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2881 error:
2882 type_to_str(buf1, sizeof(buf1), st, NULL);
2883 type_to_str(buf2, sizeof(buf2), dt, NULL);
2884 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2886 break;
2888 type_ok:
2889 gen_cast(dt);
2892 /* store vtop in lvalue pushed on stack */
2893 ST_FUNC void vstore(void)
2895 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2897 ft = vtop[-1].type.t;
2898 sbt = vtop->type.t & VT_BTYPE;
2899 dbt = ft & VT_BTYPE;
2900 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2901 (sbt == VT_INT && dbt == VT_SHORT))
2902 && !(vtop->type.t & VT_BITFIELD)) {
2903 /* optimize char/short casts */
2904 delayed_cast = VT_MUSTCAST;
2905 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2906 ((1 << VT_STRUCT_SHIFT) - 1));
2907 /* XXX: factorize */
2908 if (ft & VT_CONSTANT)
2909 tcc_warning("assignment of read-only location");
2910 } else {
2911 delayed_cast = 0;
2912 if (!(ft & VT_BITFIELD))
2913 gen_assign_cast(&vtop[-1].type);
2916 if (sbt == VT_STRUCT) {
2917 /* if structure, only generate pointer */
2918 /* structure assignment : generate memcpy */
2919 /* XXX: optimize if small size */
2920 size = type_size(&vtop->type, &align);
2922 /* destination */
2923 vswap();
2924 vtop->type.t = VT_PTR;
2925 gaddrof();
2927 /* address of memcpy() */
2928 #ifdef TCC_ARM_EABI
2929 if(!(align & 7))
2930 vpush_global_sym(&func_old_type, TOK_memcpy8);
2931 else if(!(align & 3))
2932 vpush_global_sym(&func_old_type, TOK_memcpy4);
2933 else
2934 #endif
2935 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2936 vpush_global_sym(&func_old_type, TOK_memmove);
2938 vswap();
2939 /* source */
2940 vpushv(vtop - 2);
2941 vtop->type.t = VT_PTR;
2942 gaddrof();
2943 /* type size */
2944 vpushi(size);
2945 gfunc_call(3);
2947 /* leave source on stack */
2948 } else if (ft & VT_BITFIELD) {
2949 /* bitfield store handling */
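/* Illustrative example: storing n into a bit-field with bit_pos == 3 and
   bit_size == 5 generates roughly
       dest = (dest & ~(0x1f << 3)) | ((n & 0x1f) << 3);
   using 64-bit masks when the underlying type is long long. */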
2951 /* save lvalue as expression result (example: s.b = s.a = n;) */
2952 vdup(), vtop[-1] = vtop[-2];
2954 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2955 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2956 /* remove bit field info to avoid loops */
2957 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2959 if((ft & VT_BTYPE) == VT_BOOL) {
2960 gen_cast(&vtop[-1].type);
2961 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2964 /* duplicate destination */
2965 vdup();
2966 vtop[-1] = vtop[-2];
2968 /* mask and shift source */
2969 if((ft & VT_BTYPE) != VT_BOOL) {
2970 if((ft & VT_BTYPE) == VT_LLONG) {
2971 vpushll((1ULL << bit_size) - 1ULL);
2972 } else {
2973 vpushi((1 << bit_size) - 1);
2975 gen_op('&');
2977 vpushi(bit_pos);
2978 gen_op(TOK_SHL);
2979 /* load destination, mask and or with source */
2980 vswap();
2981 if((ft & VT_BTYPE) == VT_LLONG) {
2982 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2983 } else {
2984 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2986 gen_op('&');
2987 gen_op('|');
2988 /* store result */
2989 vstore();
2990 /* ... and discard */
2991 vpop();
2993 } else {
2994 #ifdef CONFIG_TCC_BCHECK
2995 /* bound check case */
2996 if (vtop[-1].r & VT_MUSTBOUND) {
2997 vswap();
2998 gbound();
2999 vswap();
3001 #endif
3002 rc = RC_INT;
3003 if (is_float(ft)) {
3004 rc = RC_FLOAT;
3005 #ifdef TCC_TARGET_X86_64
3006 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3007 rc = RC_ST0;
3008 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3009 rc = RC_FRET;
3011 #endif
3013 r = gv(rc); /* generate value */
3014 /* if lvalue was saved on stack, must read it */
3015 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3016 SValue sv;
3017 t = get_reg(RC_INT);
3018 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3019 sv.type.t = VT_PTR;
3020 #else
3021 sv.type.t = VT_INT;
3022 #endif
3023 sv.r = VT_LOCAL | VT_LVAL;
3024 sv.c.i = vtop[-1].c.i;
3025 load(t, &sv);
3026 vtop[-1].r = t | VT_LVAL;
3028 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3029 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3030 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3031 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3032 #else
3033 if ((ft & VT_BTYPE) == VT_LLONG) {
3034 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3035 #endif
3036 vtop[-1].type.t = load_type;
3037 store(r, vtop - 1);
3038 vswap();
3039 /* convert to int to increment easily */
3040 vtop->type.t = addr_type;
3041 gaddrof();
3042 vpushi(load_size);
3043 gen_op('+');
3044 vtop->r |= VT_LVAL;
3045 vswap();
3046 vtop[-1].type.t = load_type;
3047 /* XXX: it works because r2 is spilled last ! */
3048 store(vtop->r2, vtop - 1);
3049 } else {
3050 store(r, vtop - 1);
3053 vswap();
3054 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3055 vtop->r |= delayed_cast;
3059 /* handle post/pre increment and decrement; 'post' selects the post form, 'c' is the token ++ or -- */
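/* Illustrative note: for "i++" the old value is duplicated first and left
   as the expression result, while for "++i" the stored (new) value remains
   on the value stack. */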
3060 ST_FUNC void inc(int post, int c)
3062 test_lvalue();
3063 vdup(); /* save lvalue */
3064 if (post) {
3065 gv_dup(); /* duplicate value */
3066 vrotb(3);
3067 vrotb(3);
3069 /* add constant */
3070 vpushi(c - TOK_MID);
3071 gen_op('+');
3072 vstore(); /* store value */
3073 if (post)
3074 vpop(); /* if post op, return saved value */
3077 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3079 /* read the string */
3080 if (tok != TOK_STR)
3081 expect(msg);
3082 cstr_new(astr);
3083 while (tok == TOK_STR) {
3084 /* XXX: add \0 handling too ? */
3085 cstr_cat(astr, tokc.str.data, -1);
3086 next();
3088 cstr_ccat(astr, '\0');
3091 /* If I is >= 1 and a power of two, returns log2(i)+1.
3092 If I is 0 returns 0. */
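/* Illustrative examples: exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(0) == 0; the result is what gets stored in a.aligned for
   the aligned attribute (alignment == 1 << (a.aligned - 1)). */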
3093 static int exact_log2p1(int i)
3095 int ret;
3096 if (!i)
3097 return 0;
3098 for (ret = 1; i >= 1 << 8; ret += 8)
3099 i >>= 8;
3100 if (i >= 1 << 4)
3101 ret += 4, i >>= 4;
3102 if (i >= 1 << 2)
3103 ret += 2, i >>= 2;
3104 if (i >= 1 << 1)
3105 ret++;
3106 return ret;
3109 /* Parse GNUC __attribute__ extension. Currently, the following
3110 extensions are recognized:
3111 - aligned(n) : set data/function alignment.
3112 - packed : force data alignment to 1
3113 - section(x) : generate data/code in this section.
3114 - unused : currently ignored, but may be used someday.
3115 - regparm(n) : pass function parameters in registers (i386 only) */
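/* Illustrative example of accepted input:
       int x __attribute__((aligned(16), section(".mydata"), unused));
   unknown attributes are skipped (with a warning if warn_unsupported is
   set), including any parenthesized arguments. */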
3117 static void parse_attribute(AttributeDef *ad)
3119 int t, n;
3120 CString astr;
3122 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3123 next();
3124 skip('(');
3125 skip('(');
3126 while (tok != ')') {
3127 if (tok < TOK_IDENT)
3128 expect("attribute name");
3129 t = tok;
3130 next();
3131 switch(t) {
3132 case TOK_SECTION1:
3133 case TOK_SECTION2:
3134 skip('(');
3135 parse_mult_str(&astr, "section name");
3136 ad->section = find_section(tcc_state, (char *)astr.data);
3137 skip(')');
3138 cstr_free(&astr);
3139 break;
3140 case TOK_ALIAS1:
3141 case TOK_ALIAS2:
3142 skip('(');
3143 parse_mult_str(&astr, "alias(\"target\")");
3144 ad->alias_target = /* save string as token, for later */
3145 tok_alloc((char*)astr.data, astr.size-1)->tok;
3146 skip(')');
3147 cstr_free(&astr);
3148 break;
3149 case TOK_VISIBILITY1:
3150 case TOK_VISIBILITY2:
3151 skip('(');
3152 parse_mult_str(&astr,
3153 "visibility(\"default|hidden|internal|protected\")");
3154 if (!strcmp (astr.data, "default"))
3155 ad->a.visibility = STV_DEFAULT;
3156 else if (!strcmp (astr.data, "hidden"))
3157 ad->a.visibility = STV_HIDDEN;
3158 else if (!strcmp (astr.data, "internal"))
3159 ad->a.visibility = STV_INTERNAL;
3160 else if (!strcmp (astr.data, "protected"))
3161 ad->a.visibility = STV_PROTECTED;
3162 else
3163 expect("visibility(\"default|hidden|internal|protected\")");
3164 skip(')');
3165 cstr_free(&astr);
3166 break;
3167 case TOK_ALIGNED1:
3168 case TOK_ALIGNED2:
3169 if (tok == '(') {
3170 next();
3171 n = expr_const();
3172 if (n <= 0 || (n & (n - 1)) != 0)
3173 tcc_error("alignment must be a positive power of two");
3174 skip(')');
3175 } else {
3176 n = MAX_ALIGN;
3178 ad->a.aligned = exact_log2p1(n);
3179 if (n != 1 << (ad->a.aligned - 1))
3180 tcc_error("alignment of %d is larger than implemented", n);
3181 break;
3182 case TOK_PACKED1:
3183 case TOK_PACKED2:
3184 ad->a.packed = 1;
3185 break;
3186 case TOK_WEAK1:
3187 case TOK_WEAK2:
3188 ad->a.weak = 1;
3189 break;
3190 case TOK_UNUSED1:
3191 case TOK_UNUSED2:
3192 /* currently, no need to handle it because tcc does not
3193 track unused objects */
3194 break;
3195 case TOK_NORETURN1:
3196 case TOK_NORETURN2:
3197 /* currently ignored: tcc does not make use of
3198 the noreturn information */
3199 break;
3200 case TOK_CDECL1:
3201 case TOK_CDECL2:
3202 case TOK_CDECL3:
3203 ad->a.func_call = FUNC_CDECL;
3204 break;
3205 case TOK_STDCALL1:
3206 case TOK_STDCALL2:
3207 case TOK_STDCALL3:
3208 ad->a.func_call = FUNC_STDCALL;
3209 break;
3210 #ifdef TCC_TARGET_I386
3211 case TOK_REGPARM1:
3212 case TOK_REGPARM2:
3213 skip('(');
3214 n = expr_const();
3215 if (n > 3)
3216 n = 3;
3217 else if (n < 0)
3218 n = 0;
3219 if (n > 0)
3220 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3221 skip(')');
3222 break;
3223 case TOK_FASTCALL1:
3224 case TOK_FASTCALL2:
3225 case TOK_FASTCALL3:
3226 ad->a.func_call = FUNC_FASTCALLW;
3227 break;
3228 #endif
3229 case TOK_MODE:
3230 skip('(');
3231 switch(tok) {
3232 case TOK_MODE_DI:
3233 ad->a.mode = VT_LLONG + 1;
3234 break;
3235 case TOK_MODE_QI:
3236 ad->a.mode = VT_BYTE + 1;
3237 break;
3238 case TOK_MODE_HI:
3239 ad->a.mode = VT_SHORT + 1;
3240 break;
3241 case TOK_MODE_SI:
3242 case TOK_MODE_word:
3243 ad->a.mode = VT_INT + 1;
3244 break;
3245 default:
3246 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3247 break;
3249 next();
3250 skip(')');
3251 break;
3252 case TOK_DLLEXPORT:
3253 ad->a.func_export = 1;
3254 break;
3255 case TOK_DLLIMPORT:
3256 ad->a.func_import = 1;
3257 break;
3258 default:
3259 if (tcc_state->warn_unsupported)
3260 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3261 /* skip parameters */
3262 if (tok == '(') {
3263 int parenthesis = 0;
3264 do {
3265 if (tok == '(')
3266 parenthesis++;
3267 else if (tok == ')')
3268 parenthesis--;
3269 next();
3270 } while (parenthesis && tok != -1);
3272 break;
3274 if (tok != ',')
3275 break;
3276 next();
3278 skip(')');
3279 skip(')');
3283 static Sym * find_field (CType *type, int v)
3285 Sym *s = type->ref;
3286 v |= SYM_FIELD;
3287 while ((s = s->next) != NULL) {
3288 if ((s->v & SYM_FIELD) &&
3289 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3290 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3291 Sym *ret = find_field (&s->type, v);
3292 if (ret)
3293 return ret;
3295 if (s->v == v)
3296 break;
3298 return s;
3301 static void struct_add_offset (Sym *s, int offset)
3303 while ((s = s->next) != NULL) {
3304 if ((s->v & SYM_FIELD) &&
3305 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3306 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3307 struct_add_offset(s->type.ref, offset);
3308 } else
3309 s->c += offset;
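/* Illustrative example of the PCC/MS difference handled below, assuming
   typical i386/x86_64 alignment:
       struct S { char a:4; short b:4; };
   In PCC (GCC-like) mode b is packed adjacent to a, giving sizeof == 2,
   while in MS mode the change of base type starts a new short-aligned
   run, giving sizeof == 4. */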
3313 static void struct_layout(CType *type, AttributeDef *ad)
3315 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3316 int pcc = !tcc_state->ms_bitfields;
3317 Sym *f;
3318 if (ad->a.aligned)
3319 maxalign = 1 << (ad->a.aligned - 1);
3320 else
3321 maxalign = 1;
3322 offset = 0;
3323 c = 0;
3324 bit_pos = 0;
3325 prevbt = VT_STRUCT; /* make it never match */
3326 prev_bit_size = 0;
3327 for (f = type->ref->next; f; f = f->next) {
3328 int typealign, bit_size;
3329 int size = type_size(&f->type, &typealign);
3330 if (f->type.t & VT_BITFIELD)
3331 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3332 else
3333 bit_size = -1;
3334 if (bit_size == 0 && pcc) {
3335 /* Zero-width bit-fields in PCC mode aren't affected
3336 by any packing (attribute or pragma). */
3337 align = typealign;
3338 } else if (f->r > 1) {
3339 align = f->r;
3340 } else if (ad->a.packed || f->r == 1) {
3341 align = 1;
3342 /* Packed fields or packed records don't let the base type
3343 influence the record's alignment. */
3344 typealign = 1;
3345 } else {
3346 align = typealign;
3348 if (type->ref->type.t != TOK_STRUCT) {
3349 if (pcc && bit_size >= 0)
3350 size = (bit_size + 7) >> 3;
3351 /* Bit position is already zero from our caller. */
3352 offset = 0;
3353 if (size > c)
3354 c = size;
3355 } else if (bit_size < 0) {
3356 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3357 prevbt = VT_STRUCT;
3358 prev_bit_size = 0;
3359 c = (c + addbytes + align - 1) & -align;
3360 offset = c;
3361 if (size > 0)
3362 c += size;
3363 bit_pos = 0;
3364 } else {
3365 /* A bit-field. Layout is more complicated. There are two
3366 options TCC implements: PCC compatible and MS compatible
3367 (PCC compatible is what GCC uses for almost all targets).
3368 In PCC layout the overall size of the struct (in c) is
3369 _excluding_ the current run of bit-fields (that is,
3370 there's at least additional bit_pos bits after c). In
3371 MS layout c does include the current run of bit-fields.
3373 This matters for calculating the natural alignment buckets
3374 in PCC mode. */
3376 /* 'align' will be used to influence the record's alignment,
3377 so it's the max of specified and type alignment, except
3378 in certain cases that depend on the mode. */
3379 if (align < typealign)
3380 align = typealign;
3381 if (pcc) {
3382 /* In PCC layout a non-packed bit-field is placed adjacent
3383 to the preceding bit-fields, except if it would overflow
3384 its container (depending on base type) or it's a zero-width
3385 bit-field. Packed non-zero-width bit-fields are always
3386 placed adjacent. */
3387 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3388 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3389 if (bit_size == 0 ||
3390 ((typealign != 1 || size == 1) &&
3391 (ofs2 / (typealign * 8)) > (size/typealign))) {
3392 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3393 bit_pos = 0;
3394 } else while (bit_pos + bit_size > size * 8) {
3395 c += size;
3396 bit_pos -= size * 8;
3398 offset = c;
3399 /* In PCC layout named bit-fields influence the alignment
3400 of the containing struct using the base type's alignment,
3401 except for packed fields (which here have correct
3402 align/typealign). */
3403 if ((f->v & SYM_FIRST_ANOM))
3404 align = 1;
3405 } else {
3406 bt = f->type.t & VT_BTYPE;
3407 if ((bit_pos + bit_size > size * 8) ||
3408 (bit_size > 0) == (bt != prevbt)) {
3409 c = (c + typealign - 1) & -typealign;
3410 offset = c;
3411 bit_pos = 0;
3412 /* In MS bitfield mode a bit-field run always uses
3413 at least as many bits as the underlying type.
3414 To start a new run it's also required that this
3415 or the last bit-field had non-zero width. */
3416 if (bit_size || prev_bit_size)
3417 c += size;
3419 /* In MS layout the record's alignment is normally
3420 influenced by the field, except for a zero-width
3421 field at the start of a run (but by further zero-width
3422 fields it is again). */
3423 if (bit_size == 0 && prevbt != bt)
3424 align = 1;
3425 prevbt = bt;
3426 prev_bit_size = bit_size;
3428 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3429 | (bit_pos << VT_STRUCT_SHIFT);
3430 bit_pos += bit_size;
3431 if (pcc && bit_pos >= size * 8) {
3432 c += size;
3433 bit_pos -= size * 8;
3436 if (align > maxalign)
3437 maxalign = align;
3438 #if 0
3439 printf("set field %s offset=%d c=%d",
3440 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3441 if (f->type.t & VT_BITFIELD) {
3442 printf(" pos=%d size=%d",
3443 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3444 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3446 printf("\n");
3447 #endif
3449 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3450 Sym *ass;
3451 /* An anonymous struct/union. Adjust member offsets
3452 to reflect the real offset of our containing struct.
3453 Also set the offset of this anon member inside
3454 the outer struct to be zero. This way it
3455 works when accessing the field offset directly
3456 (from base object), as well as when recursing
3457 members in initializer handling. */
3458 int v2 = f->type.ref->v;
3459 if (!(v2 & SYM_FIELD) &&
3460 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3461 Sym **pps;
3462 /* This happens only with MS extensions. The
3463 anon member has a named struct type, so it
3464 potentially is shared with other references.
3465 We need to unshare members so we can modify
3466 them. */
3467 ass = f->type.ref;
3468 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3469 &f->type.ref->type, 0,
3470 f->type.ref->c);
3471 pps = &f->type.ref->next;
3472 while ((ass = ass->next) != NULL) {
3473 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3474 pps = &((*pps)->next);
3476 *pps = NULL;
3478 struct_add_offset(f->type.ref, offset);
3479 f->c = 0;
3480 } else {
3481 f->c = offset;
3484 f->r = 0;
3486 /* store size and alignment */
3487 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3488 + maxalign - 1) & -maxalign;
3489 type->ref->r = maxalign;
3492 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
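/* Illustrative note for the enum case below: an enum whose enumerators are
   all non-negative, e.g. "enum { A, B };", is flagged unsigned_enum, and
   one whose constants do not fit in 32 bits gets the size of size_type
   instead of int. */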
3493 static void struct_decl(CType *type, AttributeDef *ad, int u)
3495 int a, v, size, align, flexible, alignoverride;
3496 long c;
3497 int bit_size, bsize, bt;
3498 Sym *s, *ss, **ps;
3499 AttributeDef ad1;
3500 CType type1, btype;
3502 a = tok; /* save decl type */
3503 next();
3504 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3505 parse_attribute(ad);
3506 if (tok != '{') {
3507 v = tok;
3508 next();
3509 /* struct already defined ? return it */
3510 if (v < TOK_IDENT)
3511 expect("struct/union/enum name");
3512 s = struct_find(v);
3513 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3514 if (s->type.t != a)
3515 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3516 goto do_decl;
3518 } else {
3519 v = anon_sym++;
3521 /* Record the original enum/struct/union token. */
3522 type1.t = a;
3523 type1.ref = NULL;
3524 /* we put an undefined size for struct/union */
3525 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3526 s->r = 0; /* default alignment is zero as gcc */
3527 /* put struct/union/enum name in type */
3528 do_decl:
3529 type->t = u;
3530 type->ref = s;
3532 if (tok == '{') {
3533 next();
3534 if (s->c != -1)
3535 tcc_error("struct/union/enum already defined");
3536 /* cannot be empty */
3537 c = 0;
3538 /* empty enums are not allowed */
3539 if (a == TOK_ENUM) {
3540 int seen_neg = 0;
3541 int seen_wide = 0;
3542 for(;;) {
3543 CType *t = &int_type;
3544 v = tok;
3545 if (v < TOK_UIDENT)
3546 expect("identifier");
3547 ss = sym_find(v);
3548 if (ss && !local_stack)
3549 tcc_error("redefinition of enumerator '%s'",
3550 get_tok_str(v, NULL));
3551 next();
3552 if (tok == '=') {
3553 next();
3554 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3555 c = expr_const64();
3556 #else
3557 /* We really want to support long long enums
3558 on i386 as well, but the Sym structure only
3559 holds a 'long' for associated constants,
3560 and enlarging it would bump its size (no
3561 available padding). So punt for now. */
3562 c = expr_const();
3563 #endif
3565 if (c < 0)
3566 seen_neg = 1;
3567 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3568 seen_wide = 1, t = &size_type;
3569 /* enum symbols have static storage */
3570 ss = sym_push(v, t, VT_CONST, c);
3571 ss->type.t |= VT_STATIC;
3572 if (tok != ',')
3573 break;
3574 next();
3575 c++;
3576 /* NOTE: we accept a trailing comma */
3577 if (tok == '}')
3578 break;
3580 if (!seen_neg)
3581 s->a.unsigned_enum = 1;
3582 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3583 skip('}');
3584 } else {
3585 ps = &s->next;
3586 flexible = 0;
3587 while (tok != '}') {
3588 if (!parse_btype(&btype, &ad1)) {
3589 skip(';');
3590 continue;
3592 while (1) {
3593 if (flexible)
3594 tcc_error("flexible array member '%s' not at the end of struct",
3595 get_tok_str(v, NULL));
3596 bit_size = -1;
3597 v = 0;
3598 type1 = btype;
3599 if (tok != ':') {
3600 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3601 if (v == 0) {
3602 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3603 expect("identifier");
3604 else {
3605 int v = btype.ref->v;
3606 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3607 if (tcc_state->ms_extensions == 0)
3608 expect("identifier");
3612 if (type_size(&type1, &align) < 0) {
3613 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3614 flexible = 1;
3615 else
3616 tcc_error("field '%s' has incomplete type",
3617 get_tok_str(v, NULL));
3619 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3620 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3621 tcc_error("invalid type for '%s'",
3622 get_tok_str(v, NULL));
3624 if (tok == ':') {
3625 next();
3626 bit_size = expr_const();
3627 /* XXX: handle v = 0 case for messages */
3628 if (bit_size < 0)
3629 tcc_error("negative width in bit-field '%s'",
3630 get_tok_str(v, NULL));
3631 if (v && bit_size == 0)
3632 tcc_error("zero width for bit-field '%s'",
3633 get_tok_str(v, NULL));
3634 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3635 parse_attribute(&ad1);
3637 size = type_size(&type1, &align);
3638 /* Only remember non-default alignment. */
3639 alignoverride = 0;
3640 if (ad1.a.aligned) {
3641 int speca = 1 << (ad1.a.aligned - 1);
3642 alignoverride = speca;
3643 } else if (ad1.a.packed || ad->a.packed) {
3644 alignoverride = 1;
3645 } else if (*tcc_state->pack_stack_ptr) {
3646 if (align > *tcc_state->pack_stack_ptr)
3647 alignoverride = *tcc_state->pack_stack_ptr;
3649 if (bit_size >= 0) {
3650 bt = type1.t & VT_BTYPE;
3651 if (bt != VT_INT &&
3652 bt != VT_BYTE &&
3653 bt != VT_SHORT &&
3654 bt != VT_BOOL &&
3655 bt != VT_ENUM &&
3656 bt != VT_LLONG)
3657 tcc_error("bitfields must have scalar type");
3658 bsize = size * 8;
3659 if (bit_size > bsize) {
3660 tcc_error("width of '%s' exceeds its type",
3661 get_tok_str(v, NULL));
3662 } else if (bit_size == bsize) {
3663 /* no need for bit fields */
3665 } else {
3666 type1.t |= VT_BITFIELD |
3667 (0 << VT_STRUCT_SHIFT) |
3668 (bit_size << (VT_STRUCT_SHIFT + 6));
3671 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3672 /* Remember we've seen a real field to check
3673 for placement of flexible array member. */
3674 c = 1;
3676 /* If member is a struct or bit-field, enforce
3677 placing into the struct (as anonymous). */
3678 if (v == 0 &&
3679 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3680 bit_size >= 0)) {
3681 v = anon_sym++;
3683 if (v) {
3684 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3685 *ps = ss;
3686 ps = &ss->next;
3688 if (tok == ';' || tok == TOK_EOF)
3689 break;
3690 skip(',');
3692 skip(';');
3694 skip('}');
3695 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3696 parse_attribute(ad);
3697 struct_layout(type, ad);
3702 /* return 1 if the basic type is a size modifier (short, long, long long) */
3703 ST_FUNC int is_btype_size(int bt)
3705 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3708 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3709 are added to the element type, copied because it could be a typedef. */
3710 static void parse_btype_qualify(CType *type, int qualifiers)
3712 while (type->t & VT_ARRAY) {
3713 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3714 type = &type->ref->type;
3716 type->t |= qualifiers;
3719 /* return 0 if no type declaration. otherwise, return the basic type
3720 and skip it. */
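/* Illustrative examples: "unsigned long long" accumulates
   VT_DEFSIGN|VT_UNSIGNED and VT_LLONG in t; a lone "long" is rewritten at
   the end to VT_INT or VT_LLONG depending on the target; typedef names are
   looked up via sym_find() and contribute their stored type and attributes. */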
3722 static int parse_btype(CType *type, AttributeDef *ad)
3724 int t, u, bt_size, complete, type_found, typespec_found, g;
3725 Sym *s;
3726 CType type1;
3728 memset(ad, 0, sizeof(AttributeDef));
3729 complete = 0;
3730 type_found = 0;
3731 typespec_found = 0;
3732 t = 0;
3733 while(1) {
3734 switch(tok) {
3735 case TOK_EXTENSION:
3736 /* currently, we really ignore extension */
3737 next();
3738 continue;
3740 /* basic types */
3741 case TOK_CHAR:
3742 u = VT_BYTE;
3743 basic_type:
3744 next();
3745 basic_type1:
3746 if (complete)
3747 tcc_error("too many basic types");
3748 t |= u;
3749 bt_size = is_btype_size (u & VT_BTYPE);
3750 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3751 complete = 1;
3752 typespec_found = 1;
3753 break;
3754 case TOK_VOID:
3755 u = VT_VOID;
3756 goto basic_type;
3757 case TOK_SHORT:
3758 u = VT_SHORT;
3759 goto basic_type;
3760 case TOK_INT:
3761 u = VT_INT;
3762 goto basic_type;
3763 case TOK_LONG:
3764 next();
3765 if ((t & VT_BTYPE) == VT_DOUBLE) {
3766 #ifndef TCC_TARGET_PE
3767 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3768 #endif
3769 } else if ((t & VT_BTYPE) == VT_LONG) {
3770 t = (t & ~VT_BTYPE) | VT_LLONG;
3771 } else {
3772 u = VT_LONG;
3773 goto basic_type1;
3775 break;
3776 #ifdef TCC_TARGET_ARM64
3777 case TOK_UINT128:
3778 /* GCC's __uint128_t appears in some Linux header files. Make it a
3779 synonym for long double to get the size and alignment right. */
3780 u = VT_LDOUBLE;
3781 goto basic_type;
3782 #endif
3783 case TOK_BOOL:
3784 u = VT_BOOL;
3785 goto basic_type;
3786 case TOK_FLOAT:
3787 u = VT_FLOAT;
3788 goto basic_type;
3789 case TOK_DOUBLE:
3790 next();
3791 if ((t & VT_BTYPE) == VT_LONG) {
3792 #ifdef TCC_TARGET_PE
3793 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3794 #else
3795 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3796 #endif
3797 } else {
3798 u = VT_DOUBLE;
3799 goto basic_type1;
3801 break;
3802 case TOK_ENUM:
3803 struct_decl(&type1, ad, VT_ENUM);
3804 basic_type2:
3805 u = type1.t;
3806 type->ref = type1.ref;
3807 goto basic_type1;
3808 case TOK_STRUCT:
3809 case TOK_UNION:
3810 struct_decl(&type1, ad, VT_STRUCT);
3811 goto basic_type2;
3813 /* type modifiers */
3814 case TOK_CONST1:
3815 case TOK_CONST2:
3816 case TOK_CONST3:
3817 type->t = t;
3818 parse_btype_qualify(type, VT_CONSTANT);
3819 t = type->t;
3820 next();
3821 break;
3822 case TOK_VOLATILE1:
3823 case TOK_VOLATILE2:
3824 case TOK_VOLATILE3:
3825 type->t = t;
3826 parse_btype_qualify(type, VT_VOLATILE);
3827 t = type->t;
3828 next();
3829 break;
3830 case TOK_SIGNED1:
3831 case TOK_SIGNED2:
3832 case TOK_SIGNED3:
3833 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3834 tcc_error("signed and unsigned modifier");
3835 typespec_found = 1;
3836 t |= VT_DEFSIGN;
3837 next();
3838 break;
3839 case TOK_REGISTER:
3840 case TOK_AUTO:
3841 case TOK_RESTRICT1:
3842 case TOK_RESTRICT2:
3843 case TOK_RESTRICT3:
3844 next();
3845 break;
3846 case TOK_UNSIGNED:
3847 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3848 tcc_error("signed and unsigned modifier");
3849 t |= VT_DEFSIGN | VT_UNSIGNED;
3850 next();
3851 typespec_found = 1;
3852 break;
3854 /* storage */
3855 case TOK_EXTERN:
3856 g = VT_EXTERN;
3857 goto storage;
3858 case TOK_STATIC:
3859 g = VT_STATIC;
3860 goto storage;
3861 case TOK_TYPEDEF:
3862 g = VT_TYPEDEF;
3863 goto storage;
3864 storage:
3865 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3866 tcc_error("multiple storage classes");
3867 t |= g;
3868 next();
3869 break;
3870 case TOK_INLINE1:
3871 case TOK_INLINE2:
3872 case TOK_INLINE3:
3873 t |= VT_INLINE;
3874 next();
3875 break;
3877 /* GNUC attribute */
3878 case TOK_ATTRIBUTE1:
3879 case TOK_ATTRIBUTE2:
3880 parse_attribute(ad);
3881 if (ad->a.mode) {
3882 u = ad->a.mode -1;
3883 t = (t & ~VT_BTYPE) | u;
3885 break;
3886 /* GNUC typeof */
3887 case TOK_TYPEOF1:
3888 case TOK_TYPEOF2:
3889 case TOK_TYPEOF3:
3890 next();
3891 parse_expr_type(&type1);
3892 /* remove all storage modifiers except typedef */
3893 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3894 goto basic_type2;
3895 default:
3896 if (typespec_found)
3897 goto the_end;
3898 s = sym_find(tok);
3899 if (!s || !(s->type.t & VT_TYPEDEF))
3900 goto the_end;
3902 type->t = ((s->type.t & ~VT_TYPEDEF) |
3903 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3904 type->ref = s->type.ref;
3905 if (t & (VT_CONSTANT | VT_VOLATILE))
3906 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3907 t = type->t;
3909 if (s->r) {
3910 /* get attributes from typedef */
3911 if (0 == ad->a.aligned)
3912 ad->a.aligned = s->a.aligned;
3913 if (0 == ad->a.func_call)
3914 ad->a.func_call = s->a.func_call;
3915 ad->a.packed |= s->a.packed;
3917 next();
3918 typespec_found = 1;
3919 break;
3921 type_found = 1;
3923 the_end:
3924 if (tcc_state->char_is_unsigned) {
3925 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3926 t |= VT_UNSIGNED;
3929 /* long is never used as type */
3930 if ((t & VT_BTYPE) == VT_LONG)
3931 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3932 defined TCC_TARGET_PE
3933 t = (t & ~VT_BTYPE) | VT_INT;
3934 #else
3935 t = (t & ~VT_BTYPE) | VT_LLONG;
3936 #endif
3937 type->t = t;
3938 return type_found;
3941 /* convert a function parameter type (array to pointer and function to
3942 function pointer) */
3943 static inline void convert_parameter_type(CType *pt)
3945 /* remove const and volatile qualifiers (XXX: const could be used
3946 to indicate a const function parameter) */
3947 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3948 /* array must be transformed to pointer according to ANSI C */
3949 pt->t &= ~VT_ARRAY;
3950 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3951 mk_pointer(pt);
3955 ST_FUNC void parse_asm_str(CString *astr)
3957 skip('(');
3958 parse_mult_str(astr, "string constant");
3961 /* Parse an asm label and return the token */
3962 static int asm_label_instr(void)
3964 int v;
3965 CString astr;
3967 next();
3968 parse_asm_str(&astr);
3969 skip(')');
3970 #ifdef ASM_DEBUG
3971 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3972 #endif
3973 v = tok_alloc(astr.data, astr.size - 1)->tok;
3974 cstr_free(&astr);
3975 return v;
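/* Illustrative note for the function-declarator case below: "int f();" is
   recorded as FUNC_OLD, "int f(void)" as FUNC_NEW with no parameters, and
   "int f(int, ...)" as FUNC_ELLIPSIS; the parameter list is kept as a chain
   of SYM_FIELD symbols hanging off the function's type->ref. */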
3978 static void post_type(CType *type, AttributeDef *ad, int storage)
3980 int n, l, t1, arg_size, align;
3981 Sym **plast, *s, *first;
3982 AttributeDef ad1;
3983 CType pt;
3985 if (tok == '(') {
3986 /* function declaration */
3987 next();
3988 l = 0;
3989 first = NULL;
3990 plast = &first;
3991 arg_size = 0;
3992 if (tok != ')') {
3993 for(;;) {
3994 /* read param name and compute offset */
3995 if (l != FUNC_OLD) {
3996 if (!parse_btype(&pt, &ad1)) {
3997 if (l) {
3998 tcc_error("invalid type");
3999 } else {
4000 l = FUNC_OLD;
4001 goto old_proto;
4004 l = FUNC_NEW;
4005 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4006 break;
4007 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4008 if ((pt.t & VT_BTYPE) == VT_VOID)
4009 tcc_error("parameter declared as void");
4010 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4011 } else {
4012 old_proto:
4013 n = tok;
4014 if (n < TOK_UIDENT)
4015 expect("identifier");
4016 pt.t = VT_INT;
4017 next();
4019 convert_parameter_type(&pt);
4020 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4021 *plast = s;
4022 plast = &s->next;
4023 if (tok == ')')
4024 break;
4025 skip(',');
4026 if (l == FUNC_NEW && tok == TOK_DOTS) {
4027 l = FUNC_ELLIPSIS;
4028 next();
4029 break;
4033 /* if no parameters, then old type prototype */
4034 if (l == 0)
4035 l = FUNC_OLD;
4036 skip(')');
4037 /* NOTE: const is ignored in returned type as it has a special
4038 meaning in gcc / C++ */
4039 type->t &= ~VT_CONSTANT;
4040 /* some ancient pre-K&R C allows a function to return an array
4041 and the array brackets to be put after the arguments, such
4042 that "int c()[]" means something like "int[] c()" */
4043 if (tok == '[') {
4044 next();
4045 skip(']'); /* only handle simple "[]" */
4046 type->t |= VT_PTR;
4048 /* we push an anonymous symbol which will contain the function prototype */
4049 ad->a.func_args = arg_size;
4050 s = sym_push(SYM_FIELD, type, 0, l);
4051 s->a = ad->a;
4052 s->next = first;
4053 type->t = VT_FUNC;
4054 type->ref = s;
4055 } else if (tok == '[') {
4056 int saved_nocode_wanted = nocode_wanted;
4057 /* array definition */
4058 next();
4059 if (tok == TOK_RESTRICT1)
4060 next();
4061 n = -1;
4062 t1 = 0;
4063 if (tok != ']') {
4064 if (!local_stack || (storage & VT_STATIC))
4065 vpushi(expr_const());
4066 else {
4067 /* the length of a VLA (which can only happen with local_stack
4068 && !VT_STATIC) must always be evaluated, even under nocode_wanted,
4069 so that its size slot is initialized (e.g. under sizeof
4070 or typeof). */
4071 nocode_wanted = 0;
4072 gexpr();
4074 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4075 n = vtop->c.i;
4076 if (n < 0)
4077 tcc_error("invalid array size");
4078 } else {
4079 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4080 tcc_error("size of variable length array should be an integer");
4081 t1 = VT_VLA;
4084 skip(']');
4085 /* parse next post type */
4086 post_type(type, ad, storage);
4087 if (type->t == VT_FUNC)
4088 tcc_error("declaration of an array of functions");
4089 t1 |= type->t & VT_VLA;
4091 if (t1 & VT_VLA) {
4092 loc -= type_size(&int_type, &align);
4093 loc &= -align;
4094 n = loc;
4096 vla_runtime_type_size(type, &align);
4097 gen_op('*');
4098 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4099 vswap();
4100 vstore();
4102 if (n != -1)
4103 vpop();
4104 nocode_wanted = saved_nocode_wanted;
4106 /* we push an anonymous symbol which will contain the array
4107 element type */
4108 s = sym_push(SYM_FIELD, type, 0, n);
4109 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4110 type->ref = s;
4114 /* Parse a type declaration (except basic type), and return the type
4115 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4116 expected. 'type' should contain the basic type. 'ad' is the
4117 attribute definition of the basic type. It can be modified by
4118 type_decl(). */
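/* Illustrative example: for "int (*fp)(void)" the pointer declarator inside
   the parentheses is parsed recursively into type1, and the outer function
   type built by post_type() is then appended at the end of type1. */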
4120 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4122 Sym *s;
4123 CType type1, *type2;
4124 int qualifiers, storage;
4126 while (tok == '*') {
4127 qualifiers = 0;
4128 redo:
4129 next();
4130 switch(tok) {
4131 case TOK_CONST1:
4132 case TOK_CONST2:
4133 case TOK_CONST3:
4134 qualifiers |= VT_CONSTANT;
4135 goto redo;
4136 case TOK_VOLATILE1:
4137 case TOK_VOLATILE2:
4138 case TOK_VOLATILE3:
4139 qualifiers |= VT_VOLATILE;
4140 goto redo;
4141 case TOK_RESTRICT1:
4142 case TOK_RESTRICT2:
4143 case TOK_RESTRICT3:
4144 goto redo;
4145 /* XXX: clarify attribute handling */
4146 case TOK_ATTRIBUTE1:
4147 case TOK_ATTRIBUTE2:
4148 parse_attribute(ad);
4149 break;
4151 mk_pointer(type);
4152 type->t |= qualifiers;
4155 /* recursive type */
4156 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4157 type1.t = 0; /* XXX: same as int */
4158 if (tok == '(') {
4159 next();
4160 /* XXX: this is not correct to modify 'ad' at this point, but
4161 the syntax is not clear */
4162 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4163 parse_attribute(ad);
4164 type_decl(&type1, ad, v, td);
4165 skip(')');
4166 } else {
4167 /* type identifier */
4168 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4169 *v = tok;
4170 next();
4171 } else {
4172 if (!(td & TYPE_ABSTRACT))
4173 expect("identifier");
4174 *v = 0;
4177 storage = type->t & VT_STORAGE;
4178 type->t &= ~VT_STORAGE;
4179 post_type(type, ad, storage);
4180 type->t |= storage;
4181 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4182 parse_attribute(ad);
4184 if (!type1.t)
4185 return;
4186 /* append type at the end of type1 */
4187 type2 = &type1;
4188 for(;;) {
4189 s = type2->ref;
4190 type2 = &s->type;
4191 if (!type2->t) {
4192 *type2 = *type;
4193 break;
4196 *type = type1;
4197 type->t |= storage;
4200 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
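/* Illustrative examples: VT_BYTE yields VT_LVAL|VT_LVAL_BYTE, an unsigned
   short yields VT_LVAL|VT_LVAL_SHORT|VT_LVAL_UNSIGNED, and int-sized or
   larger types yield plain VT_LVAL. */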
4201 ST_FUNC int lvalue_type(int t)
4203 int bt, r;
4204 r = VT_LVAL;
4205 bt = t & VT_BTYPE;
4206 if (bt == VT_BYTE || bt == VT_BOOL)
4207 r |= VT_LVAL_BYTE;
4208 else if (bt == VT_SHORT)
4209 r |= VT_LVAL_SHORT;
4210 else
4211 return r;
4212 if (t & VT_UNSIGNED)
4213 r |= VT_LVAL_UNSIGNED;
4214 return r;
4217 /* indirection with full error checking and bound check */
4218 ST_FUNC void indir(void)
4220 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4221 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4222 return;
4223 expect("pointer");
4225 if (vtop->r & VT_LVAL)
4226 gv(RC_INT);
4227 vtop->type = *pointed_type(&vtop->type);
4228 /* Arrays and functions are never lvalues */
4229 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4230 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4231 vtop->r |= lvalue_type(vtop->type.t);
4232 /* if bound checking, the referenced pointer must be checked */
4233 #ifdef CONFIG_TCC_BCHECK
4234 if (tcc_state->do_bounds_check)
4235 vtop->r |= VT_MUSTBOUND;
4236 #endif
4240 /* pass a parameter to a function and do type checking and casting */
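/* Illustrative note: for an old-style or variadic callee past the named
   parameters, only the default promotion float -> double is applied (and
   bit-field values are widened to their base type); otherwise the argument
   is checked and cast to the declared parameter type via gen_assign_cast(). */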
4241 static void gfunc_param_typed(Sym *func, Sym *arg)
4243 int func_type;
4244 CType type;
4246 func_type = func->c;
4247 if (func_type == FUNC_OLD ||
4248 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4249 /* default casting : only need to convert float to double */
4250 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4251 type.t = VT_DOUBLE;
4252 gen_cast(&type);
4253 } else if (vtop->type.t & VT_BITFIELD) {
4254 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4255 type.ref = vtop->type.ref;
4256 gen_cast(&type);
4258 } else if (arg == NULL) {
4259 tcc_error("too many arguments to function");
4260 } else {
4261 type = arg->type;
4262 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4263 gen_assign_cast(&type);
4267 /* parse an expression of the form '(type)' or '(expr)' and return its
4268 type */
4269 static void parse_expr_type(CType *type)
4271 int n;
4272 AttributeDef ad;
4274 skip('(');
4275 if (parse_btype(type, &ad)) {
4276 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4277 } else {
4278 expr_type(type);
4280 skip(')');
4283 static void parse_type(CType *type)
4285 AttributeDef ad;
4286 int n;
4288 if (!parse_btype(type, &ad)) {
4289 expect("type");
4291 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4294 static void vpush_tokc(int t)
4296 CType type;
4297 type.t = t;
4298 type.ref = 0;
4299 vsetc(&type, VT_CONST, &tokc);
4302 ST_FUNC void unary(void)
4304 int n, t, align, size, r, sizeof_caller;
4305 CType type;
4306 Sym *s;
4307 AttributeDef ad;
4309 sizeof_caller = in_sizeof;
4310 in_sizeof = 0;
4311 /* XXX: GCC 2.95.3 does not generate a jump table although it would
4312 be better here */
4313 tok_next:
4314 switch(tok) {
4315 case TOK_EXTENSION:
4316 next();
4317 goto tok_next;
4318 case TOK_CINT:
4319 case TOK_CCHAR:
4320 case TOK_LCHAR:
4321 vpushi(tokc.i);
4322 next();
4323 break;
4324 case TOK_CUINT:
4325 vpush_tokc(VT_INT | VT_UNSIGNED);
4326 next();
4327 break;
4328 case TOK_CLLONG:
4329 vpush_tokc(VT_LLONG);
4330 next();
4331 break;
4332 case TOK_CULLONG:
4333 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4334 next();
4335 break;
4336 case TOK_CFLOAT:
4337 vpush_tokc(VT_FLOAT);
4338 next();
4339 break;
4340 case TOK_CDOUBLE:
4341 vpush_tokc(VT_DOUBLE);
4342 next();
4343 break;
4344 case TOK_CLDOUBLE:
4345 vpush_tokc(VT_LDOUBLE);
4346 next();
4347 break;
4348 case TOK___FUNCTION__:
4349 if (!gnu_ext)
4350 goto tok_identifier;
4351 /* fall thru */
4352 case TOK___FUNC__:
4354 void *ptr;
4355 int len;
4356 /* special function name identifier */
4357 len = strlen(funcname) + 1;
4358 /* generate char[len] type */
4359 type.t = VT_BYTE;
4360 mk_pointer(&type);
4361 type.t |= VT_ARRAY;
4362 type.ref->c = len;
4363 vpush_ref(&type, data_section, data_section->data_offset, len);
4364 ptr = section_ptr_add(data_section, len);
4365 memcpy(ptr, funcname, len);
4366 next();
4368 break;
4369 case TOK_LSTR:
4370 #ifdef TCC_TARGET_PE
4371 t = VT_SHORT | VT_UNSIGNED;
4372 #else
4373 t = VT_INT;
4374 #endif
4375 goto str_init;
4376 case TOK_STR:
4377 /* string parsing */
4378 t = VT_BYTE;
4379 str_init:
4380 if (tcc_state->warn_write_strings)
4381 t |= VT_CONSTANT;
4382 type.t = t;
4383 mk_pointer(&type);
4384 type.t |= VT_ARRAY;
4385 memset(&ad, 0, sizeof(AttributeDef));
4386 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4387 break;
4388 case '(':
4389 next();
4390 /* cast ? */
4391 if (parse_btype(&type, &ad)) {
4392 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4393 skip(')');
4394 /* check ISOC99 compound literal */
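/* e.g. '(int []){1, 2, 3}' or '(struct point){.x = 1}'; the
   literal is laid out like an initialized variable right below
   (locally unless global_expr forces static allocation) */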
4395 if (tok == '{') {
4396 /* data is allocated locally by default */
4397 if (global_expr)
4398 r = VT_CONST;
4399 else
4400 r = VT_LOCAL;
4401 /* all except arrays are lvalues */
4402 if (!(type.t & VT_ARRAY))
4403 r |= lvalue_type(type.t);
4404 memset(&ad, 0, sizeof(AttributeDef));
4405 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4406 } else {
4407 if (sizeof_caller) {
4408 vpush(&type);
4409 return;
4411 unary();
4412 gen_cast(&type);
4414 } else if (tok == '{') {
4415 int saved_nocode_wanted = nocode_wanted;
4416 if (const_wanted)
4417 tcc_error("expected constant");
4418 /* save all registers */
4419 save_regs(0);
4420 /* statement expression : we do not accept break/continue
4421 inside as GCC does. We do retain the nocode_wanted state,
4422 as statement expressions can't ever be entered from the
4423 outside, so any reactivation of code emission (from labels
4424 or loop heads) can be disabled again after the end of it. */
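/* e.g. the GNU statement expression '({ int i = f(); i + 1; })',
   whose value is that of the last expression statement in it */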
4425 block(NULL, NULL, 1);
4426 nocode_wanted = saved_nocode_wanted;
4427 skip(')');
4428 } else {
4429 gexpr();
4430 skip(')');
4432 break;
4433 case '*':
4434 next();
4435 unary();
4436 indir();
4437 break;
4438 case '&':
4439 next();
4440 unary();
4441 /* function names must be treated as function pointers,
4442 except for unary '&' and sizeof. Since we consider that
4443 functions are not lvalues, we only have to handle them
4444 here and in function calls. */
4445 /* arrays can also be used although they are not lvalues */
4446 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4447 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4448 test_lvalue();
4449 mk_pointer(&vtop->type);
4450 gaddrof();
4451 break;
4452 case '!':
4453 next();
4454 unary();
4455 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4456 CType boolean;
4457 boolean.t = VT_BOOL;
4458 gen_cast(&boolean);
4459 vtop->c.i = !vtop->c.i;
4460 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4461 vtop->c.i ^= 1;
4462 else {
4463 save_regs(1);
4464 vseti(VT_JMP, gvtst(1, 0));
4466 break;
4467 case '~':
4468 next();
4469 unary();
4470 vpushi(-1);
4471 gen_op('^');
4472 break;
4473 case '+':
4474 next();
4475 unary();
4476 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4477 tcc_error("pointer not accepted for unary plus");
4478 /* In order to force a cast, we add zero, except for floating point
4479 where we really need a noop (otherwise -0.0 would be transformed
4480 into +0.0). */
4481 if (!is_float(vtop->type.t)) {
4482 vpushi(0);
4483 gen_op('+');
4485 break;
4486 case TOK_SIZEOF:
4487 case TOK_ALIGNOF1:
4488 case TOK_ALIGNOF2:
4489 t = tok;
4490 next();
4491 in_sizeof++;
4492 unary_type(&type); // this also resets in_sizeof to 0
4493 size = type_size(&type, &align);
4494 if (t == TOK_SIZEOF) {
4495 if (!(type.t & VT_VLA)) {
4496 if (size < 0)
4497 tcc_error("sizeof applied to an incomplete type");
4498 vpushs(size);
4499 } else {
4500 vla_runtime_type_size(&type, &align);
4502 } else {
4503 vpushs(align);
4505 vtop->type.t |= VT_UNSIGNED;
4506 break;
4508 case TOK_builtin_expect:
4510 /* __builtin_expect is a no-op for now */
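/* e.g. 'if (__builtin_expect(err, 0))': the first argument is
   evaluated as usual, the hint argument is parsed and discarded */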
4511 next();
4512 skip('(');
4513 expr_eq();
4514 skip(',');
4515 nocode_wanted++;
4516 expr_lor_const();
4517 vpop();
4518 nocode_wanted--;
4519 skip(')');
4521 break;
4522 case TOK_builtin_types_compatible_p:
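/* e.g. '__builtin_types_compatible_p(int, const int)' yields 1,
   since top-level qualifiers are stripped before the comparison */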
4524 CType type1, type2;
4525 next();
4526 skip('(');
4527 parse_type(&type1);
4528 skip(',');
4529 parse_type(&type2);
4530 skip(')');
4531 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4532 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4533 vpushi(is_compatible_types(&type1, &type2));
4535 break;
4536 case TOK_builtin_choose_expr:
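/* e.g. '__builtin_choose_expr(1, a, b)' selects 'a': the branch
   not chosen by the constant condition is parsed with code
   generation disabled and then popped */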
4538 int64_t c;
4539 next();
4540 skip('(');
4541 c = expr_const64();
4542 skip(',');
4543 if (!c) {
4544 nocode_wanted++;
4546 expr_eq();
4547 if (!c) {
4548 vpop();
4549 nocode_wanted--;
4551 skip(',');
4552 if (c) {
4553 nocode_wanted++;
4555 expr_eq();
4556 if (c) {
4557 vpop();
4558 nocode_wanted--;
4560 skip(')');
4562 break;
4563 case TOK_builtin_constant_p:
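/* e.g. '__builtin_constant_p(3 * 4)' is 1 while
   '__builtin_constant_p(x)' is 0 for a variable 'x': the argument
   is parsed without generating code and tested for constness */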
4565 int res;
4566 next();
4567 skip('(');
4568 nocode_wanted++;
4569 gexpr();
4570 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4571 vpop();
4572 nocode_wanted--;
4573 skip(')');
4574 vpushi(res);
4576 break;
4577 case TOK_builtin_frame_address:
4578 case TOK_builtin_return_address:
4580 int tok1 = tok;
4581 int level;
4582 CType type;
4583 next();
4584 skip('(');
4585 if (tok != TOK_CINT) {
4586 tcc_error("%s only takes positive integers",
4587 tok1 == TOK_builtin_return_address ?
4588 "__builtin_return_address" :
4589 "__builtin_frame_address");
4591 level = (uint32_t)tokc.i;
4592 next();
4593 skip(')');
4594 type.t = VT_VOID;
4595 mk_pointer(&type);
4596 vset(&type, VT_LOCAL, 0); /* local frame */
4597 while (level--) {
4598 mk_pointer(&vtop->type);
4599 indir(); /* -> parent frame */
4601 if (tok1 == TOK_builtin_return_address) {
4602 // assume return address is just above frame pointer on stack
4603 vpushi(PTR_SIZE);
4604 gen_op('+');
4605 mk_pointer(&vtop->type);
4606 indir();
4609 break;
4610 #ifdef TCC_TARGET_X86_64
4611 #ifdef TCC_TARGET_PE
4612 case TOK_builtin_va_start:
4614 next();
4615 skip('(');
4616 expr_eq();
4617 skip(',');
4618 expr_eq();
4619 skip(')');
4620 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4621 tcc_error("__builtin_va_start expects a local variable");
4622 vtop->r &= ~(VT_LVAL | VT_REF);
4623 vtop->type = char_pointer_type;
4624 vtop->c.i += 8;
4625 vstore();
4627 break;
4628 #else
4629 case TOK_builtin_va_arg_types:
4631 CType type;
4632 next();
4633 skip('(');
4634 parse_type(&type);
4635 skip(')');
4636 vpushi(classify_x86_64_va_arg(&type));
4638 break;
4639 #endif
4640 #endif
4642 #ifdef TCC_TARGET_ARM64
4643 case TOK___va_start: {
4644 next();
4645 skip('(');
4646 expr_eq();
4647 skip(',');
4648 expr_eq();
4649 skip(')');
4650 //xx check types
4651 gen_va_start();
4652 vpushi(0);
4653 vtop->type.t = VT_VOID;
4654 break;
4656 case TOK___va_arg: {
4657 CType type;
4658 next();
4659 skip('(');
4660 expr_eq();
4661 skip(',');
4662 parse_type(&type);
4663 skip(')');
4664 //xx check types
4665 gen_va_arg(&type);
4666 vtop->type = type;
4667 break;
4669 case TOK___arm64_clear_cache: {
4670 next();
4671 skip('(');
4672 expr_eq();
4673 skip(',');
4674 expr_eq();
4675 skip(')');
4676 gen_clear_cache();
4677 vpushi(0);
4678 vtop->type.t = VT_VOID;
4679 break;
4681 #endif
4682 /* pre operations */
4683 case TOK_INC:
4684 case TOK_DEC:
4685 t = tok;
4686 next();
4687 unary();
4688 inc(0, t);
4689 break;
4690 case '-':
4691 next();
4692 unary();
4693 t = vtop->type.t & VT_BTYPE;
4694 if (is_float(t)) {
4695 /* In IEEE negate(x) isn't subtract(0,x), but rather
4696 subtract(-0, x). */
4697 vpush(&vtop->type);
4698 if (t == VT_FLOAT)
4699 vtop->c.f = -1.0 * 0.0;
4700 else if (t == VT_DOUBLE)
4701 vtop->c.d = -1.0 * 0.0;
4702 else
4703 vtop->c.ld = -1.0 * 0.0;
4704 } else
4705 vpushi(0);
4706 vswap();
4707 gen_op('-');
4708 break;
4709 case TOK_LAND:
4710 if (!gnu_ext)
4711 goto tok_identifier;
4712 next();
4713 /* allow taking the address of a label */
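/* e.g. 'void *p = &&retry;', later used as 'goto *p;' */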
4714 if (tok < TOK_UIDENT)
4715 expect("label identifier");
4716 s = label_find(tok);
4717 if (!s) {
4718 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4719 } else {
4720 if (s->r == LABEL_DECLARED)
4721 s->r = LABEL_FORWARD;
4723 if (!s->type.t) {
4724 s->type.t = VT_VOID;
4725 mk_pointer(&s->type);
4726 s->type.t |= VT_STATIC;
4728 vpushsym(&s->type, s);
4729 next();
4730 break;
4732 // special qNaN, sNaN and infinity values
4733 case TOK___NAN__:
4734 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4735 next();
4736 break;
4737 case TOK___SNAN__:
4738 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4739 next();
4740 break;
4741 case TOK___INF__:
4742 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4743 next();
4744 break;
4746 default:
4747 tok_identifier:
4748 t = tok;
4749 next();
4750 if (t < TOK_UIDENT)
4751 expect("identifier");
4752 s = sym_find(t);
4753 if (!s) {
4754 const char *name = get_tok_str(t, NULL);
4755 if (tok != '(')
4756 tcc_error("'%s' undeclared", name);
4757 /* for simple function calls, we tolerate an undeclared
4758 external reference to an int() function */
4759 if (tcc_state->warn_implicit_function_declaration
4760 #ifdef TCC_TARGET_PE
4761 /* people must be warned about using undeclared WINAPI functions
4762 (which usually start with an uppercase letter) */
4763 || (name[0] >= 'A' && name[0] <= 'Z')
4764 #endif
4766 tcc_warning("implicit declaration of function '%s'", name);
4767 s = external_global_sym(t, &func_old_type, 0);
4770 r = s->r;
4771 /* A symbol that has a register is a local register variable,
4772 which starts out as VT_LOCAL value. */
4773 if ((r & VT_VALMASK) < VT_CONST)
4774 r = (r & ~VT_VALMASK) | VT_LOCAL;
4776 vset(&s->type, r, s->c);
4777 /* Point to s as backpointer (even without r&VT_SYM).
4778 Will be used by at least the x86 inline asm parser for
4779 regvars. */
4780 vtop->sym = s;
4781 if (vtop->r & VT_SYM) {
4782 vtop->c.i = 0;
4784 break;
4787 /* post operations */
4788 while (1) {
4789 if (tok == TOK_INC || tok == TOK_DEC) {
4790 inc(1, tok);
4791 next();
4792 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4793 int qualifiers;
4794 /* field */
4795 if (tok == TOK_ARROW)
4796 indir();
4797 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4798 test_lvalue();
4799 gaddrof();
4800 /* expect a pointer to a structure */
4801 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4802 expect("struct or union");
4803 if (tok == TOK_CDOUBLE)
4804 expect("field name");
4805 next();
4806 if (tok == TOK_CINT || tok == TOK_CUINT)
4807 expect("field name");
4808 s = find_field(&vtop->type, tok);
4809 if (!s)
4810 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4811 /* add field offset to pointer */
4812 vtop->type = char_pointer_type; /* change type to 'char *' */
4813 vpushi(s->c);
4814 gen_op('+');
4815 /* change type to field type, and set to lvalue */
4816 vtop->type = s->type;
4817 vtop->type.t |= qualifiers;
4818 /* an array is never an lvalue */
4819 if (!(vtop->type.t & VT_ARRAY)) {
4820 vtop->r |= lvalue_type(vtop->type.t);
4821 #ifdef CONFIG_TCC_BCHECK
4822 /* if bound checking, the referenced pointer must be checked */
4823 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4824 vtop->r |= VT_MUSTBOUND;
4825 #endif
4827 next();
4828 } else if (tok == '[') {
4829 next();
4830 gexpr();
4831 gen_op('+');
4832 indir();
4833 skip(']');
4834 } else if (tok == '(') {
4835 SValue ret;
4836 Sym *sa;
4837 int nb_args, ret_nregs, ret_align, regsize, variadic;
4839 /* function call */
4840 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4841 /* pointer test (no array accepted) */
4842 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4843 vtop->type = *pointed_type(&vtop->type);
4844 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4845 goto error_func;
4846 } else {
4847 error_func:
4848 expect("function pointer");
4850 } else {
4851 vtop->r &= ~VT_LVAL; /* no lvalue */
4853 /* get return type */
4854 s = vtop->type.ref;
4855 next();
4856 sa = s->next; /* first parameter */
4857 nb_args = regsize = 0;
4858 ret.r2 = VT_CONST;
4859 /* compute first implicit argument if a structure is returned */
4860 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4861 variadic = (s->c == FUNC_ELLIPSIS);
4862 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4863 &ret_align, &regsize);
4864 if (!ret_nregs) {
4865 /* get some space for the returned structure */
4866 size = type_size(&s->type, &align);
4867 #ifdef TCC_TARGET_ARM64
4868 /* On arm64, a small struct is returned in registers.
4869 It is much easier to write it to memory if we know
4870 that we are allowed to write some extra bytes, so
4871 round the allocated space up to a power of 2: */
4872 if (size < 16)
4873 while (size & (size - 1))
4874 size = (size | (size - 1)) + 1;
4875 #endif
4876 loc = (loc - size) & -align;
4877 ret.type = s->type;
4878 ret.r = VT_LOCAL | VT_LVAL;
4879 /* pass it as 'int' to avoid structure arg passing
4880 problems */
4881 vseti(VT_LOCAL, loc);
4882 ret.c = vtop->c;
4883 nb_args++;
4885 } else {
4886 ret_nregs = 1;
4887 ret.type = s->type;
4890 if (ret_nregs) {
4891 /* return in register */
4892 if (is_float(ret.type.t)) {
4893 ret.r = reg_fret(ret.type.t);
4894 #ifdef TCC_TARGET_X86_64
4895 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4896 ret.r2 = REG_QRET;
4897 #endif
4898 } else {
4899 #ifndef TCC_TARGET_ARM64
4900 #ifdef TCC_TARGET_X86_64
4901 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4902 #else
4903 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4904 #endif
4905 ret.r2 = REG_LRET;
4906 #endif
4907 ret.r = REG_IRET;
4909 ret.c.i = 0;
4911 if (tok != ')') {
4912 for(;;) {
4913 expr_eq();
4914 gfunc_param_typed(s, sa);
4915 nb_args++;
4916 if (sa)
4917 sa = sa->next;
4918 if (tok == ')')
4919 break;
4920 skip(',');
4923 if (sa)
4924 tcc_error("too few arguments to function");
4925 skip(')');
4926 gfunc_call(nb_args);
4928 /* return value */
4929 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4930 vsetc(&ret.type, r, &ret.c);
4931 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4934 /* handle packed struct return */
4935 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4936 int addr, offset;
4938 size = type_size(&s->type, &align);
4939 /* We often write whole registers, so make sure there's enough
4940 space. Assume the register size is a power of 2. */
4941 if (regsize > align)
4942 align = regsize;
4943 loc = (loc - size) & -align;
4944 addr = loc;
4945 offset = 0;
4946 for (;;) {
4947 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4948 vswap();
4949 vstore();
4950 vtop--;
4951 if (--ret_nregs == 0)
4952 break;
4953 offset += regsize;
4955 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4957 } else {
4958 break;
4963 ST_FUNC void expr_prod(void)
4965 int t;
4967 unary();
4968 while (tok == '*' || tok == '/' || tok == '%') {
4969 t = tok;
4970 next();
4971 unary();
4972 gen_op(t);
4976 ST_FUNC void expr_sum(void)
4978 int t;
4980 expr_prod();
4981 while (tok == '+' || tok == '-') {
4982 t = tok;
4983 next();
4984 expr_prod();
4985 gen_op(t);
4989 static void expr_shift(void)
4991 int t;
4993 expr_sum();
4994 while (tok == TOK_SHL || tok == TOK_SAR) {
4995 t = tok;
4996 next();
4997 expr_sum();
4998 gen_op(t);
5002 static void expr_cmp(void)
5004 int t;
5006 expr_shift();
5007 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5008 tok == TOK_ULT || tok == TOK_UGE) {
5009 t = tok;
5010 next();
5011 expr_shift();
5012 gen_op(t);
5016 static void expr_cmpeq(void)
5018 int t;
5020 expr_cmp();
5021 while (tok == TOK_EQ || tok == TOK_NE) {
5022 t = tok;
5023 next();
5024 expr_cmp();
5025 gen_op(t);
5029 static void expr_and(void)
5031 expr_cmpeq();
5032 while (tok == '&') {
5033 next();
5034 expr_cmpeq();
5035 gen_op('&');
5039 static void expr_xor(void)
5041 expr_and();
5042 while (tok == '^') {
5043 next();
5044 expr_and();
5045 gen_op('^');
5049 static void expr_or(void)
5051 expr_xor();
5052 while (tok == '|') {
5053 next();
5054 expr_xor();
5055 gen_op('|');
5059 /* XXX: fix this mess */
5060 static void expr_land_const(void)
5062 expr_or();
5063 while (tok == TOK_LAND) {
5064 next();
5065 expr_or();
5066 gen_op(TOK_LAND);
5069 static void expr_lor_const(void)
5071 expr_land_const();
5072 while (tok == TOK_LOR) {
5073 next();
5074 expr_land_const();
5075 gen_op(TOK_LOR);
5079 static void expr_land(void)
5081 expr_or();
5082 if (tok == TOK_LAND) {
5083 int t = 0;
5084 for(;;) {
5085 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5086 CType ctb;
5087 ctb.t = VT_BOOL;
5088 gen_cast(&ctb);
5089 if (vtop->c.i) {
5090 vpop();
5091 } else {
5092 nocode_wanted++;
5093 while (tok == TOK_LAND) {
5094 next();
5095 expr_or();
5096 vpop();
5098 nocode_wanted--;
5099 if (t)
5100 gsym(t);
5101 gen_cast(&int_type);
5102 break;
5104 } else {
5105 if (!t)
5106 save_regs(1);
5107 t = gvtst(1, t);
5109 if (tok != TOK_LAND) {
5110 if (t)
5111 vseti(VT_JMPI, t);
5112 else
5113 vpushi(1);
5114 break;
5116 next();
5117 expr_or();
5122 static void expr_lor(void)
5124 expr_land();
5125 if (tok == TOK_LOR) {
5126 int t = 0;
5127 for(;;) {
5128 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5129 CType ctb;
5130 ctb.t = VT_BOOL;
5131 gen_cast(&ctb);
5132 if (!vtop->c.i) {
5133 vpop();
5134 } else {
5135 nocode_wanted++;
5136 while (tok == TOK_LOR) {
5137 next();
5138 expr_land();
5139 vpop();
5141 nocode_wanted--;
5142 if (t)
5143 gsym(t);
5144 gen_cast(&int_type);
5145 break;
5147 } else {
5148 if (!t)
5149 save_regs(1);
5150 t = gvtst(0, t);
5152 if (tok != TOK_LOR) {
5153 if (t)
5154 vseti(VT_JMP, t);
5155 else
5156 vpushi(0);
5157 break;
5159 next();
5160 expr_land();
5165 /* Assuming vtop is a value used in a conditional context
5166 (i.e. compared with zero) return 0 if it's false, 1 if
5167 true and -1 if it can't be statically determined. */
5168 static int condition_3way(void)
5170 int c = -1;
5171 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5172 (!(vtop->r & VT_SYM) ||
5173 !(vtop->sym->type.t & VT_WEAK))) {
5174 CType boolean;
5175 boolean.t = VT_BOOL;
5176 vdup();
5177 gen_cast(&boolean);
5178 c = vtop->c.i;
5179 vpop();
5181 return c;
5184 static void expr_cond(void)
5186 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5187 SValue sv;
5188 CType type, type1, type2;
5190 expr_lor();
5191 if (tok == '?') {
5192 next();
5193 c = condition_3way();
5194 g = (tok == ':' && gnu_ext);
5195 if (c < 0) {
5196 /* needed to avoid having different registers saved in
5197 each branch */
5198 if (is_float(vtop->type.t)) {
5199 rc = RC_FLOAT;
5200 #ifdef TCC_TARGET_X86_64
5201 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5202 rc = RC_ST0;
5204 #endif
5205 } else
5206 rc = RC_INT;
5207 gv(rc);
5208 save_regs(1);
5209 if (g)
5210 gv_dup();
5211 tt = gvtst(1, 0);
5213 } else {
5214 if (!g)
5215 vpop();
5216 tt = 0;
5219 if (1) {
5220 if (c == 0)
5221 nocode_wanted++;
5222 if (!g)
5223 gexpr();
5225 type1 = vtop->type;
5226 sv = *vtop; /* save value to handle it later */
5227 vtop--; /* no vpop so that FP stack is not flushed */
5228 skip(':');
5230 u = 0;
5231 if (c < 0)
5232 u = gjmp(0);
5233 gsym(tt);
5235 if (c == 0)
5236 nocode_wanted--;
5237 if (c == 1)
5238 nocode_wanted++;
5239 expr_cond();
5240 if (c == 1)
5241 nocode_wanted--;
5243 type2 = vtop->type;
5244 t1 = type1.t;
5245 bt1 = t1 & VT_BTYPE;
5246 t2 = type2.t;
5247 bt2 = t2 & VT_BTYPE;
5248 /* cast operands to correct type according to ISOC rules */
5249 if (is_float(bt1) || is_float(bt2)) {
5250 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5251 type.t = VT_LDOUBLE;
5253 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5254 type.t = VT_DOUBLE;
5255 } else {
5256 type.t = VT_FLOAT;
5258 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5259 /* cast to biggest op */
5260 type.t = VT_LLONG;
5261 /* convert to unsigned if it does not fit in a long long */
5262 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5263 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5264 type.t |= VT_UNSIGNED;
5265 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5266 /* If one is a null ptr constant the result type
5267 is the other. */
5268 if (is_null_pointer (vtop))
5269 type = type1;
5270 else if (is_null_pointer (&sv))
5271 type = type2;
5272 /* XXX: test pointer compatibility, C99 has more elaborate
5273 rules here. */
5274 else
5275 type = type1;
5276 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5277 /* XXX: test function pointer compatibility */
5278 type = bt1 == VT_FUNC ? type1 : type2;
5279 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5280 /* XXX: test structure compatibility */
5281 type = bt1 == VT_STRUCT ? type1 : type2;
5282 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5283 /* NOTE: as an extension, we accept void on only one side */
5284 type.t = VT_VOID;
5285 } else {
5286 /* integer operations */
5287 type.t = VT_INT;
5288 /* convert to unsigned if it does not fit in an integer */
5289 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5290 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5291 type.t |= VT_UNSIGNED;
5293 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5294 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5295 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5296 islv &= c < 0;
5298 /* now we convert second operand */
5299 if (c != 1) {
5300 gen_cast(&type);
5301 if (islv) {
5302 mk_pointer(&vtop->type);
5303 gaddrof();
5304 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5305 gaddrof();
5308 rc = RC_INT;
5309 if (is_float(type.t)) {
5310 rc = RC_FLOAT;
5311 #ifdef TCC_TARGET_X86_64
5312 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5313 rc = RC_ST0;
5315 #endif
5316 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5317 /* for long longs, we use fixed registers to avoid having
5318 to handle a complicated move */
5319 rc = RC_IRET;
5322 tt = r2 = 0;
5323 if (c < 0) {
5324 r2 = gv(rc);
5325 tt = gjmp(0);
5327 gsym(u);
5329 /* this is horrible, but we must also convert first
5330 operand */
5331 if (c != 0) {
5332 *vtop = sv;
5333 gen_cast(&type);
5334 if (islv) {
5335 mk_pointer(&vtop->type);
5336 gaddrof();
5337 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5338 gaddrof();
5341 if (c < 0) {
5342 r1 = gv(rc);
5343 move_reg(r2, r1, type.t);
5344 vtop->r = r2;
5345 gsym(tt);
5346 if (islv)
5347 indir();
5353 static void expr_eq(void)
5355 int t;
5357 expr_cond();
5358 if (tok == '=' ||
5359 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5360 tok == TOK_A_XOR || tok == TOK_A_OR ||
5361 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5362 test_lvalue();
5363 t = tok;
5364 next();
5365 if (t == '=') {
5366 expr_eq();
5367 } else {
5368 vdup();
5369 expr_eq();
5370 gen_op(t & 0x7f);
5372 vstore();
5376 ST_FUNC void gexpr(void)
5378 while (1) {
5379 expr_eq();
5380 if (tok != ',')
5381 break;
5382 vpop();
5383 next();
5387 /* parse an expression and return its type without any side effect. */
5388 static void expr_type(CType *type)
5391 nocode_wanted++;
5392 gexpr();
5393 *type = vtop->type;
5394 vpop();
5395 nocode_wanted--;
5398 /* parse a unary expression and return its type without any side
5399 effect. */
5400 static void unary_type(CType *type)
5402 nocode_wanted++;
5403 unary();
5404 *type = vtop->type;
5405 vpop();
5406 nocode_wanted--;
5409 /* parse a constant expression and return value in vtop. */
5410 static void expr_const1(void)
5412 const_wanted++;
5413 expr_cond();
5414 const_wanted--;
5417 /* parse an integer constant and return its value. */
5418 static inline int64_t expr_const64(void)
5420 int64_t c;
5421 expr_const1();
5422 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5423 expect("constant expression");
5424 c = vtop->c.i;
5425 vpop();
5426 return c;
5429 /* parse an integer constant and return its value.
5430 Complain if it doesn't fit into 32 bits (signed or unsigned). */
5431 ST_FUNC int expr_const(void)
5433 int c;
5434 int64_t wc = expr_const64();
5435 c = wc;
5436 if (c != wc && (unsigned)c != wc)
5437 tcc_error("constant exceeds 32 bit");
5438 return c;
5441 /* return the label token if current token is a label, otherwise
5442 return zero */
5443 static int is_label(void)
5445 int last_tok;
5447 /* fast test first */
5448 if (tok < TOK_UIDENT)
5449 return 0;
5450 /* no need to save tokc because tok is an identifier */
5451 last_tok = tok;
5452 next();
5453 if (tok == ':') {
5454 next();
5455 return last_tok;
5456 } else {
5457 unget_tok(last_tok);
5458 return 0;
5462 static void label_or_decl(int l)
5464 int last_tok;
5466 /* fast test first */
5467 if (tok >= TOK_UIDENT)
5469 /* no need to save tokc because tok is an identifier */
5470 last_tok = tok;
5471 next();
5472 if (tok == ':') {
5473 unget_tok(last_tok);
5474 return;
5476 unget_tok(last_tok);
5478 decl(l);
5481 #ifndef TCC_TARGET_ARM64
5482 static void gfunc_return(CType *func_type)
5484 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5485 CType type, ret_type;
5486 int ret_align, ret_nregs, regsize;
5487 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5488 &ret_align, &regsize);
5489 if (0 == ret_nregs) {
5490 /* if returning structure, must copy it to implicit
5491 first pointer arg location */
5492 type = *func_type;
5493 mk_pointer(&type);
5494 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5495 indir();
5496 vswap();
5497 /* copy structure value to pointer */
5498 vstore();
5499 } else {
5500 /* returning structure packed into registers */
5501 int r, size, addr, align;
5502 size = type_size(func_type,&align);
5503 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5504 (vtop->c.i & (ret_align-1)))
5505 && (align & (ret_align-1))) {
5506 loc = (loc - size) & -ret_align;
5507 addr = loc;
5508 type = *func_type;
5509 vset(&type, VT_LOCAL | VT_LVAL, addr);
5510 vswap();
5511 vstore();
5512 vpop();
5513 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5515 vtop->type = ret_type;
5516 if (is_float(ret_type.t))
5517 r = rc_fret(ret_type.t);
5518 else
5519 r = RC_IRET;
5521 if (ret_nregs == 1)
5522 gv(r);
5523 else {
5524 for (;;) {
5525 vdup();
5526 gv(r);
5527 vpop();
5528 if (--ret_nregs == 0)
5529 break;
5530 /* We assume that when a structure is returned in multiple
5531 registers, their classes are consecutive values of the
5532 sequence s(n) = 2^n */
5533 r <<= 1;
5534 vtop->c.i += regsize;
5538 } else if (is_float(func_type->t)) {
5539 gv(rc_fret(func_type->t));
5540 } else {
5541 gv(RC_IRET);
5543 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5545 #endif
5547 static int case_cmp(const void *pa, const void *pb)
5549 int64_t a = (*(struct case_t**) pa)->v1;
5550 int64_t b = (*(struct case_t**) pb)->v1;
5551 return a < b ? -1 : a > b;
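/* Match the switch value on top of the value stack against the
   sorted case ranges base[0..len-1]: binary search while more
   than 4 ranges remain, then a linear scan over the rest. */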
5554 static void gcase(struct case_t **base, int len, int *bsym)
5556 struct case_t *p;
5557 int e;
5558 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5559 gv(RC_INT);
5560 while (len > 4) {
5561 /* binary search */
5562 p = base[len/2];
5563 vdup();
5564 if (ll)
5565 vpushll(p->v2);
5566 else
5567 vpushi(p->v2);
5568 gen_op(TOK_LE);
5569 e = gtst(1, 0);
5570 vdup();
5571 if (ll)
5572 vpushll(p->v1);
5573 else
5574 vpushi(p->v1);
5575 gen_op(TOK_GE);
5576 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5577 /* x < v1 */
5578 gcase(base, len/2, bsym);
5579 if (cur_switch->def_sym)
5580 gjmp_addr(cur_switch->def_sym);
5581 else
5582 *bsym = gjmp(*bsym);
5583 /* x > v2 */
5584 gsym(e);
5585 e = len/2 + 1;
5586 base += e; len -= e;
5588 /* linear scan */
5589 while (len--) {
5590 p = *base++;
5591 vdup();
5592 if (ll)
5593 vpushll(p->v2);
5594 else
5595 vpushi(p->v2);
5596 if (p->v1 == p->v2) {
5597 gen_op(TOK_EQ);
5598 gtst_addr(0, p->sym);
5599 } else {
5600 gen_op(TOK_LE);
5601 e = gtst(1, 0);
5602 vdup();
5603 if (ll)
5604 vpushll(p->v1);
5605 else
5606 vpushi(p->v1);
5607 gen_op(TOK_GE);
5608 gtst_addr(0, p->sym);
5609 gsym(e);
5614 static void block(int *bsym, int *csym, int is_expr)
5616 int a, b, c, d, cond;
5617 Sym *s;
5619 /* generate line number info */
5620 if (tcc_state->do_debug)
5621 tcc_debug_line(tcc_state);
5623 if (is_expr) {
5624 /* default return value is (void) */
5625 vpushi(0);
5626 vtop->type.t = VT_VOID;
5629 if (tok == TOK_IF) {
5630 /* if test */
5631 int saved_nocode_wanted = nocode_wanted;
5632 next();
5633 skip('(');
5634 gexpr();
5635 skip(')');
5636 cond = condition_3way();
5637 if (cond == 1)
5638 a = 0, vpop();
5639 else
5640 a = gvtst(1, 0);
5641 if (cond == 0)
5642 nocode_wanted |= 0x20000000;
5643 block(bsym, csym, 0);
5644 if (cond != 1)
5645 nocode_wanted = saved_nocode_wanted;
5646 c = tok;
5647 if (c == TOK_ELSE) {
5648 next();
5649 d = gjmp(0);
5650 gsym(a);
5651 if (cond == 1)
5652 nocode_wanted |= 0x20000000;
5653 block(bsym, csym, 0);
5654 gsym(d); /* patch else jmp */
5655 if (cond != 0)
5656 nocode_wanted = saved_nocode_wanted;
5657 } else
5658 gsym(a);
5659 } else if (tok == TOK_WHILE) {
5660 int saved_nocode_wanted;
5661 nocode_wanted &= ~0x20000000;
5662 next();
5663 d = ind;
5664 vla_sp_restore();
5665 skip('(');
5666 gexpr();
5667 skip(')');
5668 a = gvtst(1, 0);
5669 b = 0;
5670 ++local_scope;
5671 saved_nocode_wanted = nocode_wanted;
5672 block(&a, &b, 0);
5673 nocode_wanted = saved_nocode_wanted;
5674 --local_scope;
5675 gjmp_addr(d);
5676 gsym(a);
5677 gsym_addr(b, d);
5678 } else if (tok == '{') {
5679 Sym *llabel;
5680 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5682 next();
5683 /* record local declaration stack position */
5684 s = local_stack;
5685 llabel = local_label_stack;
5686 ++local_scope;
5689 /* handle local label declarations */
5689 if (tok == TOK_LABEL) {
5690 next();
5691 for(;;) {
5692 if (tok < TOK_UIDENT)
5693 expect("label identifier");
5694 label_push(&local_label_stack, tok, LABEL_DECLARED);
5695 next();
5696 if (tok == ',') {
5697 next();
5698 } else {
5699 skip(';');
5700 break;
5704 while (tok != '}') {
5705 label_or_decl(VT_LOCAL);
5706 if (tok != '}') {
5707 if (is_expr)
5708 vpop();
5709 block(bsym, csym, is_expr);
5712 /* pop locally defined labels */
5713 label_pop(&local_label_stack, llabel);
5714 /* pop locally defined symbols */
5715 --local_scope;
5716 /* In the is_expr case (a statement expression is finished here),
5717 vtop might refer to symbols on the local_stack. Either via the
5718 type or via vtop->sym. We can't pop those nor any that in turn
5719 might be referred to. To make it easier we don't roll back
5720 any symbols in that case; some upper level call to block() will
5721 do that. We do have to remove such symbols from the lookup
5722 tables, though. sym_pop will do that. */
5723 sym_pop(&local_stack, s, is_expr);
5725 /* Pop VLA frames and restore stack pointer if required */
5726 if (vlas_in_scope > saved_vlas_in_scope) {
5727 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5728 vla_sp_restore();
5730 vlas_in_scope = saved_vlas_in_scope;
5732 next();
5733 } else if (tok == TOK_RETURN) {
5734 next();
5735 if (tok != ';') {
5736 gexpr();
5737 gen_assign_cast(&func_vt);
5738 gfunc_return(&func_vt);
5740 skip(';');
5741 /* jump unless last stmt in top-level block */
5742 if (tok != '}' || local_scope != 1)
5743 rsym = gjmp(rsym);
5744 nocode_wanted |= 0x20000000;
5745 } else if (tok == TOK_BREAK) {
5746 /* compute jump */
5747 if (!bsym)
5748 tcc_error("cannot break");
5749 *bsym = gjmp(*bsym);
5750 next();
5751 skip(';');
5752 nocode_wanted |= 0x20000000;
5753 } else if (tok == TOK_CONTINUE) {
5754 /* compute jump */
5755 if (!csym)
5756 tcc_error("cannot continue");
5757 vla_sp_restore_root();
5758 *csym = gjmp(*csym);
5759 next();
5760 skip(';');
5761 } else if (tok == TOK_FOR) {
5762 int e;
5763 int saved_nocode_wanted;
5764 nocode_wanted &= ~0x20000000;
5765 next();
5766 skip('(');
5767 s = local_stack;
5768 ++local_scope;
5769 if (tok != ';') {
5770 /* c99 for-loop init decl? */
5771 if (!decl0(VT_LOCAL, 1)) {
5772 /* no, regular for-loop init expr */
5773 gexpr();
5774 vpop();
5777 skip(';');
5778 d = ind;
5779 c = ind;
5780 vla_sp_restore();
5781 a = 0;
5782 b = 0;
5783 if (tok != ';') {
5784 gexpr();
5785 a = gvtst(1, 0);
5787 skip(';');
5788 if (tok != ')') {
5789 e = gjmp(0);
5790 c = ind;
5791 vla_sp_restore();
5792 gexpr();
5793 vpop();
5794 gjmp_addr(d);
5795 gsym(e);
5797 skip(')');
5798 saved_nocode_wanted = nocode_wanted;
5799 block(&a, &b, 0);
5800 nocode_wanted = saved_nocode_wanted;
5801 gjmp_addr(c);
5802 gsym(a);
5803 gsym_addr(b, c);
5804 --local_scope;
5805 sym_pop(&local_stack, s, 0);
5807 } else
5808 if (tok == TOK_DO) {
5809 int saved_nocode_wanted;
5810 nocode_wanted &= ~0x20000000;
5811 next();
5812 a = 0;
5813 b = 0;
5814 d = ind;
5815 vla_sp_restore();
5816 saved_nocode_wanted = nocode_wanted;
5817 block(&a, &b, 0);
5818 skip(TOK_WHILE);
5819 skip('(');
5820 gsym(b);
5821 gexpr();
5822 c = gvtst(0, 0);
5823 gsym_addr(c, d);
5824 nocode_wanted = saved_nocode_wanted;
5825 skip(')');
5826 gsym(a);
5827 skip(';');
5828 } else
5829 if (tok == TOK_SWITCH) {
5830 struct switch_t *saved, sw;
5831 int saved_nocode_wanted = nocode_wanted;
5832 SValue switchval;
5833 next();
5834 skip('(');
5835 gexpr();
5836 skip(')');
5837 switchval = *vtop--;
5838 a = 0;
5839 b = gjmp(0); /* jump to first case */
5840 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5841 saved = cur_switch;
5842 cur_switch = &sw;
5843 block(&a, csym, 0);
5844 nocode_wanted = saved_nocode_wanted;
5845 a = gjmp(a); /* add implicit break */
5846 /* case lookup */
5847 gsym(b);
5848 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5849 for (b = 1; b < sw.n; b++)
5850 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5851 tcc_error("duplicate case value");
5852 /* Our switch table sorting is signed, so the compared
5853 value needs to be as well when it's 64bit. */
5854 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5855 switchval.type.t &= ~VT_UNSIGNED;
5856 vpushv(&switchval);
5857 gcase(sw.p, sw.n, &a);
5858 vpop();
5859 if (sw.def_sym)
5860 gjmp_addr(sw.def_sym);
5861 dynarray_reset(&sw.p, &sw.n);
5862 cur_switch = saved;
5863 /* break label */
5864 gsym(a);
5865 } else
5866 if (tok == TOK_CASE) {
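/* e.g. 'case 3:' or, with gnu_ext, the range form 'case 1 ... 5:' */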
5867 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5868 if (!cur_switch)
5869 expect("switch");
5870 nocode_wanted &= ~0x20000000;
5871 next();
5872 cr->v1 = cr->v2 = expr_const64();
5873 if (gnu_ext && tok == TOK_DOTS) {
5874 next();
5875 cr->v2 = expr_const64();
5876 if (cr->v2 < cr->v1)
5877 tcc_warning("empty case range");
5879 cr->sym = ind;
5880 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
5881 skip(':');
5882 is_expr = 0;
5883 goto block_after_label;
5884 } else
5885 if (tok == TOK_DEFAULT) {
5886 next();
5887 skip(':');
5888 if (!cur_switch)
5889 expect("switch");
5890 if (cur_switch->def_sym)
5891 tcc_error("too many 'default'");
5892 cur_switch->def_sym = ind;
5893 is_expr = 0;
5894 goto block_after_label;
5895 } else
5896 if (tok == TOK_GOTO) {
5897 next();
5898 if (tok == '*' && gnu_ext) {
5899 /* computed goto */
5900 next();
5901 gexpr();
5902 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5903 expect("pointer");
5904 ggoto();
5905 } else if (tok >= TOK_UIDENT) {
5906 s = label_find(tok);
5907 /* put forward definition if needed */
5908 if (!s) {
5909 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5910 } else {
5911 if (s->r == LABEL_DECLARED)
5912 s->r = LABEL_FORWARD;
5914 vla_sp_restore_root();
5915 if (s->r & LABEL_FORWARD)
5916 s->jnext = gjmp(s->jnext);
5917 else
5918 gjmp_addr(s->jnext);
5919 next();
5920 } else {
5921 expect("label identifier");
5923 skip(';');
5924 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5925 asm_instr();
5926 } else {
5927 b = is_label();
5928 if (b) {
5929 /* label case */
5930 s = label_find(b);
5931 if (s) {
5932 if (s->r == LABEL_DEFINED)
5933 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5934 gsym(s->jnext);
5935 s->r = LABEL_DEFINED;
5936 } else {
5937 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5939 s->jnext = ind;
5940 vla_sp_restore();
5941 /* we accept this, but it is a mistake */
5942 block_after_label:
5943 nocode_wanted &= ~0x20000000;
5944 if (tok == '}') {
5945 tcc_warning("deprecated use of label at end of compound statement");
5946 } else {
5947 if (is_expr)
5948 vpop();
5949 block(bsym, csym, is_expr);
5951 } else {
5952 /* expression case */
5953 if (tok != ';') {
5954 if (is_expr) {
5955 vpop();
5956 gexpr();
5957 } else {
5958 gexpr();
5959 vpop();
5962 skip(';');
5967 #define EXPR_CONST 1
5968 #define EXPR_ANY 2
5970 static void parse_init_elem(int expr_type)
5972 int saved_global_expr;
5973 switch(expr_type) {
5974 case EXPR_CONST:
5975 /* compound literals must be allocated globally in this case */
5976 saved_global_expr = global_expr;
5977 global_expr = 1;
5978 expr_const1();
5979 global_expr = saved_global_expr;
5980 /* NOTE: symbols are accepted */
5981 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5982 #ifdef TCC_TARGET_PE
5983 || (vtop->type.t & VT_IMPORT)
5984 #endif
5986 tcc_error("initializer element is not constant");
5987 break;
5988 case EXPR_ANY:
5989 expr_eq();
5990 break;
5994 /* t is the array or struct type. c is the array or struct
5995 address. cur_field is the pointer to the current
5996 value, for arrays the 'c' member contains the current start
5997 index and the 'r' contains the end index (in case of range init).
5998 'size_only' is true if only size info is needed (only used
5999 in arrays) */
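/* Designators look like '.field = x' or '[2] = y'; with gnu_ext
   also the range form '[0 ... 3] = z' (ranges are only supported
   as the last designator) and the old 'field: x' syntax. */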
6000 static void decl_designator(CType *type, Section *sec, unsigned long c,
6001 Sym **cur_field, int size_only)
6003 Sym *s, *f;
6004 int notfirst, index, index_last, align, l, nb_elems, elem_size;
6005 CType type1;
6007 notfirst = 0;
6008 elem_size = 0;
6009 nb_elems = 1;
6010 if (gnu_ext && (l = is_label()) != 0)
6011 goto struct_field;
6012 while (tok == '[' || tok == '.') {
6013 if (tok == '[') {
6014 if (!(type->t & VT_ARRAY))
6015 expect("array type");
6016 s = type->ref;
6017 next();
6018 index = expr_const();
6019 if (index < 0 || (s->c >= 0 && index >= s->c))
6020 tcc_error("invalid index");
6021 if (tok == TOK_DOTS && gnu_ext) {
6022 next();
6023 index_last = expr_const();
6024 if (index_last < 0 ||
6025 (s->c >= 0 && index_last >= s->c) ||
6026 index_last < index)
6027 tcc_error("invalid index");
6028 } else {
6029 index_last = index;
6031 skip(']');
6032 if (!notfirst) {
6033 (*cur_field)->c = index;
6034 (*cur_field)->r = index_last;
6036 type = pointed_type(type);
6037 elem_size = type_size(type, &align);
6038 c += index * elem_size;
6039 /* NOTE: we only support ranges for last designator */
6040 nb_elems = index_last - index + 1;
6041 if (nb_elems != 1) {
6042 notfirst = 1;
6043 break;
6045 } else {
6046 next();
6047 l = tok;
6048 next();
6049 struct_field:
6050 if ((type->t & VT_BTYPE) != VT_STRUCT)
6051 expect("struct/union type");
6052 f = find_field(type, l);
6053 if (!f)
6054 expect("field");
6055 if (!notfirst)
6056 *cur_field = f;
6057 /* XXX: fix this mess by using explicit storage field */
6058 type1 = f->type;
6059 type1.t |= (type->t & ~VT_TYPE);
6060 type = &type1;
6061 c += f->c;
6063 notfirst = 1;
6065 if (notfirst) {
6066 if (tok == '=') {
6067 next();
6068 } else {
6069 if (!gnu_ext)
6070 expect("=");
6072 } else {
6073 if (type->t & VT_ARRAY) {
6074 index = (*cur_field)->c;
6075 if (type->ref->c >= 0 && index >= type->ref->c)
6076 tcc_error("index too large");
6077 type = pointed_type(type);
6078 c += index * type_size(type, &align);
6079 } else {
6080 f = *cur_field;
6081 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6082 *cur_field = f = f->next;
6083 if (!f)
6084 tcc_error("too many field init");
6085 /* XXX: fix this mess by using explicit storage field */
6086 type1 = f->type;
6087 type1.t |= (type->t & ~VT_TYPE);
6088 type = &type1;
6089 c += f->c;
6092 decl_initializer(type, sec, c, 0, size_only);
6094 /* XXX: make it more general */
6095 if (!size_only && nb_elems > 1) {
6096 unsigned long c_end;
6097 uint8_t *src, *dst;
6098 int i;
6100 if (!sec) {
6101 vset(type, VT_LOCAL|VT_LVAL, c);
6102 for (i = 1; i < nb_elems; i++) {
6103 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6104 vswap();
6105 vstore();
6107 vpop();
6108 } else {
6109 c_end = c + nb_elems * elem_size;
6110 if (c_end > sec->data_allocated)
6111 section_realloc(sec, c_end);
6112 src = sec->data + c;
6113 dst = src;
6114 for(i = 1; i < nb_elems; i++) {
6115 dst += elem_size;
6116 memcpy(dst, src, elem_size);
6122 /* store a value or an expression directly in global data or in local array */
6123 static void init_putv(CType *type, Section *sec, unsigned long c)
6125 int bt, bit_pos, bit_size;
6126 void *ptr;
6127 unsigned long long bit_mask;
6128 CType dtype;
6130 dtype = *type;
6131 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6133 if (sec) {
6134 int size, align;
6135 /* XXX: not portable */
6136 /* XXX: generate error if incorrect relocation */
6137 gen_assign_cast(&dtype);
6138 bt = type->t & VT_BTYPE;
6139 size = type_size(type, &align);
6140 if (c + size > sec->data_allocated) {
6141 section_realloc(sec, c + size);
6143 ptr = sec->data + c;
6144 /* XXX: make code faster ? */
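/* For a bitfield the value is masked to bit_size bits, shifted to
   bit_pos and OR-ed into the storage unit, so that e.g. the two
   initializers of 'struct { int a:3, b:5; } s = { 2, 9 };' end up
   packed into the same word. */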
6145 if (!(type->t & VT_BITFIELD)) {
6146 bit_pos = 0;
6147 bit_size = PTR_SIZE * 8;
6148 bit_mask = -1LL;
6149 } else {
6150 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6151 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6152 bit_mask = (1LL << bit_size) - 1;
6154 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6155 vtop->sym->v >= SYM_FIRST_ANOM &&
6156 /* XXX This rejects compound literals like
6157 '(void *){ptr}'. The problem is that '&sym' is
6158 represented the same way, which would be ruled out
6159 by the SYM_FIRST_ANOM check above, but also '"string"'
6160 in 'char *p = "string"' is represented the same
6161 with the type being VT_PTR and the symbol being an
6162 anonymous one. That is, there's no difference in vtop
6163 between '(void *){x}' and '&(void *){x}'. Ignore
6164 pointer typed entities here. Hopefully no real code
6165 will ever use compound literals with scalar types. */
6166 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6167 /* These come from compound literals, memcpy stuff over. */
6168 Section *ssec;
6169 ElfW(Sym) *esym;
6170 ElfW_Rel *rel;
6171 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6172 ssec = tcc_state->sections[esym->st_shndx];
6173 memmove (ptr, ssec->data + esym->st_value, size);
6174 if (ssec->reloc) {
6175 /* We need to copy over all memory contents, and that
6176 includes relocations. Use the fact that relocs are
6177 created in order, so look from the end of the relocs
6178 until we hit one before the copied region. */
6179 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6180 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6181 while (num_relocs--) {
6182 rel--;
6183 if (rel->r_offset >= esym->st_value + size)
6184 continue;
6185 if (rel->r_offset < esym->st_value)
6186 break;
6187 /* Note: if the same fields are initialized multiple
6188 times (possible with designators) then we possibly
6189 add multiple relocations for the same offset here.
6190 That would lead to wrong code, the last reloc needs
6191 to win. We clean this up later after the whole
6192 initializer is parsed. */
6193 put_elf_reloca(symtab_section, sec,
6194 c + rel->r_offset - esym->st_value,
6195 ELFW(R_TYPE)(rel->r_info),
6196 ELFW(R_SYM)(rel->r_info),
6197 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6198 rel->r_addend
6199 #else
6201 #endif
6205 } else {
6206 if ((vtop->r & VT_SYM) &&
6207 (bt == VT_BYTE ||
6208 bt == VT_SHORT ||
6209 bt == VT_DOUBLE ||
6210 bt == VT_LDOUBLE ||
6211 #if PTR_SIZE == 8
6212 (bt == VT_LLONG && bit_size != 64) ||
6213 bt == VT_INT
6214 #else
6215 bt == VT_LLONG ||
6216 (bt == VT_INT && bit_size != 32)
6217 #endif
6219 tcc_error("initializer element is not computable at load time");
6220 switch(bt) {
6221 /* XXX: when cross-compiling we assume that each type has the
6222 same representation on host and target, which is likely to
6223 be wrong in the case of long double */
6224 case VT_BOOL:
6225 vtop->c.i = (vtop->c.i != 0);
6226 case VT_BYTE:
6227 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6228 break;
6229 case VT_SHORT:
6230 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6231 break;
6232 case VT_DOUBLE:
6233 *(double *)ptr = vtop->c.d;
6234 break;
6235 case VT_LDOUBLE:
6236 if (sizeof(long double) == LDOUBLE_SIZE)
6237 *(long double *)ptr = vtop->c.ld;
6238 else if (sizeof(double) == LDOUBLE_SIZE)
6239 *(double *)ptr = vtop->c.ld;
6240 else
6241 tcc_error("can't cross compile long double constants");
6242 break;
6243 #if PTR_SIZE != 8
6244 case VT_LLONG:
6245 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6246 break;
6247 #else
6248 case VT_LLONG:
6249 #endif
6250 case VT_PTR:
6252 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6253 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6254 if (vtop->r & VT_SYM)
6255 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6256 else
6257 *(addr_t *)ptr |= val;
6258 #else
6259 if (vtop->r & VT_SYM)
6260 greloc(sec, vtop->sym, c, R_DATA_PTR);
6261 *(addr_t *)ptr |= val;
6262 #endif
6263 break;
6265 default:
6267 int val = (vtop->c.i & bit_mask) << bit_pos;
6268 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6269 if (vtop->r & VT_SYM)
6270 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6271 else
6272 *(int *)ptr |= val;
6273 #else
6274 if (vtop->r & VT_SYM)
6275 greloc(sec, vtop->sym, c, R_DATA_PTR);
6276 *(int *)ptr |= val;
6277 #endif
6278 break;
6282 vtop--;
6283 } else {
6284 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6285 vswap();
6286 vstore();
6287 vpop();
6291 /* put zeros for variable based init */
6292 static void init_putz(Section *sec, unsigned long c, int size)
6294 if (sec) {
6295 /* nothing to do because globals are already set to zero */
6296 } else {
6297 vpush_global_sym(&func_old_type, TOK_memset);
6298 vseti(VT_LOCAL, c);
6299 #ifdef TCC_TARGET_ARM
6300 vpushs(size);
6301 vpushi(0);
6302 #else
6303 vpushi(0);
6304 vpushs(size);
6305 #endif
6306 gfunc_call(3);
6310 /* 't' contains the type and storage info. 'c' is the offset of the
6311 object in section 'sec'. If 'sec' is NULL, it means stack based
6312 allocation. 'first' is true if array '{' must be read (multi
6313 dimension implicit array init handling). 'size_only' is true if
6314 size only evaluation is wanted (only for arrays). */
6315 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6316 int first, int size_only)
6318 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6319 int size1, align1;
6320 int have_elem;
6321 Sym *s, *f;
6322 Sym indexsym;
6323 CType *t1;
6325 /* If we currently are at an '}' or ',' we have read an initializer
6326 element in one of our callers, and not yet consumed it. */
6327 have_elem = tok == '}' || tok == ',';
6328 if (!have_elem && tok != '{' &&
6329 /* In case of strings we have special handling for arrays, so
6330 don't consume them as initializer value (which would commit them
6331 to some anonymous symbol). */
6332 tok != TOK_LSTR && tok != TOK_STR &&
6333 !size_only) {
6334 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6335 have_elem = 1;
6338 if (have_elem &&
6339 !(type->t & VT_ARRAY) &&
6340 /* Use i_c_parameter_t to strip toplevel qualifiers.
6341 The source type might have VT_CONSTANT set, which is
6342 of course assignable to non-const elements. */
6343 is_compatible_parameter_types(type, &vtop->type)) {
6344 init_putv(type, sec, c);
6345 } else if (type->t & VT_ARRAY) {
6346 s = type->ref;
6347 n = s->c;
6348 array_length = 0;
6349 t1 = pointed_type(type);
6350 size1 = type_size(t1, &align1);
6352 no_oblock = 1;
6353 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6354 tok == '{') {
6355 if (tok != '{')
6356 tcc_error("character array initializer must be a literal,"
6357 " optionally enclosed in braces");
6358 skip('{');
6359 no_oblock = 0;
6362 /* only parse strings here if correct type (otherwise: handle
6363 them as ((w)char *) expressions) */
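/* e.g. 'char s[] = "abc";', or a wide string literal initializing
   a wchar_t array; the characters are stored directly into the
   array elements below */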
6364 if ((tok == TOK_LSTR &&
6365 #ifdef TCC_TARGET_PE
6366 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6367 #else
6368 (t1->t & VT_BTYPE) == VT_INT
6369 #endif
6370 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6371 while (tok == TOK_STR || tok == TOK_LSTR) {
6372 int cstr_len, ch;
6374 /* compute maximum number of chars wanted */
6375 if (tok == TOK_STR)
6376 cstr_len = tokc.str.size;
6377 else
6378 cstr_len = tokc.str.size / sizeof(nwchar_t);
6379 cstr_len--;
6380 nb = cstr_len;
6381 if (n >= 0 && nb > (n - array_length))
6382 nb = n - array_length;
6383 if (!size_only) {
6384 if (cstr_len > nb)
6385 tcc_warning("initializer-string for array is too long");
6386 /* in order to go faster for the common case (a char
6387 string in a global variable), we handle it
6388 specifically */
6389 if (sec && tok == TOK_STR && size1 == 1) {
6390 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6391 } else {
6392 for(i=0;i<nb;i++) {
6393 if (tok == TOK_STR)
6394 ch = ((unsigned char *)tokc.str.data)[i];
6395 else
6396 ch = ((nwchar_t *)tokc.str.data)[i];
6397 vpushi(ch);
6398 init_putv(t1, sec, c + (array_length + i) * size1);
6402 array_length += nb;
6403 next();
6405 /* only add trailing zero if enough storage (no
6406 warning in this case since it is standard) */
6407 if (n < 0 || array_length < n) {
6408 if (!size_only) {
6409 vpushi(0);
6410 init_putv(t1, sec, c + (array_length * size1));
6412 array_length++;
6414 } else {
6415 indexsym.c = 0;
6416 indexsym.r = 0;
6417 f = &indexsym;
6419 do_init_list:
6420 while (tok != '}' || have_elem) {
6421 decl_designator(type, sec, c, &f, size_only);
6422 have_elem = 0;
6423 index = f->c;
6424 /* must put zero in holes (note that doing it that way
6425 ensures that it even works with designators) */
6426 if (!size_only && array_length < index) {
6427 init_putz(sec, c + array_length * size1,
6428 (index - array_length) * size1);
6430 if (type->t & VT_ARRAY) {
6431 index = indexsym.c = ++indexsym.r;
6432 } else {
6433 index = index + type_size(&f->type, &align1);
6434 if (s->type.t == TOK_UNION)
6435 f = NULL;
6436 else
6437 f = f->next;
6439 if (index > array_length)
6440 array_length = index;
6442 if (type->t & VT_ARRAY) {
6443 /* special test for multi dimensional arrays (may not
6444 be strictly correct if designators are used at the
6445 same time) */
6446 if (no_oblock && index >= n)
6447 break;
6448 } else {
6449 if (no_oblock && f == NULL)
6450 break;
6452 if (tok == '}')
6453 break;
6454 skip(',');
6457 /* put zeros at the end */
6458 if (!size_only && array_length < n) {
6459 init_putz(sec, c + array_length * size1,
6460 (n - array_length) * size1);
6462 if (!no_oblock)
6463 skip('}');
6464 /* patch type size if needed, which happens only for array types */
6465 if (n < 0)
6466 s->c = array_length;
6467 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6468 size1 = 1;
6469 no_oblock = 1;
6470 if (first || tok == '{') {
6471 skip('{');
6472 no_oblock = 0;
6474 s = type->ref;
6475 f = s->next;
6476 array_length = 0;
6477 n = s->c;
6478 goto do_init_list;
6479 } else if (tok == '{') {
6480 next();
6481 decl_initializer(type, sec, c, first, size_only);
6482 skip('}');
6483 } else if (size_only) {
6484 /* If we supported only ISO C we wouldn't have to accept calling
6485 this on anything other than an array with size_only==1 (and even then
6486 only on the outermost level, so no recursion would be needed),
6487 because initializing a flex array member isn't supported.
6488 But GNU C supports it, so we need to recurse even into
6489 subfields of structs and arrays when size_only is set. */
6490 /* just skip expression */
6491 parlevel = parlevel1 = 0;
6492 while ((parlevel > 0 || parlevel1 > 0 ||
6493 (tok != '}' && tok != ',')) && tok != -1) {
6494 if (tok == '(')
6495 parlevel++;
6496 else if (tok == ')') {
6497 if (parlevel == 0 && parlevel1 == 0)
6498 break;
6499 parlevel--;
6501 else if (tok == '{')
6502 parlevel1++;
6503 else if (tok == '}') {
6504 if (parlevel == 0 && parlevel1 == 0)
6505 break;
6506 parlevel1--;
6508 next();
6510 } else {
6511 if (!have_elem) {
6512 /* This should happen only when we haven't parsed
6513 the init element above for fear of committing a
6514 string constant to memory too early. */
6515 if (tok != TOK_STR && tok != TOK_LSTR)
6516 expect("string constant");
6517 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6519 init_putv(type, sec, c);
6523 /* parse an initializer for type 't' if 'has_init' is non zero, and
6524 allocate space in local or global data space ('r' is either
6525 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6526 variable 'v' of scope 'scope' is declared before initializers
6527 are parsed. If 'v' is zero, then a reference to the new object
6528 is put in the value stack. If 'has_init' is 2, a special parsing
6529 is done to handle string constants. */
6530 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6531 int has_init, int v, int scope)
6533 int size, align, addr, data_offset;
6534 int level;
6535 ParseState saved_parse_state = {0};
6536 TokenString *init_str = NULL;
6537 Section *sec;
6538 Sym *flexible_array;
6540 flexible_array = NULL;
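/* Detect a trailing flexible array member, e.g. the 'data' field
   in 'struct S { int n; int data[]; };', so that an initializer
   for it can enlarge the allocation. */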
6541 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6542 Sym *field = type->ref->next;
6543 if (field) {
6544 while (field->next)
6545 field = field->next;
6546 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6547 flexible_array = field;
6551 size = type_size(type, &align);
6552 /* If unknown size, we must evaluate it before
6553 evaluating initializers because
6554 initializers can generate global data too
6555 (e.g. string pointers or ISOC99 compound
6556 literals). It also simplifies the handling
6557 of local initializers */
6558 if (size < 0 || (flexible_array && has_init)) {
6559 if (!has_init)
6560 tcc_error("unknown type size");
6561 /* get all init string */
6562 init_str = tok_str_alloc();
6563 if (has_init == 2) {
6564 /* only get strings */
6565 while (tok == TOK_STR || tok == TOK_LSTR) {
6566 tok_str_add_tok(init_str);
6567 next();
6569 } else {
6570 level = 0;
6571 while (level > 0 || (tok != ',' && tok != ';')) {
6572 if (tok < 0)
6573 tcc_error("unexpected end of file in initializer");
6574 tok_str_add_tok(init_str);
6575 if (tok == '{')
6576 level++;
6577 else if (tok == '}') {
6578 level--;
6579 if (level <= 0) {
6580 next();
6581 break;
6584 next();
6587 tok_str_add(init_str, -1);
6588 tok_str_add(init_str, 0);
6590 /* compute size */
6591 save_parse_state(&saved_parse_state);
6593 begin_macro(init_str, 1);
6594 next();
6595 decl_initializer(type, NULL, 0, 1, 1);
6596 /* prepare second initializer parsing */
6597 macro_ptr = init_str->str;
6598 next();
6600 /* if still unknown size, error */
6601 size = type_size(type, &align);
6602 if (size < 0)
6603 tcc_error("unknown type size");
6605 /* If there's a flex member and it was used in the initializer,
6606 adjust the size. */
6607 if (flexible_array &&
6608 flexible_array->type.ref->c > 0)
6609 size += flexible_array->type.ref->c
6610 * pointed_size(&flexible_array->type);
6611 /* take into account specified alignment if bigger */
6612 if (ad->a.aligned) {
6613 int speca = 1 << (ad->a.aligned - 1);
6614 if (speca > align)
6615 align = speca;
6616 } else if (ad->a.packed) {
6617 align = 1;
6619 if ((r & VT_VALMASK) == VT_LOCAL) {
6620 sec = NULL;
6621 #ifdef CONFIG_TCC_BCHECK
6622 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6623 loc--;
6625 #endif
6626 loc = (loc - size) & -align;
6627 addr = loc;
6628 #ifdef CONFIG_TCC_BCHECK
6629 /* handles bounds */
6630 /* XXX: currently, since we do only one pass, we cannot track
6631 '&' operators, so we add only arrays */
6632 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6633 addr_t *bounds_ptr;
6634 /* add padding between regions */
6635 loc--;
6636 /* then add local bound info */
6637 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6638 bounds_ptr[0] = addr;
6639 bounds_ptr[1] = size;
6641 #endif
6642 if (v) {
6643 /* local variable */
6644 #ifdef CONFIG_TCC_ASM
6645 if (ad->asm_label) {
6646 int reg = asm_parse_regvar(ad->asm_label);
6647 if (reg >= 0)
6648 r = (r & ~VT_VALMASK) | reg;
6649 }
6650 #endif
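/* Illustrative example: 'register int x asm("esi");' makes
   asm_parse_regvar() return the register number, so the local
   variable lives in that register instead of on the stack. */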
6651 sym_push(v, type, r, addr);
6652 } else {
6653 /* push local reference */
6654 vset(type, r, addr);
6655 }
6656 } else {
6657 Sym *sym = NULL;
6658 if (v && scope == VT_CONST) {
6659 /* see if the symbol was already defined */
6660 sym = sym_find(v);
6661 if (sym) {
6662 patch_storage(sym, type);
6663 if (sym->type.t & VT_EXTERN) {
6664 /* if the variable is extern, it was not allocated */
6665 sym->type.t &= ~VT_EXTERN;
6666 /* set array size if it was omitted in extern
6667 declaration */
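/* Illustrative example:
       extern int tab[];
       int tab[10];
   the second declaration completes the size of the first. */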
6668 if ((sym->type.t & VT_ARRAY) &&
6669 sym->type.ref->c < 0 &&
6670 type->ref->c >= 0)
6671 sym->type.ref->c = type->ref->c;
6672 } else if (!has_init) {
6673 /* we accept several definitions of the same
6674 global variable. This is tricky, because we
6675 must play with the SHN_COMMON type of the symbol */
6676 /* no init data, we won't add more to the symbol */
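/* Illustrative example: 'int x;' written twice at file scope is
   accepted here; both declarations stay tentative (SHN_COMMON)
   until one of them gets an initializer. */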
6677 update_storage(sym);
6678 goto no_alloc;
6679 } else if (sym->c) {
6680 ElfW(Sym) *esym;
6681 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6682 if (esym->st_shndx == data_section->sh_num)
6683 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6688 /* allocate symbol in corresponding section */
6689 sec = ad->section;
6690 if (!sec) {
6691 if (has_init)
6692 sec = data_section;
6693 else if (tcc_state->nocommon)
6694 sec = bss_section;
6695 }
6697 if (sec) {
6698 data_offset = sec->data_offset;
6699 data_offset = (data_offset + align - 1) & -align;
6700 addr = data_offset;
6701 /* very important to advance the section's data_offset now, because
6702 parsing the initializers below can itself emit new data */
6703 data_offset += size;
6704 #ifdef CONFIG_TCC_BCHECK
6705 /* add padding if bound check */
6706 if (tcc_state->do_bounds_check)
6707 data_offset++;
6708 #endif
6709 sec->data_offset = data_offset;
6710 /* allocate section space to put the data */
6711 if (sec->sh_type != SHT_NOBITS &&
6712 data_offset > sec->data_allocated)
6713 section_realloc(sec, data_offset);
6714 /* align section if needed */
6715 if (align > sec->sh_addralign)
6716 sec->sh_addralign = align;
6717 } else {
6718 addr = 0; /* avoid warning */
6719 }
6721 if (v) {
6722 if (scope != VT_CONST || !sym) {
6723 sym = sym_push(v, type, r | VT_SYM, 0);
6724 sym->asm_label = ad->asm_label;
6725 }
6726 /* update symbol definition */
6727 if (sec) {
6728 put_extern_sym(sym, sec, addr, size);
6729 } else {
6730 put_extern_sym(sym, SECTION_COMMON, align, size);
6731 }
6733 } else {
6734 /* push global reference */
6735 sym = get_sym_ref(type, sec, addr, size);
6736 vpushsym(type, sym);
6737 }
6739 #ifdef CONFIG_TCC_BCHECK
6740 /* handle bounds now because the symbol must be defined
6741 before the relocation below can reference it */
6742 if (tcc_state->do_bounds_check) {
6743 addr_t *bounds_ptr;
6745 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6746 /* then add global bound info */
6747 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6748 bounds_ptr[0] = 0; /* relocated */
6749 bounds_ptr[1] = size;
6750 }
6751 #endif
6752 }
6754 if (type->t & VT_VLA) {
6755 int a;
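/* Illustrative example: in 'void f(int n) { int a[n]; ... }' the
   current stack pointer is saved before the first VLA of the scope
   so it can be restored when the block is left. */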
6757 /* save current stack pointer */
6758 if (vlas_in_scope == 0) {
6759 if (vla_sp_root_loc == -1)
6760 vla_sp_root_loc = (loc -= PTR_SIZE);
6761 gen_vla_sp_save(vla_sp_root_loc);
6762 }
6764 vla_runtime_type_size(type, &a);
6765 gen_vla_alloc(type, a);
6766 gen_vla_sp_save(addr);
6767 vla_sp_loc = addr;
6768 vlas_in_scope++;
6770 } else if (has_init) {
6771 size_t oldreloc_offset = 0;
6772 if (sec && sec->reloc)
6773 oldreloc_offset = sec->reloc->data_offset;
6774 decl_initializer(type, sec, addr, 1, 0);
6775 if (sec && sec->reloc)
6776 squeeze_multi_relocs(sec, oldreloc_offset);
6777 /* patch flexible array member size back to -1, */
6778 /* for possible subsequent similar declarations */
6779 if (flexible_array)
6780 flexible_array->type.ref->c = -1;
6781 }
6783 no_alloc:
6784 /* restore parse state if needed */
6785 if (init_str) {
6786 end_macro();
6787 restore_parse_state(&saved_parse_state);
6788 }
6789 }
6791 /* parse an old style function declaration list */
6792 /* XXX: check for multiple declarations of the same parameter */
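/* Illustrative example of what is parsed here (old-style definition):
       int f(a, b)
       int a;
       char *b;
       { ... }
   the declarations between ')' and '{' form the declaration list. */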
6793 static void func_decl_list(Sym *func_sym)
6794 {
6795 AttributeDef ad;
6796 int v;
6797 Sym *s;
6798 CType btype, type;
6800 /* parse each declaration */
6801 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6802 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6803 if (!parse_btype(&btype, &ad))
6804 expect("declaration list");
6805 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6806 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6807 tok == ';') {
6808 /* a lone struct/enum declaration with no variable after it is accepted */
6809 } else {
6810 for(;;) {
6811 type = btype;
6812 type_decl(&type, &ad, &v, TYPE_DIRECT);
6813 /* find parameter in function parameter list */
6814 s = func_sym->next;
6815 while (s != NULL) {
6816 if ((s->v & ~SYM_FIELD) == v)
6817 goto found;
6818 s = s->next;
6819 }
6820 tcc_error("declaration for parameter '%s' but no such parameter",
6821 get_tok_str(v, NULL));
6822 found:
6823 /* check that no storage specifier except 'register' was given */
6824 if (type.t & VT_STORAGE)
6825 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6826 convert_parameter_type(&type);
6827 /* we can add the type (NOTE: it could be local to the function) */
6828 s->type = type;
6829 /* accept other parameters */
6830 if (tok == ',')
6831 next();
6832 else
6833 break;
6834 }
6835 }
6836 skip(';');
6837 }
6838 }
6840 /* parse a function defined by symbol 'sym' and generate its code in
6841 'cur_text_section' */
6842 static void gen_function(Sym *sym)
6843 {
6844 nocode_wanted = 0;
6845 ind = cur_text_section->data_offset;
6846 /* NOTE: we patch the symbol size later */
6847 put_extern_sym(sym, cur_text_section, ind, 0);
6848 funcname = get_tok_str(sym->v, NULL);
6849 func_ind = ind;
6850 /* Initialize VLA state */
6851 vla_sp_loc = -1;
6852 vla_sp_root_loc = -1;
6853 /* put debug symbol */
6854 tcc_debug_funcstart(tcc_state, sym);
6855 /* push a dummy symbol to enable local sym storage */
6856 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6857 local_scope = 1; /* for function parameters */
6858 gfunc_prolog(&sym->type);
6859 local_scope = 0;
6860 rsym = 0;
6861 block(NULL, NULL, 0);
6862 nocode_wanted = 0;
6863 gsym(rsym);
6864 gfunc_epilog();
6865 cur_text_section->data_offset = ind;
6866 label_pop(&global_label_stack, NULL);
6867 /* reset local stack */
6868 local_scope = 0;
6869 sym_pop(&local_stack, NULL, 0);
6870 /* end of function */
6871 /* patch symbol size */
6872 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6873 ind - func_ind;
6874 tcc_debug_funcend(tcc_state, ind - func_ind);
6875 /* It's better to crash than to generate wrong code */
6876 cur_text_section = NULL;
6877 funcname = ""; /* for safety */
6878 func_vt.t = VT_VOID; /* for safety */
6879 func_var = 0; /* for safety */
6880 ind = 0; /* for safety */
6881 nocode_wanted = 1;
6882 check_vstack();
6883 }
6885 static void gen_inline_functions(TCCState *s)
6886 {
6887 Sym *sym;
6888 int inline_generated, i, ln;
6889 struct InlineFunc *fn;
6891 ln = file->line_num;
6892 /* iterate while inline functions are referenced */
6893 for(;;) {
6894 inline_generated = 0;
6895 for (i = 0; i < s->nb_inline_fns; ++i) {
6896 fn = s->inline_fns[i];
6897 sym = fn->sym;
6898 if (sym && sym->c) {
6899 /* the function was used: generate its code and
6900 convert it to a normal function */
6901 fn->sym = NULL;
6902 if (file)
6903 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6904 sym->type.t &= ~VT_INLINE;
6906 begin_macro(fn->func_str, 1);
6907 next();
6908 cur_text_section = text_section;
6909 gen_function(sym);
6910 end_macro();
6912 inline_generated = 1;
6913 }
6914 }
6915 if (!inline_generated)
6916 break;
6917 }
6918 file->line_num = ln;
6919 }
6921 ST_FUNC void free_inline_functions(TCCState *s)
6922 {
6923 int i;
6924 /* free tokens of unused inline functions */
6925 for (i = 0; i < s->nb_inline_fns; ++i) {
6926 struct InlineFunc *fn = s->inline_fns[i];
6927 if (fn->sym)
6928 tok_str_free(fn->func_str);
6929 }
6930 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6931 }
6933 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6934 static int decl0(int l, int is_for_loop_init)
6935 {
6936 int v, has_init, r;
6937 CType type, btype;
6938 Sym *sym;
6939 AttributeDef ad;
6941 while (1) {
6942 if (!parse_btype(&btype, &ad)) {
6943 if (is_for_loop_init)
6944 return 0;
6945 /* skip redundant ';' */
6946 /* XXX: find more elegant solution */
6947 if (tok == ';') {
6948 next();
6949 continue;
6950 }
6951 if (l == VT_CONST &&
6952 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6953 /* global asm block */
6954 asm_global_instr();
6955 continue;
6956 }
6957 /* special test for old K&R protos without explicit int
6958 type. Only accepted when defining global data */
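/* Illustrative example: old-style code like
       count;
       f();
   at file scope declares 'count' and 'f' with an implicit int type. */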
6959 if (l == VT_LOCAL || tok < TOK_UIDENT)
6960 break;
6961 btype.t = VT_INT;
6962 }
6963 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6964 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6965 tok == ';') {
6966 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6967 int v = btype.ref->v;
6968 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6969 tcc_warning("unnamed struct/union that defines no instances");
6970 }
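/* Illustrative example: 'struct { int a; };' at file scope declares
   nothing and triggers the warning above; 'struct s { int a; };' does
   not, because the tag can still be used later. */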
6971 next();
6972 continue;
6973 }
6974 while (1) { /* iterate thru each declaration */
6975 type = btype;
6976 /* If the base type itself was an array type of unspecified
6977 size (like in 'typedef int arr[]; arr x = {1};') then
6978 we will overwrite the unknown size by the real one for
6979 this decl. We need to unshare the ref symbol holding
6980 that size. */
6981 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6982 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6983 }
6984 type_decl(&type, &ad, &v, TYPE_DIRECT);
6985 #if 0
6986 {
6987 char buf[500];
6988 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
6989 printf("type = '%s'\n", buf);
6990 }
6991 #endif
6992 if ((type.t & VT_BTYPE) == VT_FUNC) {
6993 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6994 tcc_error("function without file scope cannot be static");
6996 /* if old style function prototype, we accept a
6997 declaration list */
6998 sym = type.ref;
6999 if (sym->c == FUNC_OLD)
7000 func_decl_list(sym);
7001 }
7003 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7004 ad.asm_label = asm_label_instr();
7005 /* parse one last attribute list, after asm label */
7006 parse_attribute(&ad);
7007 if (tok == '{')
7008 expect(";");
7011 if (ad.a.weak)
7012 type.t |= VT_WEAK;
7013 #ifdef TCC_TARGET_PE
7014 if (ad.a.func_import || ad.a.func_export) {
7015 if (type.t & (VT_STATIC|VT_TYPEDEF))
7016 tcc_error("cannot have dll linkage with static or typedef");
7017 if (ad.a.func_export)
7018 type.t |= VT_EXPORT;
7019 else if ((type.t & VT_BTYPE) != VT_FUNC)
7020 type.t |= VT_IMPORT|VT_EXTERN;
7021 }
7022 #endif
7023 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7025 if (tok == '{') {
7026 if (l == VT_LOCAL)
7027 tcc_error("cannot use local functions");
7028 if ((type.t & VT_BTYPE) != VT_FUNC)
7029 expect("function definition");
7031 /* reject abstract declarators in function definition */
7032 sym = type.ref;
7033 while ((sym = sym->next) != NULL)
7034 if (!(sym->v & ~SYM_FIELD))
7035 expect("identifier");
7037 /* XXX: cannot do better now: convert extern inline to static inline */
7038 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7039 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7041 sym = sym_find(v);
7042 if (sym) {
7043 Sym *ref;
7044 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7045 goto func_error1;
7047 ref = sym->type.ref;
7049 /* use func_call from prototype if not defined */
7050 if (ref->a.func_call != FUNC_CDECL
7051 && type.ref->a.func_call == FUNC_CDECL)
7052 type.ref->a.func_call = ref->a.func_call;
7054 /* use static from prototype */
7055 if (sym->type.t & VT_STATIC)
7056 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7058 /* If the definition has no visibility use the
7059 one from prototype. */
7060 if (! (type.t & VT_VIS_MASK))
7061 type.t |= sym->type.t & VT_VIS_MASK;
7063 /* apply other storage attributes from prototype */
7064 type.t |= sym->type.t & (VT_EXPORT|VT_WEAK);
7066 if (!is_compatible_types(&sym->type, &type)) {
7067 func_error1:
7068 tcc_error("incompatible types for redefinition of '%s'",
7069 get_tok_str(v, NULL));
7070 }
7071 if (ref->a.func_body)
7072 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7073 /* if symbol is already defined, then put complete type */
7074 sym->type = type;
7076 } else {
7077 /* put function symbol */
7078 sym = global_identifier_push(v, type.t, 0);
7079 sym->type.ref = type.ref;
7080 }
7082 sym->type.ref->a.func_body = 1;
7083 sym->r = VT_SYM | VT_CONST;
7085 /* static inline functions are just recorded as a kind
7086 of macro. Their code will be emitted at the end of
7087 the compilation unit only if they are used */
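/* Illustrative example: 'static inline int sq(int x) { return x * x; }'
   is only tokenized here; its code is emitted later by
   gen_inline_functions() if sq() is actually referenced. */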
7088 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7089 (VT_INLINE | VT_STATIC)) {
7090 int block_level;
7091 struct InlineFunc *fn;
7092 const char *filename;
7094 filename = file ? file->filename : "";
7095 fn = tcc_malloc(sizeof *fn + strlen(filename));
7096 strcpy(fn->filename, filename);
7097 fn->sym = sym;
7098 fn->func_str = tok_str_alloc();
7100 block_level = 0;
7101 for(;;) {
7102 int t;
7103 if (tok == TOK_EOF)
7104 tcc_error("unexpected end of file");
7105 tok_str_add_tok(fn->func_str);
7106 t = tok;
7107 next();
7108 if (t == '{') {
7109 block_level++;
7110 } else if (t == '}') {
7111 block_level--;
7112 if (block_level == 0)
7113 break;
7114 }
7115 }
7116 tok_str_add(fn->func_str, -1);
7117 tok_str_add(fn->func_str, 0);
7118 dynarray_add(&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7120 } else {
7121 /* compute text section */
7122 cur_text_section = ad.section;
7123 if (!cur_text_section)
7124 cur_text_section = text_section;
7125 gen_function(sym);
7126 }
7127 break;
7128 } else {
7129 if (type.t & VT_TYPEDEF) {
7130 /* save typedefed type */
7131 /* XXX: test storage specifiers ? */
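/* Illustrative example: 'typedef int T;' repeated with an identical
   type is accepted; a subsequent 'typedef long T;' in the same scope
   is rejected as an incompatible redefinition. */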
7132 sym = sym_find(v);
7133 if (sym && sym->scope == local_scope) {
7134 if (!is_compatible_types(&sym->type, &type)
7135 || !(sym->type.t & VT_TYPEDEF))
7136 tcc_error("incompatible redefinition of '%s'",
7137 get_tok_str(v, NULL));
7138 sym->type = type;
7139 } else {
7140 sym = sym_push(v, &type, 0, 0);
7141 }
7142 sym->a = ad.a;
7143 } else {
7144 r = 0;
7145 if ((type.t & VT_BTYPE) == VT_FUNC) {
7146 /* external function definition */
7147 /* specific case for func_call attribute */
7148 type.ref->a = ad.a;
7149 } else if (!(type.t & VT_ARRAY)) {
7150 /* not lvalue if array */
7151 r |= lvalue_type(type.t);
7152 }
7153 has_init = (tok == '=');
7154 if (has_init && (type.t & VT_VLA))
7155 tcc_error("variable length array cannot be initialized");
7156 if ((type.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7157 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7158 !has_init && l == VT_CONST && type.ref->c < 0)) {
7159 /* external variable or function */
7160 /* NOTE: as in GCC, uninitialized global static
7161 arrays of unknown size are considered
7162 extern */
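/* Illustrative examples: 'extern int x;' and, at file scope, an
   uninitialized 'static int tab[];' are both recorded with
   external_sym() and get no storage allocated here. */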
7163 sym = external_sym(v, &type, r);
7164 sym->asm_label = ad.asm_label;
7165 if (ad.alias_target) {
7166 Section tsec;
7167 ElfW(Sym) *esym;
7168 Sym *alias_target;
7170 alias_target = sym_find(ad.alias_target);
7171 if (!alias_target || !alias_target->c)
7172 tcc_error("unsupported forward __alias__ attribute");
7173 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7174 tsec.sh_num = esym->st_shndx;
7175 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7176 }
7177 } else {
7178 if (type.t & VT_STATIC)
7179 r |= VT_CONST;
7180 else
7181 r |= l;
7182 if (has_init)
7183 next();
7184 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7185 }
7186 }
7187 if (tok != ',') {
7188 if (is_for_loop_init)
7189 return 1;
7190 skip(';');
7191 break;
7192 }
7193 next();
7194 }
7195 ad.a.aligned = 0;
7196 }
7197 }
7198 return 0;
7199 }
7201 ST_FUNC void decl(int l)
7202 {
7203 decl0(l, 0);
7204 }
7206 /* ------------------------------------------------------------------------- */