[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void init_putv(CType *type, Section *sec, unsigned long c);
80 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
81 static void block(int *bsym, int *csym, int is_expr);
82 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
83 static int decl0(int l, int is_for_loop_init, Sym *);
84 static void expr_eq(void);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static inline int64_t expr_const64(void);
90 ST_FUNC void vpush64(int ty, unsigned long long v);
91 ST_FUNC void vpush(CType *type);
92 ST_FUNC int gvtst(int inv, int t);
93 ST_FUNC int is_btype_size(int bt);
94 static void gen_inline_functions(TCCState *s);
96 ST_INLN int is_float(int t)
98 int bt;
99 bt = t & VT_BTYPE;
100 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
103 /* we use our own 'finite' function to avoid potential problems with
104 non-standard math libs */
105 /* XXX: endianness dependent */
106 ST_FUNC int ieee_finite(double d)
108 int p[4];
109 memcpy(p, &d, sizeof(double));
110 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
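/* Worked example of the trick above (a sketch, assuming the little-endian
   IEEE-754 layout the XXX comment already warns about): p[1] holds the sign
   bit, the 11 exponent bits and the top 20 mantissa bits.  OR-ing with
   0x800fffff sets every bit except the exponent field; adding 1 then carries
   into the exponent.  For a finite value (exponent != 0x7ff) the carry stops
   inside the exponent and bit 31 stays set, so the shift yields 1.  For
   Inf/NaN (exponent == 0x7ff) the sum wraps to 0 and the shift yields 0. */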
113 ST_FUNC void test_lvalue(void)
115 if (!(vtop->r & VT_LVAL))
116 expect("lvalue");
119 ST_FUNC void check_vstack(void)
121 if (pvtop != vtop)
122 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
125 /* ------------------------------------------------------------------------- */
126 /* vstack debugging aid */
128 #if 0
129 void pv (const char *lbl, int a, int b)
131 int i;
132 for (i = a; i < a + b; ++i) {
133 SValue *p = &vtop[-i];
134 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
135 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
138 #endif
140 /* ------------------------------------------------------------------------- */
141 /* start of translation unit info */
142 ST_FUNC void tcc_debug_start(TCCState *s1)
144 if (s1->do_debug) {
145 char buf[512];
147 /* file info: full path + filename */
148 section_sym = put_elf_sym(symtab_section, 0, 0,
149 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
150 text_section->sh_num, NULL);
151 getcwd(buf, sizeof(buf));
152 #ifdef _WIN32
153 normalize_slashes(buf);
154 #endif
155 pstrcat(buf, sizeof(buf), "/");
156 put_stabs_r(buf, N_SO, 0, 0,
157 text_section->data_offset, text_section, section_sym);
158 put_stabs_r(file->filename, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 last_ind = 0;
161 last_line_num = 0;
164 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
165 symbols can be safely used */
166 put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
168 SHN_ABS, file->filename);
171 /* put end of translation unit info */
172 ST_FUNC void tcc_debug_end(TCCState *s1)
174 if (!s1->do_debug)
175 return;
176 put_stabs_r(NULL, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
181 /* generate line number info */
182 ST_FUNC void tcc_debug_line(TCCState *s1)
184 if (!s1->do_debug)
185 return;
186 if ((last_line_num != file->line_num || last_ind != ind)) {
187 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
188 last_ind = ind;
189 last_line_num = file->line_num;
193 /* put function symbol */
194 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
196 char buf[512];
198 if (!s1->do_debug)
199 return;
201 /* stabs info */
202 /* XXX: we put here a dummy type */
203 snprintf(buf, sizeof(buf), "%s:%c1",
204 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
205 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
206 cur_text_section, sym->c);
207 /* //gr gdb wants a line at the function */
208 put_stabn(N_SLINE, 0, file->line_num, 0);
210 last_ind = 0;
211 last_line_num = 0;
214 /* put function size */
215 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
217 if (!s1->do_debug)
218 return;
219 put_stabn(N_FUN, 0, 0, size);
222 /* ------------------------------------------------------------------------- */
223 ST_FUNC void tccgen_start(TCCState *s1)
225 cur_text_section = NULL;
226 funcname = "";
227 anon_sym = SYM_FIRST_ANOM;
228 section_sym = 0;
229 const_wanted = 0;
230 nocode_wanted = 1;
232 /* define some often used types */
233 int_type.t = VT_INT;
234 char_pointer_type.t = VT_BYTE;
235 mk_pointer(&char_pointer_type);
236 #if PTR_SIZE == 4
237 size_type.t = VT_INT;
238 #else
239 size_type.t = VT_LLONG;
240 #endif
241 func_old_type.t = VT_FUNC;
242 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
244 tcc_debug_start(s1);
246 #ifdef TCC_TARGET_ARM
247 arm_init(s1);
248 #endif
251 ST_FUNC void tccgen_end(TCCState *s1)
253 gen_inline_functions(s1);
254 check_vstack();
255 /* end of translation unit info */
256 tcc_debug_end(s1);
259 /* ------------------------------------------------------------------------- */
260 /* apply storage attributes to ELF symbol */
262 static void update_storage(Sym *sym)
264 int t;
265 ElfW(Sym) *esym;
267 if (0 == sym->c)
268 return;
270 t = sym->type.t;
271 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
273 if (t & VT_VIS_MASK)
274 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
275 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
277 if (t & VT_WEAK)
278 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
280 #ifdef TCC_TARGET_PE
281 if (t & VT_EXPORT)
282 esym->st_other |= ST_PE_EXPORT;
283 #endif
286 /* ------------------------------------------------------------------------- */
287 /* update sym->c so that it points to an external symbol in section
288 'section' with value 'value' */
290 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
291 addr_t value, unsigned long size,
292 int can_add_underscore)
294 int sym_type, sym_bind, sh_num, info, other, t;
295 ElfW(Sym) *esym;
296 const char *name;
297 char buf1[256];
298 #ifdef CONFIG_TCC_BCHECK
299 char buf[32];
300 #endif
302 if (section == NULL)
303 sh_num = SHN_UNDEF;
304 else if (section == SECTION_ABS)
305 sh_num = SHN_ABS;
306 else
307 sh_num = section->sh_num;
309 if (!sym->c) {
310 name = get_tok_str(sym->v, NULL);
311 #ifdef CONFIG_TCC_BCHECK
312 if (tcc_state->do_bounds_check) {
313 /* XXX: avoid doing that for statics ? */
314 /* if bounds checking is activated, we change some function
315 names by adding the "__bound" prefix */
316 switch(sym->v) {
317 #ifdef TCC_TARGET_PE
318 /* XXX: we rely only on malloc hooks */
319 case TOK_malloc:
320 case TOK_free:
321 case TOK_realloc:
322 case TOK_memalign:
323 case TOK_calloc:
324 #endif
325 case TOK_memcpy:
326 case TOK_memmove:
327 case TOK_memset:
328 case TOK_strlen:
329 case TOK_strcpy:
330 case TOK_alloca:
331 strcpy(buf, "__bound_");
332 strcat(buf, name);
333 name = buf;
334 break;
337 #endif
338 t = sym->type.t;
339 if ((t & VT_BTYPE) == VT_FUNC) {
340 sym_type = STT_FUNC;
341 } else if ((t & VT_BTYPE) == VT_VOID) {
342 sym_type = STT_NOTYPE;
343 } else {
344 sym_type = STT_OBJECT;
346 if (t & VT_STATIC)
347 sym_bind = STB_LOCAL;
348 else
349 sym_bind = STB_GLOBAL;
350 other = 0;
351 #ifdef TCC_TARGET_PE
352 if (sym_type == STT_FUNC && sym->type.ref) {
353 Sym *ref = sym->type.ref;
354 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
355 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
356 name = buf1;
357 other |= ST_PE_STDCALL;
358 can_add_underscore = 0;
361 if (t & VT_IMPORT)
362 other |= ST_PE_IMPORT;
363 #endif
364 if (tcc_state->leading_underscore && can_add_underscore) {
365 buf1[0] = '_';
366 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
367 name = buf1;
369 if (sym->asm_label)
370 name = get_tok_str(sym->asm_label, NULL);
371 info = ELFW(ST_INFO)(sym_bind, sym_type);
372 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
373 } else {
374 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
375 esym->st_value = value;
376 esym->st_size = size;
377 esym->st_shndx = sh_num;
379 update_storage(sym);
382 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
383 addr_t value, unsigned long size)
385 put_extern_sym2(sym, section, value, size, 1);
388 /* add a new relocation entry to symbol 'sym' in section 's' */
389 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
390 addr_t addend)
392 int c = 0;
394 if (nocode_wanted && s == cur_text_section)
395 return;
397 if (sym) {
398 if (0 == sym->c)
399 put_extern_sym(sym, NULL, 0, 0);
400 c = sym->c;
403 /* now we can add ELF relocation info */
404 put_elf_reloca(symtab_section, s, offset, type, c, addend);
407 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
409 greloca(s, sym, offset, type, 0);
412 /* ------------------------------------------------------------------------- */
413 /* symbol allocator */
414 static Sym *__sym_malloc(void)
416 Sym *sym_pool, *sym, *last_sym;
417 int i;
419 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
420 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
422 last_sym = sym_free_first;
423 sym = sym_pool;
424 for(i = 0; i < SYM_POOL_NB; i++) {
425 sym->next = last_sym;
426 last_sym = sym;
427 sym++;
429 sym_free_first = last_sym;
430 return last_sym;
433 static inline Sym *sym_malloc(void)
435 Sym *sym;
436 #ifndef SYM_DEBUG
437 sym = sym_free_first;
438 if (!sym)
439 sym = __sym_malloc();
440 sym_free_first = sym->next;
441 return sym;
442 #else
443 sym = tcc_malloc(sizeof(Sym));
444 return sym;
445 #endif
448 ST_INLN void sym_free(Sym *sym)
450 #ifndef SYM_DEBUG
451 sym->next = sym_free_first;
452 sym_free_first = sym;
453 #else
454 tcc_free(sym);
455 #endif
458 /* push, without hashing */
459 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
461 Sym *s;
463 s = sym_malloc();
464 s->scope = 0;
465 s->v = v;
466 s->type.t = t;
467 s->type.ref = NULL;
468 #ifdef _WIN64
469 s->d = NULL;
470 #endif
471 s->c = c;
472 s->next = NULL;
473 /* add in stack */
474 s->prev = *ps;
475 *ps = s;
476 return s;
479 /* find a symbol and return its associated structure. 's' is the top
480 of the symbol stack */
481 ST_FUNC Sym *sym_find2(Sym *s, int v)
483 while (s) {
484 if (s->v == v)
485 return s;
486 else if (s->v == -1)
487 return NULL;
488 s = s->prev;
490 return NULL;
493 /* structure lookup */
494 ST_INLN Sym *struct_find(int v)
496 v -= TOK_IDENT;
497 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
498 return NULL;
499 return table_ident[v]->sym_struct;
502 /* find an identifier */
503 ST_INLN Sym *sym_find(int v)
505 v -= TOK_IDENT;
506 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
507 return NULL;
508 return table_ident[v]->sym_identifier;
511 /* push a given symbol on the symbol stack */
512 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
514 Sym *s, **ps;
515 TokenSym *ts;
517 if (local_stack)
518 ps = &local_stack;
519 else
520 ps = &global_stack;
521 s = sym_push2(ps, v, type->t, c);
522 s->type.ref = type->ref;
523 s->r = r;
524 /* don't record fields or anonymous symbols */
525 /* XXX: simplify */
526 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
527 /* record symbol in token array */
528 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
529 if (v & SYM_STRUCT)
530 ps = &ts->sym_struct;
531 else
532 ps = &ts->sym_identifier;
533 s->prev_tok = *ps;
534 *ps = s;
535 s->scope = local_scope;
536 if (s->prev_tok && s->prev_tok->scope == s->scope)
537 tcc_error("redeclaration of '%s'",
538 get_tok_str(v & ~SYM_STRUCT, NULL));
540 return s;
543 /* push a global identifier */
544 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
546 Sym *s, **ps;
547 s = sym_push2(&global_stack, v, t, c);
548 /* don't record anonymous symbol */
549 if (v < SYM_FIRST_ANOM) {
550 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
551 /* modify the topmost local identifier, so that
552 sym_identifier will point to 's' when popped */
553 while (*ps != NULL)
554 ps = &(*ps)->prev_tok;
555 s->prev_tok = NULL;
556 *ps = s;
558 return s;
561 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
562 pop them yet from the list, but do remove them from the token array. */
563 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
565 Sym *s, *ss, **ps;
566 TokenSym *ts;
567 int v;
569 s = *ptop;
570 while(s != b) {
571 ss = s->prev;
572 v = s->v;
573 /* remove symbol in token array */
574 /* XXX: simplify */
575 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
576 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
577 if (v & SYM_STRUCT)
578 ps = &ts->sym_struct;
579 else
580 ps = &ts->sym_identifier;
581 *ps = s->prev_tok;
583 if (!keep)
584 sym_free(s);
585 s = ss;
587 if (!keep)
588 *ptop = b;
591 /* ------------------------------------------------------------------------- */
593 static void vsetc(CType *type, int r, CValue *vc)
595 int v;
597 if (vtop >= vstack + (VSTACK_SIZE - 1))
598 tcc_error("memory full (vstack)");
599 /* we cannot leave values in the CPU flags if other instructions are generated. Also
600 avoid leaving VT_JMP anywhere except on the top of the stack
601 because it would complicate the code generator.
603 Don't do this when nocode_wanted. vtop might come from
604 !nocode_wanted regions (see 88_codeopt.c) and transforming
605 it to a register without actually generating code is wrong
606 as their value might still be used for real. All values
607 we push under nocode_wanted will eventually be popped
608 again, so that the VT_CMP/VT_JMP value will be in vtop
609 when code is unsuppressed again.
611 Same logic below in vswap(); */
612 if (vtop >= vstack && !nocode_wanted) {
613 v = vtop->r & VT_VALMASK;
614 if (v == VT_CMP || (v & ~1) == VT_JMP)
615 gv(RC_INT);
618 vtop++;
619 vtop->type = *type;
620 vtop->r = r;
621 vtop->r2 = VT_CONST;
622 vtop->c = *vc;
623 vtop->sym = NULL;
626 ST_FUNC void vswap(void)
628 SValue tmp;
629 /* cannot vswap cpu flags. See comment at vsetc() above */
630 if (vtop >= vstack && !nocode_wanted) {
631 int v = vtop->r & VT_VALMASK;
632 if (v == VT_CMP || (v & ~1) == VT_JMP)
633 gv(RC_INT);
635 tmp = vtop[0];
636 vtop[0] = vtop[-1];
637 vtop[-1] = tmp;
640 /* pop stack value */
641 ST_FUNC void vpop(void)
643 int v;
644 v = vtop->r & VT_VALMASK;
645 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
646 /* for x86, we need to pop the FP stack */
647 if (v == TREG_ST0) {
648 o(0xd8dd); /* fstp %st(0) */
649 } else
650 #endif
651 if (v == VT_JMP || v == VT_JMPI) {
652 /* need to put correct jump if && or || without test */
653 gsym(vtop->c.i);
655 vtop--;
658 /* push constant of type "type" with useless value */
659 ST_FUNC void vpush(CType *type)
661 CValue cval;
662 vsetc(type, VT_CONST, &cval);
665 /* push integer constant */
666 ST_FUNC void vpushi(int v)
668 CValue cval;
669 cval.i = v;
670 vsetc(&int_type, VT_CONST, &cval);
673 /* push a pointer sized constant */
674 static void vpushs(addr_t v)
676 CValue cval;
677 cval.i = v;
678 vsetc(&size_type, VT_CONST, &cval);
681 /* push arbitrary 64bit constant */
682 ST_FUNC void vpush64(int ty, unsigned long long v)
684 CValue cval;
685 CType ctype;
686 ctype.t = ty;
687 ctype.ref = NULL;
688 cval.i = v;
689 vsetc(&ctype, VT_CONST, &cval);
692 /* push long long constant */
693 static inline void vpushll(long long v)
695 vpush64(VT_LLONG, v);
698 ST_FUNC void vset(CType *type, int r, long v)
700 CValue cval;
702 cval.i = v;
703 vsetc(type, r, &cval);
706 static void vseti(int r, int v)
708 CType type;
709 type.t = VT_INT;
710 type.ref = 0;
711 vset(&type, r, v);
714 ST_FUNC void vpushv(SValue *v)
716 if (vtop >= vstack + (VSTACK_SIZE - 1))
717 tcc_error("memory full (vstack)");
718 vtop++;
719 *vtop = *v;
722 static void vdup(void)
724 vpushv(vtop);
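/* Illustrative sketch (not part of the compiler, kept disabled like the pv()
   aid above) of how the push helpers combine with gen_op(): constants pushed
   with vpushi() are folded by gen_opic() when an operator is applied. */
#if 0
static void vstack_example(void)
{
    vpushi(6);       /* stack: ... 6 */
    vpushi(7);       /* stack: ... 6 7 */
    gen_op('*');     /* both operands are constants: folded to 42 */
    vpop();          /* drop the result again */
}
#endif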
727 /* rotate n first stack elements to the bottom
728 I1 ... In -> I2 ... In I1 [top is right]
730 ST_FUNC void vrotb(int n)
732 int i;
733 SValue tmp;
735 tmp = vtop[-n + 1];
736 for(i=-n+1;i!=0;i++)
737 vtop[i] = vtop[i+1];
738 vtop[0] = tmp;
741 /* rotate the n elements before entry e towards the top
742 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
744 ST_FUNC void vrote(SValue *e, int n)
746 int i;
747 SValue tmp;
749 tmp = *e;
750 for(i = 0;i < n - 1; i++)
751 e[-i] = e[-i - 1];
752 e[-n + 1] = tmp;
755 /* rotate n first stack elements to the top
756 I1 ... In -> In I1 ... I(n-1) [top is right]
758 ST_FUNC void vrott(int n)
760 vrote(vtop, n);
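/* Example of the three rotations above, with the top three stack entries
   A B C (C on top): vrotb(3) gives B C A, vrott(3) gives C A B, and
   vrote(vtop, 3) is the same as vrott(3). */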
763 /* push a symbol value of TYPE */
764 static inline void vpushsym(CType *type, Sym *sym)
766 CValue cval;
767 cval.i = 0;
768 vsetc(type, VT_CONST | VT_SYM, &cval);
769 vtop->sym = sym;
772 /* Return a static symbol pointing to a section */
773 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
775 int v;
776 Sym *sym;
778 v = anon_sym++;
779 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
780 sym->type.ref = type->ref;
781 sym->r = VT_CONST | VT_SYM;
782 put_extern_sym(sym, sec, offset, size);
783 return sym;
786 /* push a reference to a section offset by adding a dummy symbol */
787 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
789 vpushsym(type, get_sym_ref(type, sec, offset, size));
792 /* define a new external reference to a symbol 'v' of the given type */
793 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
795 Sym *s;
797 s = sym_find(v);
798 if (!s) {
799 /* push forward reference */
800 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
801 s->type.ref = type->ref;
802 s->r = r | VT_CONST | VT_SYM;
804 return s;
807 /* Merge some storage attributes. */
808 static void patch_storage(Sym *sym, CType *type)
810 int t;
811 if (!is_compatible_types(&sym->type, type))
812 tcc_error("incompatible types for redefinition of '%s'",
813 get_tok_str(sym->v, NULL));
814 t = type->t;
815 #ifdef TCC_TARGET_PE
816 if ((sym->type.t ^ t) & VT_IMPORT)
817 tcc_error("incompatible dll linkage for redefinition of '%s'",
818 get_tok_str(sym->v, NULL));
819 #endif
820 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
821 if (t & VT_VIS_MASK) {
822 int vis = sym->type.t & VT_VIS_MASK;
823 int vis2 = t & VT_VIS_MASK;
824 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
825 vis = vis2;
826 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
827 vis = (vis < vis2) ? vis : vis2;
828 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
832 /* define a new external reference to a symbol 'v' */
833 static Sym *external_sym(int v, CType *type, int r)
835 Sym *s;
836 s = sym_find(v);
837 if (!s) {
838 /* push forward reference */
839 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
840 s->type.t |= VT_EXTERN;
841 } else {
842 if (s->type.ref == func_old_type.ref) {
843 s->type.ref = type->ref;
844 s->r = r | VT_CONST | VT_SYM;
845 s->type.t |= VT_EXTERN;
847 patch_storage(s, type);
848 update_storage(s);
850 return s;
853 /* push a reference to global symbol v */
854 ST_FUNC void vpush_global_sym(CType *type, int v)
856 vpushsym(type, external_global_sym(v, type, 0));
859 /* save registers up to (vtop - n) stack entry */
860 ST_FUNC void save_regs(int n)
862 SValue *p, *p1;
863 for(p = vstack, p1 = vtop - n; p <= p1; p++)
864 save_reg(p->r);
867 /* save r to the memory stack, and mark it as being free */
868 ST_FUNC void save_reg(int r)
870 save_reg_upstack(r, 0);
873 /* save r to the memory stack, and mark it as being free,
874 if seen up to (vtop - n) stack entry */
875 ST_FUNC void save_reg_upstack(int r, int n)
877 int l, saved, size, align;
878 SValue *p, *p1, sv;
879 CType *type;
881 if ((r &= VT_VALMASK) >= VT_CONST)
882 return;
883 if (nocode_wanted)
884 return;
886 /* modify all stack values */
887 saved = 0;
888 l = 0;
889 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
890 if ((p->r & VT_VALMASK) == r ||
891 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
892 /* must save value on stack if not already done */
893 if (!saved) {
894 /* NOTE: must reload 'r' because r might be equal to r2 */
895 r = p->r & VT_VALMASK;
896 /* store register in the stack */
897 type = &p->type;
898 if ((p->r & VT_LVAL) ||
899 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
900 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
901 type = &char_pointer_type;
902 #else
903 type = &int_type;
904 #endif
905 size = type_size(type, &align);
906 loc = (loc - size) & -align;
907 sv.type.t = type->t;
908 sv.r = VT_LOCAL | VT_LVAL;
909 sv.c.i = loc;
910 store(r, &sv);
911 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
912 /* x86 specific: need to pop fp register ST0 if saved */
913 if (r == TREG_ST0) {
914 o(0xd8dd); /* fstp %st(0) */
916 #endif
917 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
918 /* special long long case */
919 if ((type->t & VT_BTYPE) == VT_LLONG) {
920 sv.c.i += 4;
921 store(p->r2, &sv);
923 #endif
924 l = loc;
925 saved = 1;
927 /* mark that stack entry as being saved on the stack */
928 if (p->r & VT_LVAL) {
929 /* also clear the bounded flag because the
930 relocation address of the function was stored in
931 p->c.i */
932 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
933 } else {
934 p->r = lvalue_type(p->type.t) | VT_LOCAL;
936 p->r2 = VT_CONST;
937 p->c.i = l;
942 #ifdef TCC_TARGET_ARM
943 /* find a register of class 'rc2' with at most one reference on stack.
944 * If none, call get_reg(rc) */
945 ST_FUNC int get_reg_ex(int rc, int rc2)
947 int r;
948 SValue *p;
950 for(r=0;r<NB_REGS;r++) {
951 if (reg_classes[r] & rc2) {
952 int n;
953 n=0;
954 for(p = vstack; p <= vtop; p++) {
955 if ((p->r & VT_VALMASK) == r ||
956 (p->r2 & VT_VALMASK) == r)
957 n++;
959 if (n <= 1)
960 return r;
963 return get_reg(rc);
965 #endif
967 /* find a free register of class 'rc'. If none, save one register */
968 ST_FUNC int get_reg(int rc)
970 int r;
971 SValue *p;
973 /* find a free register */
974 for(r=0;r<NB_REGS;r++) {
975 if (reg_classes[r] & rc) {
976 if (nocode_wanted)
977 return r;
978 for(p=vstack;p<=vtop;p++) {
979 if ((p->r & VT_VALMASK) == r ||
980 (p->r2 & VT_VALMASK) == r)
981 goto notfound;
983 return r;
985 notfound: ;
988 /* no register left : free the first one on the stack (VERY
989 IMPORTANT to start from the bottom to ensure that we don't
990 spill registers used in gen_opi()) */
991 for(p=vstack;p<=vtop;p++) {
992 /* look at second register (if long long) */
993 r = p->r2 & VT_VALMASK;
994 if (r < VT_CONST && (reg_classes[r] & rc))
995 goto save_found;
996 r = p->r & VT_VALMASK;
997 if (r < VT_CONST && (reg_classes[r] & rc)) {
998 save_found:
999 save_reg(r);
1000 return r;
1003 /* Should never come here */
1004 return -1;
1007 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1008 if needed */
1009 static void move_reg(int r, int s, int t)
1011 SValue sv;
1013 if (r != s) {
1014 save_reg(r);
1015 sv.type.t = t;
1016 sv.type.ref = NULL;
1017 sv.r = s;
1018 sv.c.i = 0;
1019 load(r, &sv);
1023 /* get address of vtop (vtop MUST BE an lvalue) */
1024 ST_FUNC void gaddrof(void)
1026 vtop->r &= ~VT_LVAL;
1027 /* tricky: if saved lvalue, then we can go back to lvalue */
1028 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1029 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1034 #ifdef CONFIG_TCC_BCHECK
1035 /* generate lvalue bound code */
1036 static void gbound(void)
1038 int lval_type;
1039 CType type1;
1041 vtop->r &= ~VT_MUSTBOUND;
1042 /* if lvalue, then use checking code before dereferencing */
1043 if (vtop->r & VT_LVAL) {
1044 /* if not VT_BOUNDED value, then make one */
1045 if (!(vtop->r & VT_BOUNDED)) {
1046 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1047 /* must save type because we must set it to int to get pointer */
1048 type1 = vtop->type;
1049 vtop->type.t = VT_PTR;
1050 gaddrof();
1051 vpushi(0);
1052 gen_bounded_ptr_add();
1053 vtop->r |= lval_type;
1054 vtop->type = type1;
1056 /* then check for dereferencing */
1057 gen_bounded_ptr_deref();
1060 #endif
1062 /* store vtop in a register belonging to class 'rc'. lvalues are
1063 converted to values. Cannot be used if the value cannot be
1064 converted to a register value (such as structures). */
1065 ST_FUNC int gv(int rc)
1067 int r, bit_pos, bit_size, size, align;
1068 int rc2;
1070 /* NOTE: get_reg can modify vstack[] */
1071 if (vtop->type.t & VT_BITFIELD) {
1072 CType type;
1073 int bits = 32;
1074 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1075 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1076 /* remove bit field info to avoid loops */
1077 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1078 /* cast to int to propagate signedness in following ops */
1079 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1080 type.t = VT_LLONG;
1081 bits = 64;
1082 } else
1083 type.t = VT_INT;
1084 if((vtop->type.t & VT_UNSIGNED) ||
1085 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1086 type.t |= VT_UNSIGNED;
1087 gen_cast(&type);
1088 /* generate shifts */
1089 vpushi(bits - (bit_pos + bit_size));
1090 gen_op(TOK_SHL);
1091 vpushi(bits - bit_size);
1092 /* NOTE: transformed to SHR if unsigned */
1093 gen_op(TOK_SAR);
1094 r = gv(rc);
1095 } else {
1096 if (is_float(vtop->type.t) &&
1097 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1098 unsigned long offset;
1099 /* CPUs usually cannot use float constants, so we store them
1100 generically in data segment */
1101 size = type_size(&vtop->type, &align);
1102 offset = section_add(data_section, size, align);
1103 vpush_ref(&vtop->type, data_section, offset, size);
1104 vswap();
1105 init_putv(&vtop->type, data_section, offset);
1106 vtop->r |= VT_LVAL;
1108 #ifdef CONFIG_TCC_BCHECK
1109 if (vtop->r & VT_MUSTBOUND)
1110 gbound();
1111 #endif
1113 r = vtop->r & VT_VALMASK;
1114 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1115 #ifndef TCC_TARGET_ARM64
1116 if (rc == RC_IRET)
1117 rc2 = RC_LRET;
1118 #ifdef TCC_TARGET_X86_64
1119 else if (rc == RC_FRET)
1120 rc2 = RC_QRET;
1121 #endif
1122 #endif
1123 /* need to reload if:
1124 - constant
1125 - lvalue (need to dereference pointer)
1126 - already a register, but not in the right class */
1127 if (r >= VT_CONST
1128 || (vtop->r & VT_LVAL)
1129 || !(reg_classes[r] & rc)
1130 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1131 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1132 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1133 #else
1134 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1135 #endif
1138 r = get_reg(rc);
1139 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1140 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1141 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1142 #else
1143 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1144 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1145 unsigned long long ll;
1146 #endif
1147 int r2, original_type;
1148 original_type = vtop->type.t;
1149 /* two register type load : expand to two words
1150 temporarily */
1151 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1152 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1153 /* load constant */
1154 ll = vtop->c.i;
1155 vtop->c.i = ll; /* first word */
1156 load(r, vtop);
1157 vtop->r = r; /* save register value */
1158 vpushi(ll >> 32); /* second word */
1159 } else
1160 #endif
1161 if (vtop->r & VT_LVAL) {
1162 /* We do not want to modify the long long
1163 pointer here, so the safest (and least
1164 efficient) approach is to save all the other registers
1165 on the stack. XXX: totally inefficient. */
1166 #if 0
1167 save_regs(1);
1168 #else
1169 /* lvalue_save: save only if used further down the stack */
1170 save_reg_upstack(vtop->r, 1);
1171 #endif
1172 /* load from memory */
1173 vtop->type.t = load_type;
1174 load(r, vtop);
1175 vdup();
1176 vtop[-1].r = r; /* save register value */
1177 /* increment pointer to get second word */
1178 vtop->type.t = addr_type;
1179 gaddrof();
1180 vpushi(load_size);
1181 gen_op('+');
1182 vtop->r |= VT_LVAL;
1183 vtop->type.t = load_type;
1184 } else {
1185 /* move registers */
1186 load(r, vtop);
1187 vdup();
1188 vtop[-1].r = r; /* save register value */
1189 vtop->r = vtop[-1].r2;
1191 /* Allocate second register. Here we rely on the fact that
1192 get_reg() tries first to free r2 of an SValue. */
1193 r2 = get_reg(rc2);
1194 load(r2, vtop);
1195 vpop();
1196 /* write second register */
1197 vtop->r2 = r2;
1198 vtop->type.t = original_type;
1199 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1200 int t1, t;
1201 /* lvalue of scalar type : need to use lvalue type
1202 because of possible cast */
1203 t = vtop->type.t;
1204 t1 = t;
1205 /* compute memory access type */
1206 if (vtop->r & VT_LVAL_BYTE)
1207 t = VT_BYTE;
1208 else if (vtop->r & VT_LVAL_SHORT)
1209 t = VT_SHORT;
1210 if (vtop->r & VT_LVAL_UNSIGNED)
1211 t |= VT_UNSIGNED;
1212 vtop->type.t = t;
1213 load(r, vtop);
1214 /* restore wanted type */
1215 vtop->type.t = t1;
1216 } else {
1217 /* one register type load */
1218 load(r, vtop);
1221 vtop->r = r;
1222 #ifdef TCC_TARGET_C67
1223 /* uses register pairs for doubles */
1224 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1225 vtop->r2 = r+1;
1226 #endif
1228 return r;
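/* Example of the bit-field path above: for a signed 5-bit field stored at
   bit position 3 of a 32-bit word, gv() emits a shift left by 32-(3+5)=24
   followed by an arithmetic shift right by 32-5=27, which both extracts the
   field and sign-extends it. */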
1231 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1232 ST_FUNC void gv2(int rc1, int rc2)
1234 int v;
1236 /* generate more generic register first. But VT_JMP or VT_CMP
1237 values must be generated first in all cases to avoid possible
1238 reload errors */
1239 v = vtop[0].r & VT_VALMASK;
1240 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1241 vswap();
1242 gv(rc1);
1243 vswap();
1244 gv(rc2);
1245 /* test if reload is needed for first register */
1246 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1247 vswap();
1248 gv(rc1);
1249 vswap();
1251 } else {
1252 gv(rc2);
1253 vswap();
1254 gv(rc1);
1255 vswap();
1256 /* test if reload is needed for first register */
1257 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1258 gv(rc2);
1263 #ifndef TCC_TARGET_ARM64
1264 /* wrapper around RC_FRET to return a register by type */
1265 static int rc_fret(int t)
1267 #ifdef TCC_TARGET_X86_64
1268 if (t == VT_LDOUBLE) {
1269 return RC_ST0;
1271 #endif
1272 return RC_FRET;
1274 #endif
1276 /* wrapper around REG_FRET to return a register by type */
1277 static int reg_fret(int t)
1279 #ifdef TCC_TARGET_X86_64
1280 if (t == VT_LDOUBLE) {
1281 return TREG_ST0;
1283 #endif
1284 return REG_FRET;
1287 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1288 /* expand a 64-bit value on the stack into two ints */
1289 static void lexpand(void)
1291 int u, v;
1292 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1293 v = vtop->r & (VT_VALMASK | VT_LVAL);
1294 if (v == VT_CONST) {
1295 vdup();
1296 vtop[0].c.i >>= 32;
1297 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1298 vdup();
1299 vtop[0].c.i += 4;
1300 } else {
1301 gv(RC_INT);
1302 vdup();
1303 vtop[0].r = vtop[-1].r2;
1304 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1306 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1308 #endif
1310 #ifdef TCC_TARGET_ARM
1311 /* expand long long on stack */
1312 ST_FUNC void lexpand_nr(void)
1314 int u,v;
1316 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1317 vdup();
1318 vtop->r2 = VT_CONST;
1319 vtop->type.t = VT_INT | u;
1320 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1321 if (v == VT_CONST) {
1322 vtop[-1].c.i = vtop->c.i;
1323 vtop->c.i = vtop->c.i >> 32;
1324 vtop->r = VT_CONST;
1325 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1326 vtop->c.i += 4;
1327 vtop->r = vtop[-1].r;
1328 } else if (v > VT_CONST) {
1329 vtop--;
1330 lexpand();
1331 } else
1332 vtop->r = vtop[-1].r2;
1333 vtop[-1].r2 = VT_CONST;
1334 vtop[-1].type.t = VT_INT | u;
1336 #endif
1338 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1339 /* build a long long from two ints */
1340 static void lbuild(int t)
1342 gv2(RC_INT, RC_INT);
1343 vtop[-1].r2 = vtop[0].r;
1344 vtop[-1].type.t = t;
1345 vpop();
1347 #endif
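/* Rough example for the 32-bit targets served by lexpand()/lbuild(): the
   long long constant 0x0000000100000002 is expanded so the low word 2 stays
   below and the high word 1 ends up on top; lbuild() later pairs the two int
   registers back into one long long SValue via its r2 field. */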
1349 /* convert stack entry to register and duplicate its value in another
1350 register */
1351 static void gv_dup(void)
1353 int rc, t, r, r1;
1354 SValue sv;
1356 t = vtop->type.t;
1357 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1358 if ((t & VT_BTYPE) == VT_LLONG) {
1359 lexpand();
1360 gv_dup();
1361 vswap();
1362 vrotb(3);
1363 gv_dup();
1364 vrotb(4);
1365 /* stack: H L L1 H1 */
1366 lbuild(t);
1367 vrotb(3);
1368 vrotb(3);
1369 vswap();
1370 lbuild(t);
1371 vswap();
1372 } else
1373 #endif
1375 /* duplicate value */
1376 rc = RC_INT;
1377 sv.type.t = VT_INT;
1378 if (is_float(t)) {
1379 rc = RC_FLOAT;
1380 #ifdef TCC_TARGET_X86_64
1381 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1382 rc = RC_ST0;
1384 #endif
1385 sv.type.t = t;
1387 r = gv(rc);
1388 r1 = get_reg(rc);
1389 sv.r = r;
1390 sv.c.i = 0;
1391 load(r1, &sv); /* move r to r1 */
1392 vdup();
1393 /* duplicates value */
1394 if (r != r1)
1395 vtop->r = r1;
1399 /* Generate value test
1401 * Generate a test for any value (jump, comparison and integers) */
1402 ST_FUNC int gvtst(int inv, int t)
1404 int v = vtop->r & VT_VALMASK;
1405 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1406 vpushi(0);
1407 gen_op(TOK_NE);
1409 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1410 /* constant jmp optimization */
1411 if ((vtop->c.i != 0) != inv)
1412 t = gjmp(t);
1413 vtop--;
1414 return t;
1416 return gtst(inv, t);
1419 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1420 /* generate CPU independent (unsigned) long long operations */
1421 static void gen_opl(int op)
1423 int t, a, b, op1, c, i;
1424 int func;
1425 unsigned short reg_iret = REG_IRET;
1426 unsigned short reg_lret = REG_LRET;
1427 SValue tmp;
1429 switch(op) {
1430 case '/':
1431 case TOK_PDIV:
1432 func = TOK___divdi3;
1433 goto gen_func;
1434 case TOK_UDIV:
1435 func = TOK___udivdi3;
1436 goto gen_func;
1437 case '%':
1438 func = TOK___moddi3;
1439 goto gen_mod_func;
1440 case TOK_UMOD:
1441 func = TOK___umoddi3;
1442 gen_mod_func:
1443 #ifdef TCC_ARM_EABI
1444 reg_iret = TREG_R2;
1445 reg_lret = TREG_R3;
1446 #endif
1447 gen_func:
1448 /* call generic long long function */
1449 vpush_global_sym(&func_old_type, func);
1450 vrott(3);
1451 gfunc_call(2);
1452 vpushi(0);
1453 vtop->r = reg_iret;
1454 vtop->r2 = reg_lret;
1455 break;
1456 case '^':
1457 case '&':
1458 case '|':
1459 case '*':
1460 case '+':
1461 case '-':
1462 //pv("gen_opl A",0,2);
1463 t = vtop->type.t;
1464 vswap();
1465 lexpand();
1466 vrotb(3);
1467 lexpand();
1468 /* stack: L1 H1 L2 H2 */
1469 tmp = vtop[0];
1470 vtop[0] = vtop[-3];
1471 vtop[-3] = tmp;
1472 tmp = vtop[-2];
1473 vtop[-2] = vtop[-3];
1474 vtop[-3] = tmp;
1475 vswap();
1476 /* stack: H1 H2 L1 L2 */
1477 //pv("gen_opl B",0,4);
1478 if (op == '*') {
1479 vpushv(vtop - 1);
1480 vpushv(vtop - 1);
1481 gen_op(TOK_UMULL);
1482 lexpand();
1483 /* stack: H1 H2 L1 L2 ML MH */
1484 for(i=0;i<4;i++)
1485 vrotb(6);
1486 /* stack: ML MH H1 H2 L1 L2 */
1487 tmp = vtop[0];
1488 vtop[0] = vtop[-2];
1489 vtop[-2] = tmp;
1490 /* stack: ML MH H1 L2 H2 L1 */
1491 gen_op('*');
1492 vrotb(3);
1493 vrotb(3);
1494 gen_op('*');
1495 /* stack: ML MH M1 M2 */
1496 gen_op('+');
1497 gen_op('+');
1498 } else if (op == '+' || op == '-') {
1499 /* XXX: add a non-carry method too (for MIPS or Alpha) */
1500 if (op == '+')
1501 op1 = TOK_ADDC1;
1502 else
1503 op1 = TOK_SUBC1;
1504 gen_op(op1);
1505 /* stack: H1 H2 (L1 op L2) */
1506 vrotb(3);
1507 vrotb(3);
1508 gen_op(op1 + 1); /* TOK_xxxC2 */
1509 } else {
1510 gen_op(op);
1511 /* stack: H1 H2 (L1 op L2) */
1512 vrotb(3);
1513 vrotb(3);
1514 /* stack: (L1 op L2) H1 H2 */
1515 gen_op(op);
1516 /* stack: (L1 op L2) (H1 op H2) */
1518 /* stack: L H */
1519 lbuild(t);
1520 break;
1521 case TOK_SAR:
1522 case TOK_SHR:
1523 case TOK_SHL:
1524 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1525 t = vtop[-1].type.t;
1526 vswap();
1527 lexpand();
1528 vrotb(3);
1529 /* stack: L H shift */
1530 c = (int)vtop->c.i;
1531 /* constant: simpler */
1532 /* NOTE: all comments are for SHL. the other cases are
1533 done by swapping words */
1534 vpop();
1535 if (op != TOK_SHL)
1536 vswap();
1537 if (c >= 32) {
1538 /* stack: L H */
1539 vpop();
1540 if (c > 32) {
1541 vpushi(c - 32);
1542 gen_op(op);
1544 if (op != TOK_SAR) {
1545 vpushi(0);
1546 } else {
1547 gv_dup();
1548 vpushi(31);
1549 gen_op(TOK_SAR);
1551 vswap();
1552 } else {
1553 vswap();
1554 gv_dup();
1555 /* stack: H L L */
1556 vpushi(c);
1557 gen_op(op);
1558 vswap();
1559 vpushi(32 - c);
1560 if (op == TOK_SHL)
1561 gen_op(TOK_SHR);
1562 else
1563 gen_op(TOK_SHL);
1564 vrotb(3);
1565 /* stack: L L H */
1566 vpushi(c);
1567 if (op == TOK_SHL)
1568 gen_op(TOK_SHL);
1569 else
1570 gen_op(TOK_SHR);
1571 gen_op('|');
1573 if (op != TOK_SHL)
1574 vswap();
1575 lbuild(t);
1576 } else {
1577 /* XXX: should provide a faster fallback on x86 ? */
1578 switch(op) {
1579 case TOK_SAR:
1580 func = TOK___ashrdi3;
1581 goto gen_func;
1582 case TOK_SHR:
1583 func = TOK___lshrdi3;
1584 goto gen_func;
1585 case TOK_SHL:
1586 func = TOK___ashldi3;
1587 goto gen_func;
1590 break;
1591 default:
1592 /* compare operations */
1593 t = vtop->type.t;
1594 vswap();
1595 lexpand();
1596 vrotb(3);
1597 lexpand();
1598 /* stack: L1 H1 L2 H2 */
1599 tmp = vtop[-1];
1600 vtop[-1] = vtop[-2];
1601 vtop[-2] = tmp;
1602 /* stack: L1 L2 H1 H2 */
1603 /* compare high */
1604 op1 = op;
1605 /* when values are equal, we need to compare low words. since
1606 the jump is inverted, we invert the test too. */
1607 if (op1 == TOK_LT)
1608 op1 = TOK_LE;
1609 else if (op1 == TOK_GT)
1610 op1 = TOK_GE;
1611 else if (op1 == TOK_ULT)
1612 op1 = TOK_ULE;
1613 else if (op1 == TOK_UGT)
1614 op1 = TOK_UGE;
1615 a = 0;
1616 b = 0;
1617 gen_op(op1);
1618 if (op == TOK_NE) {
1619 b = gvtst(0, 0);
1620 } else {
1621 a = gvtst(1, 0);
1622 if (op != TOK_EQ) {
1623 /* generate non equal test */
1624 vpushi(TOK_NE);
1625 vtop->r = VT_CMP;
1626 b = gvtst(0, 0);
1629 /* compare low. Always unsigned */
1630 op1 = op;
1631 if (op1 == TOK_LT)
1632 op1 = TOK_ULT;
1633 else if (op1 == TOK_LE)
1634 op1 = TOK_ULE;
1635 else if (op1 == TOK_GT)
1636 op1 = TOK_UGT;
1637 else if (op1 == TOK_GE)
1638 op1 = TOK_UGE;
1639 gen_op(op1);
1640 a = gvtst(1, a);
1641 gsym(b);
1642 vseti(VT_JMPI, a);
1643 break;
1646 #endif
1648 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1650 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1651 return (a ^ b) >> 63 ? -x : x;
1654 static int gen_opic_lt(uint64_t a, uint64_t b)
1656 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
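/* Example: gen_opic_lt() flips the sign bit of both operands so that an
   unsigned compare gives the signed result.  For a = -1 and b = 0:
   a^(1<<63) = 0x7fff...ffff and b^(1<<63) = 0x8000...0000, so the unsigned
   "<" is true, matching -1 < 0.  gen_opic_sdiv() similarly performs the
   unsigned division on the absolute values and restores the sign from
   (a ^ b). */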
1659 /* handle integer constant optimizations and various machine
1660 independent optimizations */
1661 static void gen_opic(int op)
1663 SValue *v1 = vtop - 1;
1664 SValue *v2 = vtop;
1665 int t1 = v1->type.t & VT_BTYPE;
1666 int t2 = v2->type.t & VT_BTYPE;
1667 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1668 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1669 uint64_t l1 = c1 ? v1->c.i : 0;
1670 uint64_t l2 = c2 ? v2->c.i : 0;
1671 int shm = (t1 == VT_LLONG) ? 63 : 31;
1673 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1674 l1 = ((uint32_t)l1 |
1675 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1676 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1677 l2 = ((uint32_t)l2 |
1678 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1680 if (c1 && c2) {
1681 switch(op) {
1682 case '+': l1 += l2; break;
1683 case '-': l1 -= l2; break;
1684 case '&': l1 &= l2; break;
1685 case '^': l1 ^= l2; break;
1686 case '|': l1 |= l2; break;
1687 case '*': l1 *= l2; break;
1689 case TOK_PDIV:
1690 case '/':
1691 case '%':
1692 case TOK_UDIV:
1693 case TOK_UMOD:
1694 /* if division by zero, generate explicit division */
1695 if (l2 == 0) {
1696 if (const_wanted)
1697 tcc_error("division by zero in constant");
1698 goto general_case;
1700 switch(op) {
1701 default: l1 = gen_opic_sdiv(l1, l2); break;
1702 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1703 case TOK_UDIV: l1 = l1 / l2; break;
1704 case TOK_UMOD: l1 = l1 % l2; break;
1706 break;
1707 case TOK_SHL: l1 <<= (l2 & shm); break;
1708 case TOK_SHR: l1 >>= (l2 & shm); break;
1709 case TOK_SAR:
1710 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1711 break;
1712 /* tests */
1713 case TOK_ULT: l1 = l1 < l2; break;
1714 case TOK_UGE: l1 = l1 >= l2; break;
1715 case TOK_EQ: l1 = l1 == l2; break;
1716 case TOK_NE: l1 = l1 != l2; break;
1717 case TOK_ULE: l1 = l1 <= l2; break;
1718 case TOK_UGT: l1 = l1 > l2; break;
1719 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1720 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1721 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1722 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1723 /* logical */
1724 case TOK_LAND: l1 = l1 && l2; break;
1725 case TOK_LOR: l1 = l1 || l2; break;
1726 default:
1727 goto general_case;
1729 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1730 l1 = ((uint32_t)l1 |
1731 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1732 v1->c.i = l1;
1733 vtop--;
1734 } else {
1735 /* if commutative ops, put c2 as constant */
1736 if (c1 && (op == '+' || op == '&' || op == '^' ||
1737 op == '|' || op == '*')) {
1738 vswap();
1739 c2 = c1; //c = c1, c1 = c2, c2 = c;
1740 l2 = l1; //l = l1, l1 = l2, l2 = l;
1742 if (!const_wanted &&
1743 c1 && ((l1 == 0 &&
1744 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1745 (l1 == -1 && op == TOK_SAR))) {
1746 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1747 vtop--;
1748 } else if (!const_wanted &&
1749 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1750 (l2 == -1 && op == '|') ||
1751 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1752 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1753 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1754 if (l2 == 1)
1755 vtop->c.i = 0;
1756 vswap();
1757 vtop--;
1758 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1759 op == TOK_PDIV) &&
1760 l2 == 1) ||
1761 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1762 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1763 l2 == 0) ||
1764 (op == '&' &&
1765 l2 == -1))) {
1766 /* filter out NOP operations like x*1, x-0, x&-1... */
1767 vtop--;
1768 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1769 /* try to use shifts instead of muls or divs */
1770 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1771 int n = -1;
1772 while (l2) {
1773 l2 >>= 1;
1774 n++;
1776 vtop->c.i = n;
1777 if (op == '*')
1778 op = TOK_SHL;
1779 else if (op == TOK_PDIV)
1780 op = TOK_SAR;
1781 else
1782 op = TOK_SHR;
1784 goto general_case;
1785 } else if (c2 && (op == '+' || op == '-') &&
1786 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1787 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1788 /* symbol + constant case */
1789 if (op == '-')
1790 l2 = -l2;
1791 l2 += vtop[-1].c.i;
1792 /* The backends can't always deal with addends to symbols
1793 larger than +-1<<31. Don't construct such. */
1794 if ((int)l2 != l2)
1795 goto general_case;
1796 vtop--;
1797 vtop->c.i = l2;
1798 } else {
1799 general_case:
1800 /* call low level op generator */
1801 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1802 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1803 gen_opl(op);
1804 else
1805 gen_opi(op);
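/* Example of the power-of-two case above: for "x * 8" the constant 8 has a
   single bit set, n is computed as 3 and the operation is rewritten as
   x << 3; a TOK_PDIV by 8 becomes an arithmetic shift right by 3 (TOK_SAR),
   and a TOK_UDIV uses the logical shift TOK_SHR instead. */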
1810 /* generate a floating point operation with constant propagation */
1811 static void gen_opif(int op)
1813 int c1, c2;
1814 SValue *v1, *v2;
1815 long double f1, f2;
1817 v1 = vtop - 1;
1818 v2 = vtop;
1819 /* currently, we cannot do computations with forward symbols */
1820 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1821 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1822 if (c1 && c2) {
1823 if (v1->type.t == VT_FLOAT) {
1824 f1 = v1->c.f;
1825 f2 = v2->c.f;
1826 } else if (v1->type.t == VT_DOUBLE) {
1827 f1 = v1->c.d;
1828 f2 = v2->c.d;
1829 } else {
1830 f1 = v1->c.ld;
1831 f2 = v2->c.ld;
1834 /* NOTE: we only do constant propagation on finite numbers (not
1835 NaN or infinity) (ANSI spec) */
1836 if (!ieee_finite(f1) || !ieee_finite(f2))
1837 goto general_case;
1839 switch(op) {
1840 case '+': f1 += f2; break;
1841 case '-': f1 -= f2; break;
1842 case '*': f1 *= f2; break;
1843 case '/':
1844 if (f2 == 0.0) {
1845 if (const_wanted)
1846 tcc_error("division by zero in constant");
1847 goto general_case;
1849 f1 /= f2;
1850 break;
1851 /* XXX: also handles tests ? */
1852 default:
1853 goto general_case;
1855 /* XXX: overflow test ? */
1856 if (v1->type.t == VT_FLOAT) {
1857 v1->c.f = f1;
1858 } else if (v1->type.t == VT_DOUBLE) {
1859 v1->c.d = f1;
1860 } else {
1861 v1->c.ld = f1;
1863 vtop--;
1864 } else {
1865 general_case:
1866 gen_opf(op);
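/* Example: with both operands constant and finite, "1.5 + 2.25" is folded
   here to 3.75 at compile time.  If either operand is NaN or infinite the
   generic gen_opf() path is taken, and a division by zero is a hard error
   only inside a constant expression; otherwise it also falls back to
   gen_opf(). */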
1870 static int pointed_size(CType *type)
1872 int align;
1873 return type_size(pointed_type(type), &align);
1876 static void vla_runtime_pointed_size(CType *type)
1878 int align;
1879 vla_runtime_type_size(pointed_type(type), &align);
1882 static inline int is_null_pointer(SValue *p)
1884 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1885 return 0;
1886 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1887 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1888 ((p->type.t & VT_BTYPE) == VT_PTR &&
1889 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1892 static inline int is_integer_btype(int bt)
1894 return (bt == VT_BYTE || bt == VT_SHORT ||
1895 bt == VT_INT || bt == VT_LLONG);
1898 /* check types for comparison or subtraction of pointers */
1899 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1901 CType *type1, *type2, tmp_type1, tmp_type2;
1902 int bt1, bt2;
1904 /* null pointers are accepted for all comparisons, as in gcc */
1905 if (is_null_pointer(p1) || is_null_pointer(p2))
1906 return;
1907 type1 = &p1->type;
1908 type2 = &p2->type;
1909 bt1 = type1->t & VT_BTYPE;
1910 bt2 = type2->t & VT_BTYPE;
1911 /* accept comparison between pointer and integer with a warning */
1912 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1913 if (op != TOK_LOR && op != TOK_LAND )
1914 tcc_warning("comparison between pointer and integer");
1915 return;
1918 /* both must be pointers or implicit function pointers */
1919 if (bt1 == VT_PTR) {
1920 type1 = pointed_type(type1);
1921 } else if (bt1 != VT_FUNC)
1922 goto invalid_operands;
1924 if (bt2 == VT_PTR) {
1925 type2 = pointed_type(type2);
1926 } else if (bt2 != VT_FUNC) {
1927 invalid_operands:
1928 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1930 if ((type1->t & VT_BTYPE) == VT_VOID ||
1931 (type2->t & VT_BTYPE) == VT_VOID)
1932 return;
1933 tmp_type1 = *type1;
1934 tmp_type2 = *type2;
1935 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1936 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1937 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1938 /* gcc-like error if '-' is used */
1939 if (op == '-')
1940 goto invalid_operands;
1941 else
1942 tcc_warning("comparison of distinct pointer types lacks a cast");
1946 /* generic gen_op: handles type problems */
1947 ST_FUNC void gen_op(int op)
1949 int u, t1, t2, bt1, bt2, t;
1950 CType type1;
1952 redo:
1953 t1 = vtop[-1].type.t;
1954 t2 = vtop[0].type.t;
1955 bt1 = t1 & VT_BTYPE;
1956 bt2 = t2 & VT_BTYPE;
1958 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1959 tcc_error("operation on a struct");
1960 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1961 if (bt2 == VT_FUNC) {
1962 mk_pointer(&vtop->type);
1963 gaddrof();
1965 if (bt1 == VT_FUNC) {
1966 vswap();
1967 mk_pointer(&vtop->type);
1968 gaddrof();
1969 vswap();
1971 goto redo;
1972 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1973 /* at least one operand is a pointer */
1974 /* relational op: both operands must be pointers */
1975 if (op >= TOK_ULT && op <= TOK_LOR) {
1976 check_comparison_pointer_types(vtop - 1, vtop, op);
1977 /* pointers are handled as unsigned */
1978 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1979 t = VT_LLONG | VT_UNSIGNED;
1980 #else
1981 t = VT_INT | VT_UNSIGNED;
1982 #endif
1983 goto std_op;
1985 /* if both pointers, then it must be the '-' op */
1986 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1987 if (op != '-')
1988 tcc_error("cannot use pointers here");
1989 check_comparison_pointer_types(vtop - 1, vtop, op);
1990 /* XXX: check that types are compatible */
1991 if (vtop[-1].type.t & VT_VLA) {
1992 vla_runtime_pointed_size(&vtop[-1].type);
1993 } else {
1994 vpushi(pointed_size(&vtop[-1].type));
1996 vrott(3);
1997 gen_opic(op);
1998 /* set to integer type */
1999 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2000 vtop->type.t = VT_LLONG;
2001 #else
2002 vtop->type.t = VT_INT;
2003 #endif
2004 vswap();
2005 gen_op(TOK_PDIV);
2006 } else {
2007 /* exactly one pointer : must be '+' or '-'. */
2008 if (op != '-' && op != '+')
2009 tcc_error("cannot use pointers here");
2010 /* Put pointer as first operand */
2011 if (bt2 == VT_PTR) {
2012 vswap();
2013 t = t1, t1 = t2, t2 = t;
2015 #if PTR_SIZE == 4
2016 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2017 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2018 gen_cast(&int_type);
2019 #endif
2020 type1 = vtop[-1].type;
2021 type1.t &= ~VT_ARRAY;
2022 if (vtop[-1].type.t & VT_VLA)
2023 vla_runtime_pointed_size(&vtop[-1].type);
2024 else {
2025 u = pointed_size(&vtop[-1].type);
2026 if (u < 0)
2027 tcc_error("unknown array element size");
2028 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2029 vpushll(u);
2030 #else
2031 /* XXX: cast to int ? (long long case) */
2032 vpushi(u);
2033 #endif
2035 gen_op('*');
2036 #if 0
2037 /* #ifdef CONFIG_TCC_BCHECK
2038 The main reason for removing this code:
2039 #include <stdio.h>
2040 int main ()
2042 int v[10];
2043 int i = 10;
2044 int j = 9;
2045 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2046 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2048 When this code is on, the output looks like
2049 v+i-j = 0xfffffffe
2050 v+(i-j) = 0xbff84000
2052 /* if evaluating constant expression, no code should be
2053 generated, so no bound check */
2054 if (tcc_state->do_bounds_check && !const_wanted) {
2055 /* if bounded pointers, we generate a special code to
2056 test bounds */
2057 if (op == '-') {
2058 vpushi(0);
2059 vswap();
2060 gen_op('-');
2062 gen_bounded_ptr_add();
2063 } else
2064 #endif
2066 gen_opic(op);
2068 /* restore the type in case gen_opic() swapped the operands */
2069 vtop->type = type1;
2071 } else if (is_float(bt1) || is_float(bt2)) {
2072 /* compute bigger type and do implicit casts */
2073 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2074 t = VT_LDOUBLE;
2075 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2076 t = VT_DOUBLE;
2077 } else {
2078 t = VT_FLOAT;
2080 /* floats can only be used for a few operations */
2081 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2082 (op < TOK_ULT || op > TOK_GT))
2083 tcc_error("invalid operands for binary operation");
2084 goto std_op;
2085 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2086 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2087 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2088 t |= VT_UNSIGNED;
2089 goto std_op;
2090 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2091 /* cast to biggest op */
2092 t = VT_LLONG;
2093 /* convert to unsigned if it does not fit in a long long */
2094 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2095 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2096 t |= VT_UNSIGNED;
2097 goto std_op;
2098 } else {
2099 /* integer operations */
2100 t = VT_INT;
2101 /* convert to unsigned if it does not fit in an integer */
2102 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2103 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2104 t |= VT_UNSIGNED;
2105 std_op:
2106 /* XXX: currently, some unsigned operations are explicit, so
2107 we modify them here */
2108 if (t & VT_UNSIGNED) {
2109 if (op == TOK_SAR)
2110 op = TOK_SHR;
2111 else if (op == '/')
2112 op = TOK_UDIV;
2113 else if (op == '%')
2114 op = TOK_UMOD;
2115 else if (op == TOK_LT)
2116 op = TOK_ULT;
2117 else if (op == TOK_GT)
2118 op = TOK_UGT;
2119 else if (op == TOK_LE)
2120 op = TOK_ULE;
2121 else if (op == TOK_GE)
2122 op = TOK_UGE;
2124 vswap();
2125 type1.t = t;
2126 gen_cast(&type1);
2127 vswap();
2128 /* special case for shifts and long long: we keep the shift as
2129 an integer */
2130 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2131 type1.t = VT_INT;
2132 gen_cast(&type1);
2133 if (is_float(t))
2134 gen_opif(op);
2135 else
2136 gen_opic(op);
2137 if (op >= TOK_ULT && op <= TOK_GT) {
2138 /* relational op: the result is an int */
2139 vtop->type.t = VT_INT;
2140 } else {
2141 vtop->type.t = t;
2144 // Make sure that we have converted to an rvalue:
2145 if (vtop->r & VT_LVAL)
2146 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2149 #ifndef TCC_TARGET_ARM
2150 /* generic itof for unsigned long long case */
2151 static void gen_cvt_itof1(int t)
2153 #ifdef TCC_TARGET_ARM64
2154 gen_cvt_itof(t);
2155 #else
2156 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2157 (VT_LLONG | VT_UNSIGNED)) {
2159 if (t == VT_FLOAT)
2160 vpush_global_sym(&func_old_type, TOK___floatundisf);
2161 #if LDOUBLE_SIZE != 8
2162 else if (t == VT_LDOUBLE)
2163 vpush_global_sym(&func_old_type, TOK___floatundixf);
2164 #endif
2165 else
2166 vpush_global_sym(&func_old_type, TOK___floatundidf);
2167 vrott(2);
2168 gfunc_call(1);
2169 vpushi(0);
2170 vtop->r = reg_fret(t);
2171 } else {
2172 gen_cvt_itof(t);
2174 #endif
2176 #endif
2178 /* generic ftoi for unsigned long long case */
2179 static void gen_cvt_ftoi1(int t)
2181 #ifdef TCC_TARGET_ARM64
2182 gen_cvt_ftoi(t);
2183 #else
2184 int st;
2186 if (t == (VT_LLONG | VT_UNSIGNED)) {
2187 /* not handled natively */
2188 st = vtop->type.t & VT_BTYPE;
2189 if (st == VT_FLOAT)
2190 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2191 #if LDOUBLE_SIZE != 8
2192 else if (st == VT_LDOUBLE)
2193 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2194 #endif
2195 else
2196 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2197 vrott(2);
2198 gfunc_call(1);
2199 vpushi(0);
2200 vtop->r = REG_IRET;
2201 vtop->r2 = REG_LRET;
2202 } else {
2203 gen_cvt_ftoi(t);
2205 #endif
2208 /* force char or short cast */
2209 static void force_charshort_cast(int t)
2211 int bits, dbt;
2212 dbt = t & VT_BTYPE;
2213 /* XXX: add optimization if lvalue : just change type and offset */
2214 if (dbt == VT_BYTE)
2215 bits = 8;
2216 else
2217 bits = 16;
2218 if (t & VT_UNSIGNED) {
2219 vpushi((1 << bits) - 1);
2220 gen_op('&');
2221 } else {
2222 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2223 bits = 64 - bits;
2224 else
2225 bits = 32 - bits;
2226 vpushi(bits);
2227 gen_op(TOK_SHL);
2228 /* result must be signed, otherwise the SAR is converted to an SHR
2229 This was not the case when "t" was a signed short
2230 and the last value on the stack was an unsigned int */
2231 vtop->type.t &= ~VT_UNSIGNED;
2232 vpushi(bits);
2233 gen_op(TOK_SAR);
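/* Example (assuming a 32-bit int): truncating the value 0x1ff to a char
   with the code above gives 0xff (255) for unsigned char (mask with
   (1 << 8) - 1), and -1 for signed char (shift left by 24, then
   arithmetic shift right by 24). */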
2237 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2238 static void gen_cast(CType *type)
2240 int sbt, dbt, sf, df, c, p;
2242 /* special delayed cast for char/short */
2243 /* XXX: in some cases (multiple cascaded casts), it may still
2244 be incorrect */
2245 if (vtop->r & VT_MUSTCAST) {
2246 vtop->r &= ~VT_MUSTCAST;
2247 force_charshort_cast(vtop->type.t);
2250 /* bitfields first get cast to ints */
2251 if (vtop->type.t & VT_BITFIELD) {
2252 gv(RC_INT);
2255 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2256 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2258 if (sbt != dbt) {
2259 sf = is_float(sbt);
2260 df = is_float(dbt);
2261 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2262 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2263 if (c) {
2264 /* constant case: we can do it now */
2265 /* XXX: in ISOC, cannot do it if error in convert */
2266 if (sbt == VT_FLOAT)
2267 vtop->c.ld = vtop->c.f;
2268 else if (sbt == VT_DOUBLE)
2269 vtop->c.ld = vtop->c.d;
2271 if (df) {
2272 if ((sbt & VT_BTYPE) == VT_LLONG) {
2273 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2274 vtop->c.ld = vtop->c.i;
2275 else
2276 vtop->c.ld = -(long double)-vtop->c.i;
2277 } else if(!sf) {
2278 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2279 vtop->c.ld = (uint32_t)vtop->c.i;
2280 else
2281 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2284 if (dbt == VT_FLOAT)
2285 vtop->c.f = (float)vtop->c.ld;
2286 else if (dbt == VT_DOUBLE)
2287 vtop->c.d = (double)vtop->c.ld;
2288 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2289 vtop->c.i = vtop->c.ld;
2290 } else if (sf && dbt == VT_BOOL) {
2291 vtop->c.i = (vtop->c.ld != 0);
2292 } else {
2293 if(sf)
2294 vtop->c.i = vtop->c.ld;
2295 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2297 else if (sbt & VT_UNSIGNED)
2298 vtop->c.i = (uint32_t)vtop->c.i;
2299 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2300 else if (sbt == VT_PTR)
2302 #endif
2303 else if (sbt != VT_LLONG)
2304 vtop->c.i = ((uint32_t)vtop->c.i |
2305 -(vtop->c.i & 0x80000000));
2307 if (dbt == (VT_LLONG|VT_UNSIGNED))
2309 else if (dbt == VT_BOOL)
2310 vtop->c.i = (vtop->c.i != 0);
2311 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2312 else if (dbt == VT_PTR)
2314 #endif
2315 else if (dbt != VT_LLONG) {
2316 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2317 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2318 0xffffffff);
2319 vtop->c.i &= m;
2320 if (!(dbt & VT_UNSIGNED))
2321 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2324 } else if (p && dbt == VT_BOOL) {
2325 vtop->r = VT_CONST;
2326 vtop->c.i = 1;
2327 } else {
2328 /* non constant case: generate code */
2329 if (sf && df) {
2330 /* convert from fp to fp */
2331 gen_cvt_ftof(dbt);
2332 } else if (df) {
2333 /* convert int to fp */
2334 gen_cvt_itof1(dbt);
2335 } else if (sf) {
2336 /* convert fp to int */
2337 if (dbt == VT_BOOL) {
2338 vpushi(0);
2339 gen_op(TOK_NE);
2340 } else {
2341 /* we handle char/short/etc... with generic code */
2342 if (dbt != (VT_INT | VT_UNSIGNED) &&
2343 dbt != (VT_LLONG | VT_UNSIGNED) &&
2344 dbt != VT_LLONG)
2345 dbt = VT_INT;
2346 gen_cvt_ftoi1(dbt);
2347 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2348 /* additional cast for char/short... */
2349 vtop->type.t = dbt;
2350 gen_cast(type);
2353 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2354 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2355 if ((sbt & VT_BTYPE) != VT_LLONG) {
2356 /* scalar to long long */
2357 /* machine independent conversion */
2358 gv(RC_INT);
2359 /* generate high word */
2360 if (sbt == (VT_INT | VT_UNSIGNED)) {
2361 vpushi(0);
2362 gv(RC_INT);
2363 } else {
2364 if (sbt == VT_PTR) {
2365 /* cast from pointer to int before we apply
2366 shift operation, which pointers don't support */
2367 gen_cast(&int_type);
2369 gv_dup();
2370 vpushi(31);
2371 gen_op(TOK_SAR);
2373 /* patch second register */
2374 vtop[-1].r2 = vtop->r;
2375 vpop();
2377 #else
2378 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2379 (dbt & VT_BTYPE) == VT_PTR ||
2380 (dbt & VT_BTYPE) == VT_FUNC) {
2381 if ((sbt & VT_BTYPE) != VT_LLONG &&
2382 (sbt & VT_BTYPE) != VT_PTR &&
2383 (sbt & VT_BTYPE) != VT_FUNC) {
2384 /* need to convert from 32bit to 64bit */
2385 gv(RC_INT);
2386 if (sbt != (VT_INT | VT_UNSIGNED)) {
2387 #if defined(TCC_TARGET_ARM64)
2388 gen_cvt_sxtw();
2389 #elif defined(TCC_TARGET_X86_64)
2390 int r = gv(RC_INT);
2391 /* x86_64 specific: movslq */
2392 o(0x6348);
2393 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2394 #else
2395 #error
2396 #endif
2399 #endif
2400 } else if (dbt == VT_BOOL) {
2401 /* scalar to bool */
2402 vpushi(0);
2403 gen_op(TOK_NE);
2404 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2405 (dbt & VT_BTYPE) == VT_SHORT) {
2406 if (sbt == VT_PTR) {
2407 vtop->type.t = VT_INT;
2408 tcc_warning("nonportable conversion from pointer to char/short");
2410 force_charshort_cast(dbt);
2411 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2412 } else if ((dbt & VT_BTYPE) == VT_INT) {
2413 /* scalar to int */
2414 if ((sbt & VT_BTYPE) == VT_LLONG) {
2415 /* from long long: just take low order word */
2416 lexpand();
2417 vpop();
2419 /* if lvalue and single word type, nothing to do because
2420 the lvalue already contains the real type size (see
2421 VT_LVAL_xxx constants) */
2422 #endif
2425 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2426 /* if we are casting between pointer types,
2427 we must update the VT_LVAL_xxx size */
2428 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2429 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2431 vtop->type = *type;
2434 /* return type size as known at compile time. Put alignment at 'a' */
2435 ST_FUNC int type_size(CType *type, int *a)
2437 Sym *s;
2438 int bt;
2440 bt = type->t & VT_BTYPE;
2441 if (bt == VT_STRUCT) {
2442 /* struct/union */
2443 s = type->ref;
2444 *a = s->r;
2445 return s->c;
2446 } else if (bt == VT_PTR) {
2447 if (type->t & VT_ARRAY) {
2448 int ts;
2450 s = type->ref;
2451 ts = type_size(&s->type, a);
2453 if (ts < 0 && s->c < 0)
2454 ts = -ts;
2456 return ts * s->c;
2457 } else {
2458 *a = PTR_SIZE;
2459 return PTR_SIZE;
2461 } else if (bt == VT_LDOUBLE) {
2462 *a = LDOUBLE_ALIGN;
2463 return LDOUBLE_SIZE;
2464 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2465 #ifdef TCC_TARGET_I386
2466 #ifdef TCC_TARGET_PE
2467 *a = 8;
2468 #else
2469 *a = 4;
2470 #endif
2471 #elif defined(TCC_TARGET_ARM)
2472 #ifdef TCC_ARM_EABI
2473 *a = 8;
2474 #else
2475 *a = 4;
2476 #endif
2477 #else
2478 *a = 8;
2479 #endif
2480 return 8;
2481 } else if (bt == VT_INT || bt == VT_FLOAT) {
2482 *a = 4;
2483 return 4;
2484 } else if (bt == VT_SHORT) {
2485 *a = 2;
2486 return 2;
2487 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2488 *a = 8;
2489 return 16;
2490 } else if (bt == VT_ENUM) {
2491 *a = 4;
2492 /* Enums might be incomplete, so don't just return '4' here. */
2493 return type->ref->c;
2494 } else {
2495 /* char, void, function, _Bool */
2496 *a = 1;
2497 return 1;
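/* Example: for 'int[10]' the VT_PTR|VT_ARRAY branch above multiplies the
   element size by s->c, giving 40 bytes with *a == 4; note that on i386
   (non-PE) doubles and long longs report an alignment of only 4. */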
2501 /* push the type size as known at run time on top of the value stack. Put
2502 alignment at 'a' */
2503 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2505 if (type->t & VT_VLA) {
2506 type_size(&type->ref->type, a);
2507 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2508 } else {
2509 vpushi(type_size(type, a));
2513 static void vla_sp_restore(void) {
2514 if (vlas_in_scope) {
2515 gen_vla_sp_restore(vla_sp_loc);
2519 static void vla_sp_restore_root(void) {
2520 if (vlas_in_scope) {
2521 gen_vla_sp_restore(vla_sp_root_loc);
2525 /* return the pointed type of t */
2526 static inline CType *pointed_type(CType *type)
2528 return &type->ref->type;
2531 /* modify 'type' so that it becomes a pointer to the original type. */
2532 ST_FUNC void mk_pointer(CType *type)
2534 Sym *s;
2535 s = sym_push(SYM_FIELD, type, 0, -1);
2536 type->t = VT_PTR | (type->t & ~VT_TYPE);
2537 type->ref = s;
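/* Note: the original type is saved in a SYM_FIELD symbol referenced by
   type->ref, while only the bits outside VT_TYPE (the storage-class bits)
   remain on the resulting pointer type itself. */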
2540 /* compare function types. OLD functions match any new functions */
2541 static int is_compatible_func(CType *type1, CType *type2)
2543 Sym *s1, *s2;
2545 s1 = type1->ref;
2546 s2 = type2->ref;
2547 if (!is_compatible_types(&s1->type, &s2->type))
2548 return 0;
2549 /* check func_call */
2550 if (s1->a.func_call != s2->a.func_call)
2551 return 0;
2552 /* XXX: not complete */
2553 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2554 return 1;
2555 if (s1->c != s2->c)
2556 return 0;
2557 while (s1 != NULL) {
2558 if (s2 == NULL)
2559 return 0;
2560 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2561 return 0;
2562 s1 = s1->next;
2563 s2 = s2->next;
2565 if (s2)
2566 return 0;
2567 return 1;
2570 /* return true if type1 and type2 are the same. If unqualified is
2571 true, qualifiers on the types are ignored.
2573 - enums are not checked, matching gcc's __builtin_types_compatible_p () */
2575 static int compare_types(CType *type1, CType *type2, int unqualified)
2577 int bt1, t1, t2;
2579 t1 = type1->t & VT_TYPE;
2580 t2 = type2->t & VT_TYPE;
2581 if (unqualified) {
2582 /* strip qualifiers before comparing */
2583 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2584 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2586 /* Default vs. explicit signedness only matters for char */
2587 if ((t1 & VT_BTYPE) != VT_BYTE) {
2588 t1 &= ~VT_DEFSIGN;
2589 t2 &= ~VT_DEFSIGN;
2591 /* An enum is compatible with (unsigned) int. Ideally we would
2592 store the enum's signedness in type->ref.a.<some_bit> and
2593 only accept unsigned enums with unsigned int and vice versa.
2594 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2595 from pointer target types, so we can't add it here either. */
2596 if ((t1 & VT_BTYPE) == VT_ENUM) {
2597 t1 = VT_INT;
2598 if (type1->ref->a.unsigned_enum)
2599 t1 |= VT_UNSIGNED;
2601 if ((t2 & VT_BTYPE) == VT_ENUM) {
2602 t2 = VT_INT;
2603 if (type2->ref->a.unsigned_enum)
2604 t2 |= VT_UNSIGNED;
2606 /* XXX: bitfields ? */
2607 if (t1 != t2)
2608 return 0;
2609 /* test more complicated cases */
2610 bt1 = t1 & VT_BTYPE;
2611 if (bt1 == VT_PTR) {
2612 type1 = pointed_type(type1);
2613 type2 = pointed_type(type2);
2614 return is_compatible_types(type1, type2);
2615 } else if (bt1 == VT_STRUCT) {
2616 return (type1->ref == type2->ref);
2617 } else if (bt1 == VT_FUNC) {
2618 return is_compatible_func(type1, type2);
2619 } else {
2620 return 1;
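/* Examples of the rules above: two pointer types are compared through
   their pointed-to types (recursively, qualifiers included), two structs
   only match if they share the same Sym, and an enum compares equal to
   int (or unsigned int, depending on its unsigned_enum flag). */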
2624 /* return true if type1 and type2 are exactly the same (including
2625 qualifiers). */
2627 static int is_compatible_types(CType *type1, CType *type2)
2629 return compare_types(type1,type2,0);
2632 /* return true if type1 and type2 are the same (ignoring qualifiers). */
2634 static int is_compatible_parameter_types(CType *type1, CType *type2)
2636 return compare_types(type1,type2,1);
2639 /* print a type. If 'varstr' is not NULL, then the variable is also
2640 printed in the type */
2641 /* XXX: union */
2642 /* XXX: add array and function pointers */
2643 static void type_to_str(char *buf, int buf_size,
2644 CType *type, const char *varstr)
2646 int bt, v, t;
2647 Sym *s, *sa;
2648 char buf1[256];
2649 const char *tstr;
2651 t = type->t;
2652 bt = t & VT_BTYPE;
2653 buf[0] = '\0';
2654 if (t & VT_CONSTANT)
2655 pstrcat(buf, buf_size, "const ");
2656 if (t & VT_VOLATILE)
2657 pstrcat(buf, buf_size, "volatile ");
2658 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2659 pstrcat(buf, buf_size, "unsigned ");
2660 else if (t & VT_DEFSIGN)
2661 pstrcat(buf, buf_size, "signed ");
2662 if (t & VT_EXTERN)
2663 pstrcat(buf, buf_size, "extern ");
2664 if (t & VT_STATIC)
2665 pstrcat(buf, buf_size, "static ");
2666 if (t & VT_TYPEDEF)
2667 pstrcat(buf, buf_size, "typedef ");
2668 if (t & VT_INLINE)
2669 pstrcat(buf, buf_size, "inline ");
2670 buf_size -= strlen(buf);
2671 buf += strlen(buf);
2672 switch(bt) {
2673 case VT_VOID:
2674 tstr = "void";
2675 goto add_tstr;
2676 case VT_BOOL:
2677 tstr = "_Bool";
2678 goto add_tstr;
2679 case VT_BYTE:
2680 tstr = "char";
2681 goto add_tstr;
2682 case VT_SHORT:
2683 tstr = "short";
2684 goto add_tstr;
2685 case VT_INT:
2686 tstr = "int";
2687 goto add_tstr;
2688 case VT_LONG:
2689 tstr = "long";
2690 goto add_tstr;
2691 case VT_LLONG:
2692 tstr = "long long";
2693 goto add_tstr;
2694 case VT_FLOAT:
2695 tstr = "float";
2696 goto add_tstr;
2697 case VT_DOUBLE:
2698 tstr = "double";
2699 goto add_tstr;
2700 case VT_LDOUBLE:
2701 tstr = "long double";
2702 add_tstr:
2703 pstrcat(buf, buf_size, tstr);
2704 break;
2705 case VT_ENUM:
2706 case VT_STRUCT:
2707 if (bt == VT_STRUCT)
2708 tstr = "struct ";
2709 else
2710 tstr = "enum ";
2711 pstrcat(buf, buf_size, tstr);
2712 v = type->ref->v & ~SYM_STRUCT;
2713 if (v >= SYM_FIRST_ANOM)
2714 pstrcat(buf, buf_size, "<anonymous>");
2715 else
2716 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2717 break;
2718 case VT_FUNC:
2719 s = type->ref;
2720 type_to_str(buf, buf_size, &s->type, varstr);
2721 pstrcat(buf, buf_size, "(");
2722 sa = s->next;
2723 while (sa != NULL) {
2724 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2725 pstrcat(buf, buf_size, buf1);
2726 sa = sa->next;
2727 if (sa)
2728 pstrcat(buf, buf_size, ", ");
2730 pstrcat(buf, buf_size, ")");
2731 goto no_var;
2732 case VT_PTR:
2733 s = type->ref;
2734 if (t & VT_ARRAY) {
2735 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2736 type_to_str(buf, buf_size, &s->type, buf1);
2737 goto no_var;
2739 pstrcpy(buf1, sizeof(buf1), "*");
2740 if (t & VT_CONSTANT)
2741 pstrcat(buf1, buf_size, "const ");
2742 if (t & VT_VOLATILE)
2743 pstrcat(buf1, buf_size, "volatile ");
2744 if (varstr)
2745 pstrcat(buf1, sizeof(buf1), varstr);
2746 type_to_str(buf, buf_size, &s->type, buf1);
2747 goto no_var;
2749 if (varstr) {
2750 pstrcat(buf, buf_size, " ");
2751 pstrcat(buf, buf_size, varstr);
2753 no_var: ;
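/* Example output (illustrative): for a pointer to const char with varstr
   "p" the recursion above produces "const char *p"; a function type prints
   its return type followed by the parenthesized parameter list. */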
2756 /* verify type compatibility to store vtop in 'dt' type, and generate
2757 casts if needed. */
2758 static void gen_assign_cast(CType *dt)
2760 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2761 char buf1[256], buf2[256];
2762 int dbt, sbt;
2764 st = &vtop->type; /* source type */
2765 dbt = dt->t & VT_BTYPE;
2766 sbt = st->t & VT_BTYPE;
2767 if (sbt == VT_VOID || dbt == VT_VOID) {
2768 if (sbt == VT_VOID && dbt == VT_VOID)
2769 ; /*
2770 It is Ok if both are void
2771 A test program:
2772 void func1() {}
2773 void func2() {
2774 return func1();
2776 gcc accepts this program. */
2778 else
2779 tcc_error("cannot cast from/to void");
2781 if (dt->t & VT_CONSTANT)
2782 tcc_warning("assignment of read-only location");
2783 switch(dbt) {
2784 case VT_PTR:
2785 /* special cases for pointers */
2786 /* '0' can also be a pointer */
2787 if (is_null_pointer(vtop))
2788 goto type_ok;
2789 /* accept implicit integer to pointer conversion with a warning */
2790 if (is_integer_btype(sbt)) {
2791 tcc_warning("assignment makes pointer from integer without a cast");
2792 goto type_ok;
2794 type1 = pointed_type(dt);
2795 /* a function is implicitly a function pointer */
2796 if (sbt == VT_FUNC) {
2797 if ((type1->t & VT_BTYPE) != VT_VOID &&
2798 !is_compatible_types(pointed_type(dt), st))
2799 tcc_warning("assignment from incompatible pointer type");
2800 goto type_ok;
2802 if (sbt != VT_PTR)
2803 goto error;
2804 type2 = pointed_type(st);
2805 if ((type1->t & VT_BTYPE) == VT_VOID ||
2806 (type2->t & VT_BTYPE) == VT_VOID) {
2807 /* void * can match anything */
2808 } else {
2809 /* exact type match, except for qualifiers */
2810 tmp_type1 = *type1;
2811 tmp_type2 = *type2;
2812 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2813 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2814 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2815 /* Like GCC, don't warn by default for mere changes
2816 in pointer target signedness. Do warn for different
2817 base types, though, in particular for unsigned enums
2818 and signed int targets. */
2819 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2820 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2821 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2823 else
2824 tcc_warning("assignment from incompatible pointer type");
2827 /* check const and volatile */
2828 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2829 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2830 tcc_warning("assignment discards qualifiers from pointer target type");
2831 break;
2832 case VT_BYTE:
2833 case VT_SHORT:
2834 case VT_INT:
2835 case VT_LLONG:
2836 if (sbt == VT_PTR || sbt == VT_FUNC) {
2837 tcc_warning("assignment makes integer from pointer without a cast");
2838 } else if (sbt == VT_STRUCT) {
2839 goto case_VT_STRUCT;
2841 /* XXX: more tests */
2842 break;
2843 case VT_STRUCT:
2844 case_VT_STRUCT:
2845 tmp_type1 = *dt;
2846 tmp_type2 = *st;
2847 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2848 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2849 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2850 error:
2851 type_to_str(buf1, sizeof(buf1), st, NULL);
2852 type_to_str(buf2, sizeof(buf2), dt, NULL);
2853 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2855 break;
2857 type_ok:
2858 gen_cast(dt);
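/* Example: assigning an int expression to a pointer lvalue only emits
   "assignment makes pointer from integer without a cast" and continues,
   whereas assigning a struct of an incompatible type is a hard error
   ("cannot cast '...' to '...'"). */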
2861 /* store vtop in lvalue pushed on stack */
2862 ST_FUNC void vstore(void)
2864 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2866 ft = vtop[-1].type.t;
2867 sbt = vtop->type.t & VT_BTYPE;
2868 dbt = ft & VT_BTYPE;
2869 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2870 (sbt == VT_INT && dbt == VT_SHORT))
2871 && !(vtop->type.t & VT_BITFIELD)) {
2872 /* optimize char/short casts */
2873 delayed_cast = VT_MUSTCAST;
2874 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2875 ((1 << VT_STRUCT_SHIFT) - 1));
2876 /* XXX: factorize */
2877 if (ft & VT_CONSTANT)
2878 tcc_warning("assignment of read-only location");
2879 } else {
2880 delayed_cast = 0;
2881 if (!(ft & VT_BITFIELD))
2882 gen_assign_cast(&vtop[-1].type);
2885 if (sbt == VT_STRUCT) {
2886 /* if structure, only generate pointer */
2887 /* structure assignment : generate memcpy */
2888 /* XXX: optimize if small size */
2889 size = type_size(&vtop->type, &align);
2891 /* destination */
2892 vswap();
2893 vtop->type.t = VT_PTR;
2894 gaddrof();
2896 /* address of memcpy() */
2897 #ifdef TCC_ARM_EABI
2898 if(!(align & 7))
2899 vpush_global_sym(&func_old_type, TOK_memcpy8);
2900 else if(!(align & 3))
2901 vpush_global_sym(&func_old_type, TOK_memcpy4);
2902 else
2903 #endif
2904 /* Use memmove, rather than memcpy, as dest and src may overlap: */
2905 vpush_global_sym(&func_old_type, TOK_memmove);
2907 vswap();
2908 /* source */
2909 vpushv(vtop - 2);
2910 vtop->type.t = VT_PTR;
2911 gaddrof();
2912 /* type size */
2913 vpushi(size);
2914 gfunc_call(3);
2916 /* leave source on stack */
2917 } else if (ft & VT_BITFIELD) {
2918 /* bitfield store handling */
2920 /* save lvalue as expression result (example: s.b = s.a = n;) */
2921 vdup(), vtop[-1] = vtop[-2];
2923 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2924 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2925 /* remove bit field info to avoid loops */
2926 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2928 if((ft & VT_BTYPE) == VT_BOOL) {
2929 gen_cast(&vtop[-1].type);
2930 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2933 /* duplicate destination */
2934 vdup();
2935 vtop[-1] = vtop[-2];
2937 /* mask and shift source */
2938 if((ft & VT_BTYPE) != VT_BOOL) {
2939 if((ft & VT_BTYPE) == VT_LLONG) {
2940 vpushll((1ULL << bit_size) - 1ULL);
2941 } else {
2942 vpushi((1 << bit_size) - 1);
2944 gen_op('&');
2946 vpushi(bit_pos);
2947 gen_op(TOK_SHL);
2948 /* load destination, mask and or with source */
2949 vswap();
2950 if((ft & VT_BTYPE) == VT_LLONG) {
2951 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2952 } else {
2953 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2955 gen_op('&');
2956 gen_op('|');
2957 /* store result */
2958 vstore();
2959 /* ... and discard */
2960 vpop();
2962 } else {
2963 #ifdef CONFIG_TCC_BCHECK
2964 /* bound check case */
2965 if (vtop[-1].r & VT_MUSTBOUND) {
2966 vswap();
2967 gbound();
2968 vswap();
2970 #endif
2971 rc = RC_INT;
2972 if (is_float(ft)) {
2973 rc = RC_FLOAT;
2974 #ifdef TCC_TARGET_X86_64
2975 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2976 rc = RC_ST0;
2977 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2978 rc = RC_FRET;
2980 #endif
2982 r = gv(rc); /* generate value */
2983 /* if lvalue was saved on stack, must read it */
2984 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2985 SValue sv;
2986 t = get_reg(RC_INT);
2987 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2988 sv.type.t = VT_PTR;
2989 #else
2990 sv.type.t = VT_INT;
2991 #endif
2992 sv.r = VT_LOCAL | VT_LVAL;
2993 sv.c.i = vtop[-1].c.i;
2994 load(t, &sv);
2995 vtop[-1].r = t | VT_LVAL;
2997 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2998 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2999 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3000 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3001 #else
3002 if ((ft & VT_BTYPE) == VT_LLONG) {
3003 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3004 #endif
3005 vtop[-1].type.t = load_type;
3006 store(r, vtop - 1);
3007 vswap();
3008 /* convert to int to increment easily */
3009 vtop->type.t = addr_type;
3010 gaddrof();
3011 vpushi(load_size);
3012 gen_op('+');
3013 vtop->r |= VT_LVAL;
3014 vswap();
3015 vtop[-1].type.t = load_type;
3016 /* XXX: it works because r2 is spilled last ! */
3017 store(vtop->r2, vtop - 1);
3018 } else {
3019 store(r, vtop - 1);
3022 vswap();
3023 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3024 vtop->r |= delayed_cast;
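/* Illustration of the bit-field store above: for a 3-bit field at
   bit_pos 2 the source is masked with 0x7 and shifted left by 2, the
   destination word is masked with ~0x1c, and the two are or'ed before
   the recursive vstore() writes the result back. */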
3028 /* handle pre/post increment and decrement. 'post' is true for the postfix form; c is the token ++ or -- */
3029 ST_FUNC void inc(int post, int c)
3031 test_lvalue();
3032 vdup(); /* save lvalue */
3033 if (post) {
3034 gv_dup(); /* duplicate value */
3035 vrotb(3);
3036 vrotb(3);
3038 /* add constant */
3039 vpushi(c - TOK_MID);
3040 gen_op('+');
3041 vstore(); /* store value */
3042 if (post)
3043 vpop(); /* if post op, return saved value */
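/* Note: vpushi(c - TOK_MID) relies on the token numbering placing TOK_MID
   between TOK_DEC and TOK_INC, so the pushed constant is +1 for '++' and
   -1 for '--'. */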
3046 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3048 /* read the string */
3049 if (tok != TOK_STR)
3050 expect(msg);
3051 cstr_new(astr);
3052 while (tok == TOK_STR) {
3053 /* XXX: add \0 handling too ? */
3054 cstr_cat(astr, tokc.str.data, -1);
3055 next();
3057 cstr_ccat(astr, '\0');
3060 /* If I is >= 1 and a power of two, returns log2(i)+1.
3061 If I is 0 returns 0. */
3062 static int exact_log2p1(int i)
3064 int ret;
3065 if (!i)
3066 return 0;
3067 for (ret = 1; i >= 1 << 8; ret += 8)
3068 i >>= 8;
3069 if (i >= 1 << 4)
3070 ret += 4, i >>= 4;
3071 if (i >= 1 << 2)
3072 ret += 2, i >>= 2;
3073 if (i >= 1 << 1)
3074 ret++;
3075 return ret;
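/* Examples: exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(2) == 2, exact_log2p1(8) == 4. */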
3078 /* Parse GNUC __attribute__ extension. Currently, the following
3079 extensions are recognized:
3080 - aligned(n) : set data/function alignment.
3081 - packed : force data alignment to 1
3082 - section(x) : generate data/code in this section.
3083 - unused : currently ignored, but may be used someday.
3084 - regparm(n) : pass function parameters in registers (i386 only) */
3086 static void parse_attribute(AttributeDef *ad)
3088 int t, n;
3089 CString astr;
3091 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3092 next();
3093 skip('(');
3094 skip('(');
3095 while (tok != ')') {
3096 if (tok < TOK_IDENT)
3097 expect("attribute name");
3098 t = tok;
3099 next();
3100 switch(t) {
3101 case TOK_SECTION1:
3102 case TOK_SECTION2:
3103 skip('(');
3104 parse_mult_str(&astr, "section name");
3105 ad->section = find_section(tcc_state, (char *)astr.data);
3106 skip(')');
3107 cstr_free(&astr);
3108 break;
3109 case TOK_ALIAS1:
3110 case TOK_ALIAS2:
3111 skip('(');
3112 parse_mult_str(&astr, "alias(\"target\")");
3113 ad->alias_target = /* save string as token, for later */
3114 tok_alloc((char*)astr.data, astr.size-1)->tok;
3115 skip(')');
3116 cstr_free(&astr);
3117 break;
3118 case TOK_VISIBILITY1:
3119 case TOK_VISIBILITY2:
3120 skip('(');
3121 parse_mult_str(&astr,
3122 "visibility(\"default|hidden|internal|protected\")");
3123 if (!strcmp (astr.data, "default"))
3124 ad->a.visibility = STV_DEFAULT;
3125 else if (!strcmp (astr.data, "hidden"))
3126 ad->a.visibility = STV_HIDDEN;
3127 else if (!strcmp (astr.data, "internal"))
3128 ad->a.visibility = STV_INTERNAL;
3129 else if (!strcmp (astr.data, "protected"))
3130 ad->a.visibility = STV_PROTECTED;
3131 else
3132 expect("visibility(\"default|hidden|internal|protected\")");
3133 skip(')');
3134 cstr_free(&astr);
3135 break;
3136 case TOK_ALIGNED1:
3137 case TOK_ALIGNED2:
3138 if (tok == '(') {
3139 next();
3140 n = expr_const();
3141 if (n <= 0 || (n & (n - 1)) != 0)
3142 tcc_error("alignment must be a positive power of two");
3143 skip(')');
3144 } else {
3145 n = MAX_ALIGN;
3147 ad->a.aligned = exact_log2p1(n);
3148 if (n != 1 << (ad->a.aligned - 1))
3149 tcc_error("alignment of %d is larger than implemented", n);
3150 break;
3151 case TOK_PACKED1:
3152 case TOK_PACKED2:
3153 ad->a.packed = 1;
3154 break;
3155 case TOK_WEAK1:
3156 case TOK_WEAK2:
3157 ad->a.weak = 1;
3158 break;
3159 case TOK_UNUSED1:
3160 case TOK_UNUSED2:
3161 /* currently, no need to handle it because tcc does not
3162 track unused objects */
3163 break;
3164 case TOK_NORETURN1:
3165 case TOK_NORETURN2:
3166 /* currently ignored: the attribute is accepted but
3167 not recorded */
3168 break;
3169 case TOK_CDECL1:
3170 case TOK_CDECL2:
3171 case TOK_CDECL3:
3172 ad->a.func_call = FUNC_CDECL;
3173 break;
3174 case TOK_STDCALL1:
3175 case TOK_STDCALL2:
3176 case TOK_STDCALL3:
3177 ad->a.func_call = FUNC_STDCALL;
3178 break;
3179 #ifdef TCC_TARGET_I386
3180 case TOK_REGPARM1:
3181 case TOK_REGPARM2:
3182 skip('(');
3183 n = expr_const();
3184 if (n > 3)
3185 n = 3;
3186 else if (n < 0)
3187 n = 0;
3188 if (n > 0)
3189 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3190 skip(')');
3191 break;
3192 case TOK_FASTCALL1:
3193 case TOK_FASTCALL2:
3194 case TOK_FASTCALL3:
3195 ad->a.func_call = FUNC_FASTCALLW;
3196 break;
3197 #endif
3198 case TOK_MODE:
3199 skip('(');
3200 switch(tok) {
3201 case TOK_MODE_DI:
3202 ad->a.mode = VT_LLONG + 1;
3203 break;
3204 case TOK_MODE_QI:
3205 ad->a.mode = VT_BYTE + 1;
3206 break;
3207 case TOK_MODE_HI:
3208 ad->a.mode = VT_SHORT + 1;
3209 break;
3210 case TOK_MODE_SI:
3211 case TOK_MODE_word:
3212 ad->a.mode = VT_INT + 1;
3213 break;
3214 default:
3215 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3216 break;
3218 next();
3219 skip(')');
3220 break;
3221 case TOK_DLLEXPORT:
3222 ad->a.func_export = 1;
3223 break;
3224 case TOK_DLLIMPORT:
3225 ad->a.func_import = 1;
3226 break;
3227 default:
3228 if (tcc_state->warn_unsupported)
3229 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3230 /* skip parameters */
3231 if (tok == '(') {
3232 int parenthesis = 0;
3233 do {
3234 if (tok == '(')
3235 parenthesis++;
3236 else if (tok == ')')
3237 parenthesis--;
3238 next();
3239 } while (parenthesis && tok != -1);
3241 break;
3243 if (tok != ',')
3244 break;
3245 next();
3247 skip(')');
3248 skip(')');
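/* Example of input accepted by the parser above (illustrative only):
   int x __attribute__((aligned(16), section(".mydata")));
   unknown attribute names are skipped together with their parenthesized
   arguments, with a warning if warn_unsupported is set. */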
3252 static Sym * find_field (CType *type, int v)
3254 Sym *s = type->ref;
3255 v |= SYM_FIELD;
3256 while ((s = s->next) != NULL) {
3257 if ((s->v & SYM_FIELD) &&
3258 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3259 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3260 Sym *ret = find_field (&s->type, v);
3261 if (ret)
3262 return ret;
3264 if (s->v == v)
3265 break;
3267 return s;
3270 static void struct_add_offset (Sym *s, int offset)
3272 while ((s = s->next) != NULL) {
3273 if ((s->v & SYM_FIELD) &&
3274 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3275 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3276 struct_add_offset(s->type.ref, offset);
3277 } else
3278 s->c += offset;
3282 static void struct_layout(CType *type, AttributeDef *ad)
3284 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3285 int pcc = !tcc_state->ms_bitfields;
3286 Sym *f;
3287 if (ad->a.aligned)
3288 maxalign = 1 << (ad->a.aligned - 1);
3289 else
3290 maxalign = 1;
3291 offset = 0;
3292 c = 0;
3293 bit_pos = 0;
3294 prevbt = VT_STRUCT; /* make it never match */
3295 prev_bit_size = 0;
3296 for (f = type->ref->next; f; f = f->next) {
3297 int typealign, bit_size;
3298 int size = type_size(&f->type, &typealign);
3299 if (f->type.t & VT_BITFIELD)
3300 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3301 else
3302 bit_size = -1;
3303 if (bit_size == 0 && pcc) {
3304 /* Zero-width bit-fields in PCC mode aren't affected
3305 by any packing (attribute or pragma). */
3306 align = typealign;
3307 } else if (f->r > 1) {
3308 align = f->r;
3309 } else if (ad->a.packed || f->r == 1) {
3310 align = 1;
3311 /* Packed fields or packed records don't let the base type
3312 influence the record's alignment. */
3313 typealign = 1;
3314 } else {
3315 align = typealign;
3317 if (type->ref->type.t != TOK_STRUCT) {
3318 if (pcc && bit_size >= 0)
3319 size = (bit_size + 7) >> 3;
3320 /* Bit position is already zero from our caller. */
3321 offset = 0;
3322 if (size > c)
3323 c = size;
3324 } else if (bit_size < 0) {
3325 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3326 prevbt = VT_STRUCT;
3327 prev_bit_size = 0;
3328 c = (c + addbytes + align - 1) & -align;
3329 offset = c;
3330 if (size > 0)
3331 c += size;
3332 bit_pos = 0;
3333 } else {
3334 /* A bit-field. Layout is more complicated. There are two
3335 options TCC implements: PCC compatible and MS compatible
3336 (PCC compatible is what GCC uses for almost all targets).
3337 In PCC layout the overall size of the struct (in c) is
3338 _excluding_ the current run of bit-fields (that is,
3339 there's at least additional bit_pos bits after c). In
3340 MS layout c does include the current run of bit-fields.
3342 This matters for calculating the natural alignment buckets
3343 in PCC mode. */
3345 /* 'align' will be used to influence the record's alignment,
3346 so it's the max of specified and type alignment, except
3347 in certain cases that depend on the mode. */
3348 if (align < typealign)
3349 align = typealign;
3350 if (pcc) {
3351 /* In PCC layout a non-packed bit-field is placed adjacent
3352 to the preceding bit-fields, except if it would overflow
3353 its container (depending on base type) or it's a zero-width
3354 bit-field. Packed non-zero-width bit-fields always are
3355 placed adjacent. */
3356 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3357 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3358 if (bit_size == 0 ||
3359 ((typealign != 1 || size == 1) &&
3360 (ofs2 / (typealign * 8)) > (size/typealign))) {
3361 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3362 bit_pos = 0;
3363 } else while (bit_pos + bit_size > size * 8) {
3364 c += size;
3365 bit_pos -= size * 8;
3367 offset = c;
3368 /* In PCC layout named bit-fields influence the alignment
3369 of the containing struct using the base type's alignment,
3370 except for packed fields (which here have correct
3371 align/typealign). */
3372 if ((f->v & SYM_FIRST_ANOM))
3373 align = 1;
3374 } else {
3375 bt = f->type.t & VT_BTYPE;
3376 if ((bit_pos + bit_size > size * 8) ||
3377 (bit_size > 0) == (bt != prevbt)) {
3378 c = (c + typealign - 1) & -typealign;
3379 offset = c;
3380 bit_pos = 0;
3381 /* In MS bitfield mode a bit-field run always uses
3382 at least as many bits as the underlying type.
3383 To start a new run it's also required that this
3384 or the last bit-field had non-zero width. */
3385 if (bit_size || prev_bit_size)
3386 c += size;
3388 /* In MS layout the record's alignment is normally
3389 influenced by the field, except for a zero-width
3390 field at the start of a run (but by further zero-width
3391 fields it is again). */
3392 if (bit_size == 0 && prevbt != bt)
3393 align = 1;
3394 prevbt = bt;
3395 prev_bit_size = bit_size;
3397 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3398 | (bit_pos << VT_STRUCT_SHIFT);
3399 bit_pos += bit_size;
3400 if (pcc && bit_pos >= size * 8) {
3401 c += size;
3402 bit_pos -= size * 8;
3405 if (align > maxalign)
3406 maxalign = align;
3407 #if 0
3408 printf("set field %s offset=%d c=%d",
3409 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3410 if (f->type.t & VT_BITFIELD) {
3411 printf(" pos=%d size=%d",
3412 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3413 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3415 printf("\n");
3416 #endif
3418 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3419 Sym *ass;
3420 /* An anonymous struct/union. Adjust member offsets
3421 to reflect the real offset of our containing struct.
3422 Also set the offset of this anon member inside
3423 the outer struct to be zero. This way it
3424 works both when accessing the field offset directly
3425 (from the base object) and when recursing into
3426 members during initializer handling. */
3427 int v2 = f->type.ref->v;
3428 if (!(v2 & SYM_FIELD) &&
3429 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3430 Sym **pps;
3431 /* This happens only with MS extensions. The
3432 anon member has a named struct type, so it
3433 potentially is shared with other references.
3434 We need to unshare members so we can modify
3435 them. */
3436 ass = f->type.ref;
3437 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3438 &f->type.ref->type, 0,
3439 f->type.ref->c);
3440 pps = &f->type.ref->next;
3441 while ((ass = ass->next) != NULL) {
3442 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3443 pps = &((*pps)->next);
3445 *pps = NULL;
3447 struct_add_offset(f->type.ref, offset);
3448 f->c = 0;
3449 } else {
3450 f->c = offset;
3453 f->r = 0;
3455 /* store size and alignment */
3456 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3457 + maxalign - 1) & -maxalign;
3458 type->ref->r = maxalign;
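/* Worked example (PCC mode, 32-bit int): for
   struct { int a : 3; int b : 3; } the two bit-fields are placed
   adjacently in one int (a at bit_pos 0, b at bit_pos 3), and the final
   size computed above is (0 + ((6 + 7) >> 3) + 3) & -4 == 4 with
   maxalign == 4. */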
3461 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3462 static void struct_decl(CType *type, AttributeDef *ad, int u)
3464 int a, v, size, align, flexible, alignoverride;
3465 long c;
3466 int bit_size, bsize, bt;
3467 Sym *s, *ss, **ps;
3468 AttributeDef ad1;
3469 CType type1, btype;
3471 a = tok; /* save decl type */
3472 next();
3473 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3474 parse_attribute(ad);
3475 if (tok != '{') {
3476 v = tok;
3477 next();
3478 /* struct already defined ? return it */
3479 if (v < TOK_IDENT)
3480 expect("struct/union/enum name");
3481 s = struct_find(v);
3482 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3483 if (s->type.t != a)
3484 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3485 goto do_decl;
3487 } else {
3488 v = anon_sym++;
3490 /* Record the original enum/struct/union token. */
3491 type1.t = a;
3492 type1.ref = NULL;
3493 /* we put an undefined size for struct/union */
3494 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3495 s->r = 0; /* default alignment is zero, as in gcc */
3496 /* put struct/union/enum name in type */
3497 do_decl:
3498 type->t = u;
3499 type->ref = s;
3501 if (tok == '{') {
3502 next();
3503 if (s->c != -1)
3504 tcc_error("struct/union/enum already defined");
3505 /* cannot be empty */
3506 c = 0;
3507 /* empty enums are not allowed */
3508 if (a == TOK_ENUM) {
3509 int seen_neg = 0;
3510 int seen_wide = 0;
3511 for(;;) {
3512 CType *t = &int_type;
3513 v = tok;
3514 if (v < TOK_UIDENT)
3515 expect("identifier");
3516 ss = sym_find(v);
3517 if (ss && !local_stack)
3518 tcc_error("redefinition of enumerator '%s'",
3519 get_tok_str(v, NULL));
3520 next();
3521 if (tok == '=') {
3522 next();
3523 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3524 c = expr_const64();
3525 #else
3526 /* We really want to support long long enums
3527 on i386 as well, but the Sym structure only
3528 holds a 'long' for associated constants,
3529 and enlarging it would bump its size (no
3530 available padding). So punt for now. */
3531 c = expr_const();
3532 #endif
3534 if (c < 0)
3535 seen_neg = 1;
3536 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3537 seen_wide = 1, t = &size_type;
3538 /* enum symbols have static storage */
3539 ss = sym_push(v, t, VT_CONST, c);
3540 ss->type.t |= VT_STATIC;
3541 if (tok != ',')
3542 break;
3543 next();
3544 c++;
3545 /* NOTE: we accept a trailing comma */
3546 if (tok == '}')
3547 break;
3549 if (!seen_neg)
3550 s->a.unsigned_enum = 1;
3551 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3552 skip('}');
3553 } else {
3554 ps = &s->next;
3555 flexible = 0;
3556 while (tok != '}') {
3557 if (!parse_btype(&btype, &ad1)) {
3558 skip(';');
3559 continue;
3561 while (1) {
3562 if (flexible)
3563 tcc_error("flexible array member '%s' not at the end of struct",
3564 get_tok_str(v, NULL));
3565 bit_size = -1;
3566 v = 0;
3567 type1 = btype;
3568 if (tok != ':') {
3569 if (tok != ';')
3570 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3571 if (v == 0) {
3572 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3573 expect("identifier");
3574 else {
3575 int v = btype.ref->v;
3576 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3577 if (tcc_state->ms_extensions == 0)
3578 expect("identifier");
3582 if (type_size(&type1, &align) < 0) {
3583 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3584 flexible = 1;
3585 else
3586 tcc_error("field '%s' has incomplete type",
3587 get_tok_str(v, NULL));
3589 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3590 (type1.t & VT_STORAGE))
3591 tcc_error("invalid type for '%s'",
3592 get_tok_str(v, NULL));
3594 if (tok == ':') {
3595 next();
3596 bit_size = expr_const();
3597 /* XXX: handle v = 0 case for messages */
3598 if (bit_size < 0)
3599 tcc_error("negative width in bit-field '%s'",
3600 get_tok_str(v, NULL));
3601 if (v && bit_size == 0)
3602 tcc_error("zero width for bit-field '%s'",
3603 get_tok_str(v, NULL));
3604 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3605 parse_attribute(&ad1);
3607 size = type_size(&type1, &align);
3608 /* Only remember non-default alignment. */
3609 alignoverride = 0;
3610 if (ad1.a.aligned) {
3611 int speca = 1 << (ad1.a.aligned - 1);
3612 alignoverride = speca;
3613 } else if (ad1.a.packed || ad->a.packed) {
3614 alignoverride = 1;
3615 } else if (*tcc_state->pack_stack_ptr) {
3616 if (align > *tcc_state->pack_stack_ptr)
3617 alignoverride = *tcc_state->pack_stack_ptr;
3619 if (bit_size >= 0) {
3620 bt = type1.t & VT_BTYPE;
3621 if (bt != VT_INT &&
3622 bt != VT_BYTE &&
3623 bt != VT_SHORT &&
3624 bt != VT_BOOL &&
3625 bt != VT_ENUM &&
3626 bt != VT_LLONG)
3627 tcc_error("bitfields must have scalar type");
3628 bsize = size * 8;
3629 if (bit_size > bsize) {
3630 tcc_error("width of '%s' exceeds its type",
3631 get_tok_str(v, NULL));
3632 } else if (bit_size == bsize) {
3633 /* no need for bit fields */
3635 } else {
3636 type1.t |= VT_BITFIELD |
3637 (0 << VT_STRUCT_SHIFT) |
3638 (bit_size << (VT_STRUCT_SHIFT + 6));
3641 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3642 /* Remember we've seen a real field to check
3643 for placement of flexible array member. */
3644 c = 1;
3646 /* If member is a struct or bit-field, enforce
3647 placing into the struct (as anonymous). */
3648 if (v == 0 &&
3649 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3650 bit_size >= 0)) {
3651 v = anon_sym++;
3653 if (v) {
3654 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3655 *ps = ss;
3656 ps = &ss->next;
3658 if (tok == ';' || tok == TOK_EOF)
3659 break;
3660 skip(',');
3662 skip(';');
3664 skip('}');
3665 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3666 parse_attribute(ad);
3667 struct_layout(type, ad);
3672 /* return 1 if basic type is a type size (short, long, long long) */
3673 ST_FUNC int is_btype_size(int bt)
3675 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3678 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3679 are added to the element type, copied because it could be a typedef. */
3680 static void parse_btype_qualify(CType *type, int qualifiers)
3682 while (type->t & VT_ARRAY) {
3683 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3684 type = &type->ref->type;
3686 type->t |= qualifiers;
3689 /* return 0 if no type declaration. otherwise, return the basic type
3690 and skip it. */
3692 static int parse_btype(CType *type, AttributeDef *ad)
3694 int t, u, bt_size, complete, type_found, typespec_found, g;
3695 Sym *s;
3696 CType type1;
3698 memset(ad, 0, sizeof(AttributeDef));
3699 complete = 0;
3700 type_found = 0;
3701 typespec_found = 0;
3702 t = 0;
3703 while(1) {
3704 switch(tok) {
3705 case TOK_EXTENSION:
3706 /* currently, we really ignore extension */
3707 next();
3708 continue;
3710 /* basic types */
3711 case TOK_CHAR:
3712 u = VT_BYTE;
3713 basic_type:
3714 next();
3715 basic_type1:
3716 if (complete)
3717 tcc_error("too many basic types");
3718 t |= u;
3719 bt_size = is_btype_size (u & VT_BTYPE);
3720 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3721 complete = 1;
3722 typespec_found = 1;
3723 break;
3724 case TOK_VOID:
3725 u = VT_VOID;
3726 goto basic_type;
3727 case TOK_SHORT:
3728 u = VT_SHORT;
3729 goto basic_type;
3730 case TOK_INT:
3731 u = VT_INT;
3732 goto basic_type;
3733 case TOK_LONG:
3734 next();
3735 if ((t & VT_BTYPE) == VT_DOUBLE) {
3736 #ifndef TCC_TARGET_PE
3737 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3738 #endif
3739 } else if ((t & VT_BTYPE) == VT_LONG) {
3740 t = (t & ~VT_BTYPE) | VT_LLONG;
3741 } else {
3742 u = VT_LONG;
3743 goto basic_type1;
3745 break;
3746 #ifdef TCC_TARGET_ARM64
3747 case TOK_UINT128:
3748 /* GCC's __uint128_t appears in some Linux header files. Make it a
3749 synonym for long double to get the size and alignment right. */
3750 u = VT_LDOUBLE;
3751 goto basic_type;
3752 #endif
3753 case TOK_BOOL:
3754 u = VT_BOOL;
3755 goto basic_type;
3756 case TOK_FLOAT:
3757 u = VT_FLOAT;
3758 goto basic_type;
3759 case TOK_DOUBLE:
3760 next();
3761 if ((t & VT_BTYPE) == VT_LONG) {
3762 #ifdef TCC_TARGET_PE
3763 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3764 #else
3765 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3766 #endif
3767 } else {
3768 u = VT_DOUBLE;
3769 goto basic_type1;
3771 break;
3772 case TOK_ENUM:
3773 struct_decl(&type1, ad, VT_ENUM);
3774 basic_type2:
3775 u = type1.t;
3776 type->ref = type1.ref;
3777 goto basic_type1;
3778 case TOK_STRUCT:
3779 case TOK_UNION:
3780 struct_decl(&type1, ad, VT_STRUCT);
3781 goto basic_type2;
3783 /* type modifiers */
3784 case TOK_CONST1:
3785 case TOK_CONST2:
3786 case TOK_CONST3:
3787 type->t = t;
3788 parse_btype_qualify(type, VT_CONSTANT);
3789 t = type->t;
3790 next();
3791 break;
3792 case TOK_VOLATILE1:
3793 case TOK_VOLATILE2:
3794 case TOK_VOLATILE3:
3795 type->t = t;
3796 parse_btype_qualify(type, VT_VOLATILE);
3797 t = type->t;
3798 next();
3799 break;
3800 case TOK_SIGNED1:
3801 case TOK_SIGNED2:
3802 case TOK_SIGNED3:
3803 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3804 tcc_error("signed and unsigned modifier");
3805 typespec_found = 1;
3806 t |= VT_DEFSIGN;
3807 next();
3808 break;
3809 case TOK_REGISTER:
3810 case TOK_AUTO:
3811 case TOK_RESTRICT1:
3812 case TOK_RESTRICT2:
3813 case TOK_RESTRICT3:
3814 next();
3815 break;
3816 case TOK_UNSIGNED:
3817 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3818 tcc_error("signed and unsigned modifier");
3819 t |= VT_DEFSIGN | VT_UNSIGNED;
3820 next();
3821 typespec_found = 1;
3822 break;
3824 /* storage */
3825 case TOK_EXTERN:
3826 g = VT_EXTERN;
3827 goto storage;
3828 case TOK_STATIC:
3829 g = VT_STATIC;
3830 goto storage;
3831 case TOK_TYPEDEF:
3832 g = VT_TYPEDEF;
3833 goto storage;
3834 storage:
3835 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3836 tcc_error("multiple storage classes");
3837 t |= g;
3838 next();
3839 break;
3840 case TOK_INLINE1:
3841 case TOK_INLINE2:
3842 case TOK_INLINE3:
3843 t |= VT_INLINE;
3844 next();
3845 break;
3847 /* GNUC attribute */
3848 case TOK_ATTRIBUTE1:
3849 case TOK_ATTRIBUTE2:
3850 parse_attribute(ad);
3851 if (ad->a.mode) {
3852 u = ad->a.mode -1;
3853 t = (t & ~VT_BTYPE) | u;
3855 break;
3856 /* GNUC typeof */
3857 case TOK_TYPEOF1:
3858 case TOK_TYPEOF2:
3859 case TOK_TYPEOF3:
3860 next();
3861 parse_expr_type(&type1);
3862 /* remove all storage modifiers except typedef */
3863 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3864 goto basic_type2;
3865 default:
3866 if (typespec_found)
3867 goto the_end;
3868 s = sym_find(tok);
3869 if (!s || !(s->type.t & VT_TYPEDEF))
3870 goto the_end;
3872 type->t = ((s->type.t & ~VT_TYPEDEF) |
3873 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3874 type->ref = s->type.ref;
3875 if (t & (VT_CONSTANT | VT_VOLATILE))
3876 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3877 t = type->t;
3879 if (s->r) {
3880 /* get attributes from typedef */
3881 if (0 == ad->a.aligned)
3882 ad->a.aligned = s->a.aligned;
3883 if (0 == ad->a.func_call)
3884 ad->a.func_call = s->a.func_call;
3885 ad->a.packed |= s->a.packed;
3887 next();
3888 typespec_found = 1;
3889 break;
3891 type_found = 1;
3893 the_end:
3894 if (tcc_state->char_is_unsigned) {
3895 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3896 t |= VT_UNSIGNED;
3899 /* long is never used as type */
3900 if ((t & VT_BTYPE) == VT_LONG)
3901 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3902 defined TCC_TARGET_PE
3903 t = (t & ~VT_BTYPE) | VT_INT;
3904 #else
3905 t = (t & ~VT_BTYPE) | VT_LLONG;
3906 #endif
3907 type->t = t;
3908 return type_found;
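/* Example: "unsigned long long int" is accepted by accumulating
   VT_DEFSIGN|VT_UNSIGNED from 'unsigned', upgrading VT_LONG to VT_LLONG
   on the second 'long', and finally or'ing in 'int'; a second base type
   such as "int char" is rejected with "too many basic types". */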
3911 /* convert a function parameter type (array to pointer and function to
3912 function pointer) */
3913 static inline void convert_parameter_type(CType *pt)
3915 /* remove const and volatile qualifiers (XXX: const could be used
3916 to indicate a const function parameter) */
3917 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3918 /* array must be transformed to pointer according to ANSI C */
3919 pt->t &= ~VT_ARRAY;
3920 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3921 mk_pointer(pt);
3925 ST_FUNC void parse_asm_str(CString *astr)
3927 skip('(');
3928 parse_mult_str(astr, "string constant");
3931 /* Parse an asm label and return the token */
3932 static int asm_label_instr(void)
3934 int v;
3935 CString astr;
3937 next();
3938 parse_asm_str(&astr);
3939 skip(')');
3940 #ifdef ASM_DEBUG
3941 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3942 #endif
3943 v = tok_alloc(astr.data, astr.size - 1)->tok;
3944 cstr_free(&astr);
3945 return v;
3948 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
3950 int n, l, t1, arg_size, align;
3951 Sym **plast, *s, *first;
3952 AttributeDef ad1;
3953 CType pt;
3955 if (tok == '(') {
3956 /* function type, or recursive declarator (return if so) */
3957 next();
3958 if (td && !(td & TYPE_ABSTRACT))
3959 return 0;
3960 if (tok == ')')
3961 l = 0;
3962 else if (parse_btype(&pt, &ad1))
3963 l = FUNC_NEW;
3964 else if (td)
3965 return 0;
3966 else
3967 l = FUNC_OLD;
3968 first = NULL;
3969 plast = &first;
3970 arg_size = 0;
3971 if (l) {
3972 for(;;) {
3973 /* read param name and compute offset */
3974 if (l != FUNC_OLD) {
3975 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3976 break;
3977 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3978 if ((pt.t & VT_BTYPE) == VT_VOID)
3979 tcc_error("parameter declared as void");
3980 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3981 } else {
3982 n = tok;
3983 if (n < TOK_UIDENT)
3984 expect("identifier");
3985 pt.t = VT_VOID; /* invalid type */
3986 next();
3988 convert_parameter_type(&pt);
3989 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3990 *plast = s;
3991 plast = &s->next;
3992 if (tok == ')')
3993 break;
3994 skip(',');
3995 if (l == FUNC_NEW && tok == TOK_DOTS) {
3996 l = FUNC_ELLIPSIS;
3997 next();
3998 break;
4000 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4001 tcc_error("invalid type");
4003 } else
4004 /* if no parameters, then old type prototype */
4005 l = FUNC_OLD;
4006 skip(')');
4007 /* NOTE: const is ignored in returned type as it has a special
4008 meaning in gcc / C++ */
4009 type->t &= ~VT_CONSTANT;
4010 /* some ancient pre-K&R C allows a function to return an array
4011 and the array brackets to be put after the arguments, such
4012 that "int c()[]" means something like "int[] c()" */
4013 if (tok == '[') {
4014 next();
4015 skip(']'); /* only handle simple "[]" */
4016 type->t |= VT_PTR;
4018 /* we push an anonymous symbol which will contain the function prototype */
4019 ad->a.func_args = arg_size;
4020 s = sym_push(SYM_FIELD, type, 0, l);
4021 s->a = ad->a;
4022 s->next = first;
4023 type->t = VT_FUNC;
4024 type->ref = s;
4025 } else if (tok == '[') {
4026 int saved_nocode_wanted = nocode_wanted;
4027 /* array definition */
4028 next();
4029 if (tok == TOK_RESTRICT1)
4030 next();
4031 n = -1;
4032 t1 = 0;
4033 if (tok != ']') {
4034 if (!local_stack || (storage & VT_STATIC))
4035 vpushi(expr_const());
4036 else {
4037 /* VLA lengths (which can only occur with local_stack && !VT_STATIC)
4038 must always be evaluated, even under nocode_wanted,
4039 so that their size slot is initialized (e.g. under sizeof
4040 or typeof). */
4041 nocode_wanted = 0;
4042 gexpr();
4044 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4045 n = vtop->c.i;
4046 if (n < 0)
4047 tcc_error("invalid array size");
4048 } else {
4049 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4050 tcc_error("size of variable length array should be an integer");
4051 t1 = VT_VLA;
4054 skip(']');
4055 /* parse next post type */
4056 post_type(type, ad, storage, 0);
4057 if (type->t == VT_FUNC)
4058 tcc_error("declaration of an array of functions");
4059 t1 |= type->t & VT_VLA;
4061 if (t1 & VT_VLA) {
4062 loc -= type_size(&int_type, &align);
4063 loc &= -align;
4064 n = loc;
4066 vla_runtime_type_size(type, &align);
4067 gen_op('*');
4068 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4069 vswap();
4070 vstore();
4072 if (n != -1)
4073 vpop();
4074 nocode_wanted = saved_nocode_wanted;
4076 /* we push an anonymous symbol which will contain the array
4077 element type */
4078 s = sym_push(SYM_FIELD, type, 0, n);
4079 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4080 type->ref = s;
4082 return 1;
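/* Note on the VLA path above: for a declaration like 'int a[n]' the
   length expression is evaluated, multiplied by the element size from
   vla_runtime_type_size() and stored into a fresh stack slot at 'loc';
   type->ref->c then holds that slot offset instead of a constant count. */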
4085 /* Parse a type declarator (except basic type), and return the type
4086 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4087 expected. 'type' should contain the basic type. 'ad' is the
4088 attribute definition of the basic type. It can be modified by
4089 type_decl(). If this (possibly abstract) declarator is a pointer chain
4090 it returns the innermost pointed to type (equals *type, but is a different
4091 pointer), otherwise returns type itself, that's used for recursive calls. */
4092 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4094 CType *post, *ret;
4095 int qualifiers, storage;
4097 /* recursive type, remove storage bits first, apply them later again */
4098 storage = type->t & VT_STORAGE;
4099 type->t &= ~VT_STORAGE;
4100 post = ret = type;
4101 while (tok == '*') {
4102 qualifiers = 0;
4103 redo:
4104 next();
4105 switch(tok) {
4106 case TOK_CONST1:
4107 case TOK_CONST2:
4108 case TOK_CONST3:
4109 qualifiers |= VT_CONSTANT;
4110 goto redo;
4111 case TOK_VOLATILE1:
4112 case TOK_VOLATILE2:
4113 case TOK_VOLATILE3:
4114 qualifiers |= VT_VOLATILE;
4115 goto redo;
4116 case TOK_RESTRICT1:
4117 case TOK_RESTRICT2:
4118 case TOK_RESTRICT3:
4119 goto redo;
4120 /* XXX: clarify attribute handling */
4121 case TOK_ATTRIBUTE1:
4122 case TOK_ATTRIBUTE2:
4123 parse_attribute(ad);
4124 break;
4126 mk_pointer(type);
4127 type->t |= qualifiers;
4128 if (ret == type)
4129 /* innermost pointed to type is the one for the first derivation */
4130 ret = pointed_type(type);
4133 if (tok == '(') {
4134 /* This is possibly a parameter type list for abstract declarators
4135 ('int ()'), use post_type for testing this. */
4136 if (!post_type(type, ad, 0, td)) {
4137 /* It's not, so it's a nested declarator, and the post operations
4138 apply to the innermost pointed to type (if any). */
4139 /* XXX: this is not correct to modify 'ad' at this point, but
4140 the syntax is not clear */
4141 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4142 parse_attribute(ad);
4143 post = type_decl(type, ad, v, td);
4144 skip(')');
4146 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4147 /* type identifier */
4148 *v = tok;
4149 next();
4150 } else {
4151 if (!(td & TYPE_ABSTRACT))
4152 expect("identifier");
4153 *v = 0;
4155 post_type(post, ad, storage, 0);
4156 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4157 parse_attribute(ad);
4158 type->t |= storage;
4159 return ret;
4162 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4163 ST_FUNC int lvalue_type(int t)
4165 int bt, r;
4166 r = VT_LVAL;
4167 bt = t & VT_BTYPE;
4168 if (bt == VT_BYTE || bt == VT_BOOL)
4169 r |= VT_LVAL_BYTE;
4170 else if (bt == VT_SHORT)
4171 r |= VT_LVAL_SHORT;
4172 else
4173 return r;
4174 if (t & VT_UNSIGNED)
4175 r |= VT_LVAL_UNSIGNED;
4176 return r;
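/* Example: lvalue_type(VT_SHORT | VT_UNSIGNED) returns
   VT_LVAL | VT_LVAL_SHORT | VT_LVAL_UNSIGNED, while plain VT_INT just
   returns VT_LVAL (word-sized accesses need no extra load/store hint). */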
4179 /* indirection with full error checking and bound check */
4180 ST_FUNC void indir(void)
4182 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4183 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4184 return;
4185 expect("pointer");
4187 if (vtop->r & VT_LVAL)
4188 gv(RC_INT);
4189 vtop->type = *pointed_type(&vtop->type);
4190 /* Arrays and functions are never lvalues */
4191 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4192 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4193 vtop->r |= lvalue_type(vtop->type.t);
4194 /* if bound checking, the referenced pointer must be checked */
4195 #ifdef CONFIG_TCC_BCHECK
4196 if (tcc_state->do_bounds_check)
4197 vtop->r |= VT_MUSTBOUND;
4198 #endif
4202 /* pass a parameter to a function and do type checking and casting */
4203 static void gfunc_param_typed(Sym *func, Sym *arg)
4205 int func_type;
4206 CType type;
4208 func_type = func->c;
4209 if (func_type == FUNC_OLD ||
4210 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4211 /* default casting : only need to convert float to double */
4212 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4213 type.t = VT_DOUBLE;
4214 gen_cast(&type);
4215 } else if (vtop->type.t & VT_BITFIELD) {
4216 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4217 type.ref = vtop->type.ref;
4218 gen_cast(&type);
4220 } else if (arg == NULL) {
4221 tcc_error("too many arguments to function");
4222 } else {
4223 type = arg->type;
4224 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4225 gen_assign_cast(&type);
4229 /* parse an expression and return its type without any side effect.
4230 If 'unry' is set, parse a unary expression, otherwise a full one. */
4231 static void expr_type(CType *type, int unry)
4233 nocode_wanted++;
4234 if (unry)
4235 unary();
4236 else
4237 gexpr();
4238 *type = vtop->type;
4239 vpop();
4240 nocode_wanted--;
4243 /* parse an expression of the form '(type)' or '(expr)' and return its
4244 type */
4245 static void parse_expr_type(CType *type)
4247 int n;
4248 AttributeDef ad;
4250 skip('(');
4251 if (parse_btype(type, &ad)) {
4252 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4253 } else {
4254 expr_type(type, 0);
4256 skip(')');
4259 static void parse_type(CType *type)
4261 AttributeDef ad;
4262 int n;
4264 if (!parse_btype(type, &ad)) {
4265 expect("type");
4267 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4270 static void parse_builtin_params(int nc, const char *args)
4272 char c, sep = '(';
4273 CType t;
4274 if (nc)
4275 nocode_wanted++;
4276 next();
4277 while ((c = *args++)) {
4278 skip(sep);
4279 sep = ',';
4280 switch (c) {
4281 case 'e': expr_eq(); continue;
4282 case 't': parse_type(&t); vpush(&t); continue;
4283 default: tcc_error("internal error"); break;
4286 skip(')');
4287 if (nc)
4288 nocode_wanted--;
4291 ST_FUNC void unary(void)
4293 int n, t, align, size, r, sizeof_caller;
4294 CType type;
4295 Sym *s;
4296 AttributeDef ad;
4298 sizeof_caller = in_sizeof;
4299 in_sizeof = 0;
4300 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4301 although it would be better here */
4302 tok_next:
4303 switch(tok) {
4304 case TOK_EXTENSION:
4305 next();
4306 goto tok_next;
4307 case TOK_CINT:
4308 case TOK_CCHAR:
4309 case TOK_LCHAR:
4310 t = VT_INT;
4311 push_tokc:
4312 type.t = t;
4313 type.ref = 0;
4314 vsetc(&type, VT_CONST, &tokc);
4315 next();
4316 break;
4317 case TOK_CUINT:
4318 t = VT_INT | VT_UNSIGNED;
4319 goto push_tokc;
4320 case TOK_CLLONG:
4321 t = VT_LLONG;
4322 goto push_tokc;
4323 case TOK_CULLONG:
4324 t = VT_LLONG | VT_UNSIGNED;
4325 goto push_tokc;
4326 case TOK_CFLOAT:
4327 t = VT_FLOAT;
4328 goto push_tokc;
4329 case TOK_CDOUBLE:
4330 t = VT_DOUBLE;
4331 goto push_tokc;
4332 case TOK_CLDOUBLE:
4333 t = VT_LDOUBLE;
4334 goto push_tokc;
4336 case TOK___FUNCTION__:
4337 if (!gnu_ext)
4338 goto tok_identifier;
4339 /* fall thru */
4340 case TOK___FUNC__:
4342 void *ptr;
4343 int len;
4344 /* special function name identifier */
4345 len = strlen(funcname) + 1;
4346 /* generate char[len] type */
4347 type.t = VT_BYTE;
4348 mk_pointer(&type);
4349 type.t |= VT_ARRAY;
4350 type.ref->c = len;
4351 vpush_ref(&type, data_section, data_section->data_offset, len);
4352 ptr = section_ptr_add(data_section, len);
4353 memcpy(ptr, funcname, len);
4354 next();
4356 break;
4357 case TOK_LSTR:
4358 #ifdef TCC_TARGET_PE
4359 t = VT_SHORT | VT_UNSIGNED;
4360 #else
4361 t = VT_INT;
4362 #endif
4363 goto str_init;
4364 case TOK_STR:
4365 /* string parsing */
4366 t = VT_BYTE;
4367 str_init:
4368 if (tcc_state->warn_write_strings)
4369 t |= VT_CONSTANT;
4370 type.t = t;
4371 mk_pointer(&type);
4372 type.t |= VT_ARRAY;
4373 memset(&ad, 0, sizeof(AttributeDef));
4374 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4375 break;
4376 case '(':
4377 next();
4378 /* cast ? */
4379 if (parse_btype(&type, &ad)) {
4380 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4381 skip(')');
4382 /* check ISOC99 compound literal */
4383 if (tok == '{') {
4384 /* data is allocated locally by default */
4385 if (global_expr)
4386 r = VT_CONST;
4387 else
4388 r = VT_LOCAL;
4389 /* all except arrays are lvalues */
4390 if (!(type.t & VT_ARRAY))
4391 r |= lvalue_type(type.t);
4392 memset(&ad, 0, sizeof(AttributeDef));
4393 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4394 } else {
4395 if (sizeof_caller) {
4396 vpush(&type);
4397 return;
4399 unary();
4400 gen_cast(&type);
4402 } else if (tok == '{') {
4403 int saved_nocode_wanted = nocode_wanted;
4404 if (const_wanted)
4405 tcc_error("expected constant");
4406 /* save all registers */
4407 save_regs(0);
4408 /* statement expression : we do not accept break/continue
4409 inside as GCC does. We do retain the nocode_wanted state,
4410 as statement expressions can't ever be entered from the
4411 outside, so any reactivation of code emission (from labels
4412 or loop heads) can be disabled again after the end of it. */
4413 block(NULL, NULL, 1);
4414 nocode_wanted = saved_nocode_wanted;
4415 skip(')');
4416 } else {
4417 gexpr();
4418 skip(')');
4420 break;
4421 case '*':
4422 next();
4423 unary();
4424 indir();
4425 break;
4426 case '&':
4427 next();
4428 unary();
4429 /* function names must be treated as function pointers,
4430 except for unary '&' and sizeof. Since we consider that
4431 functions are not lvalues, we only have to handle it
4432 there and in function calls. */
4433 /* arrays can also be used although they are not lvalues */
4434 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4435 !(vtop->type.t & VT_ARRAY))
4436 test_lvalue();
4437 mk_pointer(&vtop->type);
4438 gaddrof();
4439 break;
4440 case '!':
4441 next();
4442 unary();
4443 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4444 CType boolean;
4445 boolean.t = VT_BOOL;
4446 gen_cast(&boolean);
4447 vtop->c.i = !vtop->c.i;
4448 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4449 vtop->c.i ^= 1;
4450 else {
4451 save_regs(1);
4452 vseti(VT_JMP, gvtst(1, 0));
4454 break;
4455 case '~':
4456 next();
4457 unary();
4458 vpushi(-1);
4459 gen_op('^');
4460 break;
4461 case '+':
4462 next();
4463 unary();
4464 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4465 tcc_error("pointer not accepted for unary plus");
4466 /* In order to force a cast, we add zero, except for floating point
4467 where we really need a no-op (otherwise -0.0 would be transformed
4468 into +0.0). */
4469 if (!is_float(vtop->type.t)) {
4470 vpushi(0);
4471 gen_op('+');
4473 break;
4474 case TOK_SIZEOF:
4475 case TOK_ALIGNOF1:
4476 case TOK_ALIGNOF2:
4477 t = tok;
4478 next();
4479 in_sizeof++;
4480 expr_type(&type, 1); // performs in_sizeof = 0 as a side effect (in unary())
4481 size = type_size(&type, &align);
4482 if (t == TOK_SIZEOF) {
4483 if (!(type.t & VT_VLA)) {
4484 if (size < 0)
4485 tcc_error("sizeof applied to an incomplete type");
4486 vpushs(size);
4487 } else {
4488 vla_runtime_type_size(&type, &align);
4490 } else {
4491 vpushs(align);
4493 vtop->type.t |= VT_UNSIGNED;
4494 break;
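/* For illustration: 'sizeof' on a VLA type takes the
   vla_runtime_type_size() path above and yields a runtime value, while
   ordinary types push the constant from type_size(); either way the
   result is made unsigned, matching size_t. */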
4496 case TOK_builtin_expect:
4497 /* __builtin_expect is a no-op for now */
4498 parse_builtin_params(0, "ee");
4499 vpop();
4500 break;
4501 case TOK_builtin_types_compatible_p:
4502 parse_builtin_params(0, "tt");
4503 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4504 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4505 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4506 vtop -= 2;
4507 vpushi(n);
4508 break;
4509 case TOK_builtin_choose_expr:
4511 int64_t c;
4512 next();
4513 skip('(');
4514 c = expr_const64();
4515 skip(',');
4516 if (!c) {
4517 nocode_wanted++;
4519 expr_eq();
4520 if (!c) {
4521 vpop();
4522 nocode_wanted--;
4524 skip(',');
4525 if (c) {
4526 nocode_wanted++;
4528 expr_eq();
4529 if (c) {
4530 vpop();
4531 nocode_wanted--;
4533 skip(')');
4535 break;
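/* For illustration: __builtin_choose_expr(1, a, b) evaluates to 'a';
   the rejected arm is still parsed above, but with nocode_wanted raised
   so it generates no code and its value is popped. */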
4536 case TOK_builtin_constant_p:
4537 parse_builtin_params(1, "e");
4538 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4539 vtop--;
4540 vpushi(n);
4541 break;
4542 case TOK_builtin_frame_address:
4543 case TOK_builtin_return_address:
4545 int tok1 = tok;
4546 int level;
4547 CType type;
4548 next();
4549 skip('(');
4550 if (tok != TOK_CINT) {
4551 tcc_error("%s only takes positive integers",
4552 tok1 == TOK_builtin_return_address ?
4553 "__builtin_return_address" :
4554 "__builtin_frame_address");
4556 level = (uint32_t)tokc.i;
4557 next();
4558 skip(')');
4559 type.t = VT_VOID;
4560 mk_pointer(&type);
4561 vset(&type, VT_LOCAL, 0); /* local frame */
4562 while (level--) {
4563 mk_pointer(&vtop->type);
4564 indir(); /* -> parent frame */
4566 if (tok1 == TOK_builtin_return_address) {
4567 // assume return address is just above frame pointer on stack
4568 vpushi(PTR_SIZE);
4569 gen_op('+');
4570 mk_pointer(&vtop->type);
4571 indir();
4574 break;
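/* For illustration: __builtin_return_address(1) walks one saved frame
   pointer in the loop above and then loads the word at frame + PTR_SIZE,
   relying on the "return address just above the frame pointer" layout
   assumed here. */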
4575 #ifdef TCC_TARGET_X86_64
4576 #ifdef TCC_TARGET_PE
4577 case TOK_builtin_va_start:
4578 parse_builtin_params(0, "ee");
4579 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4580 tcc_error("__builtin_va_start expects a local variable");
4581 vtop->r &= ~VT_LVAL;
4582 vtop->type = char_pointer_type;
4583 vtop->c.i += 8;
4584 vstore();
4585 break;
4586 #else
4587 case TOK_builtin_va_arg_types:
4588 parse_builtin_params(0, "t");
4589 vpushi(classify_x86_64_va_arg(&vtop->type));
4590 vswap();
4591 vpop();
4592 break;
4593 #endif
4594 #endif
4596 #ifdef TCC_TARGET_ARM64
4597 case TOK___va_start: {
4598 parse_builtin_params(0, "ee");
4599 //xx check types
4600 gen_va_start();
4601 vpushi(0);
4602 vtop->type.t = VT_VOID;
4603 break;
4605 case TOK___va_arg: {
4606 CType type;
4607 parse_builtin_params(0, "et");
4608 type = vtop->type;
4609 vpop();
4610 //xx check types
4611 gen_va_arg(&type);
4612 vtop->type = type;
4613 break;
4615 case TOK___arm64_clear_cache: {
4616 parse_builtin_params(0, "ee");
4617 gen_clear_cache();
4618 vpushi(0);
4619 vtop->type.t = VT_VOID;
4620 break;
4622 #endif
4623 /* pre operations */
4624 case TOK_INC:
4625 case TOK_DEC:
4626 t = tok;
4627 next();
4628 unary();
4629 inc(0, t);
4630 break;
4631 case '-':
4632 next();
4633 unary();
4634 t = vtop->type.t & VT_BTYPE;
4635 if (is_float(t)) {
4636 /* In IEEE negate(x) isn't subtract(0,x), but rather
4637 subtract(-0, x). */
4638 vpush(&vtop->type);
4639 if (t == VT_FLOAT)
4640 vtop->c.f = -1.0 * 0.0;
4641 else if (t == VT_DOUBLE)
4642 vtop->c.d = -1.0 * 0.0;
4643 else
4644 vtop->c.ld = -1.0 * 0.0;
4645 } else
4646 vpushi(0);
4647 vswap();
4648 gen_op('-');
4649 break;
4650 case TOK_LAND:
4651 if (!gnu_ext)
4652 goto tok_identifier;
4653 next();
4654 /* allow taking the address of a label */
4655 if (tok < TOK_UIDENT)
4656 expect("label identifier");
4657 s = label_find(tok);
4658 if (!s) {
4659 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4660 } else {
4661 if (s->r == LABEL_DECLARED)
4662 s->r = LABEL_FORWARD;
4664 if (!s->type.t) {
4665 s->type.t = VT_VOID;
4666 mk_pointer(&s->type);
4667 s->type.t |= VT_STATIC;
4669 vpushsym(&s->type, s);
4670 next();
4671 break;
4673 // special qNaN, sNaN and infinity values
4674 case TOK___NAN__:
4675 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4676 next();
4677 break;
4678 case TOK___SNAN__:
4679 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4680 next();
4681 break;
4682 case TOK___INF__:
4683 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4684 next();
4685 break;
4687 default:
4688 tok_identifier:
4689 t = tok;
4690 next();
4691 if (t < TOK_UIDENT)
4692 expect("identifier");
4693 s = sym_find(t);
4694 if (!s) {
4695 const char *name = get_tok_str(t, NULL);
4696 if (tok != '(')
4697 tcc_error("'%s' undeclared", name);
4698 /* for simple function calls, we tolerate an undeclared
4699 external reference to an int() function */
4700 if (tcc_state->warn_implicit_function_declaration
4701 #ifdef TCC_TARGET_PE
4702 /* people must be warned about using undeclared WINAPI functions
4703 (which usually start with an uppercase letter) */
4704 || (name[0] >= 'A' && name[0] <= 'Z')
4705 #endif
4707 tcc_warning("implicit declaration of function '%s'", name);
4708 s = external_global_sym(t, &func_old_type, 0);
4711 r = s->r;
4712 /* A symbol that has a register is a local register variable,
4713 which starts out as VT_LOCAL value. */
4714 if ((r & VT_VALMASK) < VT_CONST)
4715 r = (r & ~VT_VALMASK) | VT_LOCAL;
4717 vset(&s->type, r, s->c);
4718 /* Point to s as backpointer (even without r&VT_SYM).
4719 Will be used by at least the x86 inline asm parser for
4720 regvars. */
4721 vtop->sym = s;
4722 if (vtop->r & VT_SYM) {
4723 vtop->c.i = 0;
4725 break;
4728 /* post operations */
4729 while (1) {
4730 if (tok == TOK_INC || tok == TOK_DEC) {
4731 inc(1, tok);
4732 next();
4733 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4734 int qualifiers;
4735 /* field */
4736 if (tok == TOK_ARROW)
4737 indir();
4738 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4739 test_lvalue();
4740 gaddrof();
4741 /* expect pointer on structure */
4742 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4743 expect("struct or union");
4744 if (tok == TOK_CDOUBLE)
4745 expect("field name");
4746 next();
4747 if (tok == TOK_CINT || tok == TOK_CUINT)
4748 expect("field name");
4749 s = find_field(&vtop->type, tok);
4750 if (!s)
4751 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4752 /* add field offset to pointer */
4753 vtop->type = char_pointer_type; /* change type to 'char *' */
4754 vpushi(s->c);
4755 gen_op('+');
4756 /* change type to field type, and set to lvalue */
4757 vtop->type = s->type;
4758 vtop->type.t |= qualifiers;
4759 /* an array is never an lvalue */
4760 if (!(vtop->type.t & VT_ARRAY)) {
4761 vtop->r |= lvalue_type(vtop->type.t);
4762 #ifdef CONFIG_TCC_BCHECK
4763 /* if bound checking, the referenced pointer must be checked */
4764 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4765 vtop->r |= VT_MUSTBOUND;
4766 #endif
4768 next();
4769 } else if (tok == '[') {
4770 next();
4771 gexpr();
4772 gen_op('+');
4773 indir();
4774 skip(']');
4775 } else if (tok == '(') {
4776 SValue ret;
4777 Sym *sa;
4778 int nb_args, ret_nregs, ret_align, regsize, variadic;
4780 /* function call */
4781 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4782 /* pointer test (no array accepted) */
4783 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4784 vtop->type = *pointed_type(&vtop->type);
4785 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4786 goto error_func;
4787 } else {
4788 error_func:
4789 expect("function pointer");
4791 } else {
4792 vtop->r &= ~VT_LVAL; /* no lvalue */
4794 /* get return type */
4795 s = vtop->type.ref;
4796 next();
4797 sa = s->next; /* first parameter */
4798 nb_args = regsize = 0;
4799 ret.r2 = VT_CONST;
4800 /* compute first implicit argument if a structure is returned */
4801 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4802 variadic = (s->c == FUNC_ELLIPSIS);
4803 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4804 &ret_align, &regsize);
4805 if (!ret_nregs) {
4806 /* get some space for the returned structure */
4807 size = type_size(&s->type, &align);
4808 #ifdef TCC_TARGET_ARM64
4809 /* On arm64, a small struct is returned in registers.
4810 It is much easier to write it to memory if we know
4811 that we are allowed to write some extra bytes, so
4812 round the allocated space up to a power of 2: */
4813 if (size < 16)
4814 while (size & (size - 1))
4815 size = (size | (size - 1)) + 1;
4816 #endif
4817 loc = (loc - size) & -align;
4818 ret.type = s->type;
4819 ret.r = VT_LOCAL | VT_LVAL;
4820 /* pass it as 'int' to avoid structure arg passing
4821 problems */
4822 vseti(VT_LOCAL, loc);
4823 ret.c = vtop->c;
4824 nb_args++;
4826 } else {
4827 ret_nregs = 1;
4828 ret.type = s->type;
4831 if (ret_nregs) {
4832 /* return in register */
4833 if (is_float(ret.type.t)) {
4834 ret.r = reg_fret(ret.type.t);
4835 #ifdef TCC_TARGET_X86_64
4836 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4837 ret.r2 = REG_QRET;
4838 #endif
4839 } else {
4840 #ifndef TCC_TARGET_ARM64
4841 #ifdef TCC_TARGET_X86_64
4842 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4843 #else
4844 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4845 #endif
4846 ret.r2 = REG_LRET;
4847 #endif
4848 ret.r = REG_IRET;
4850 ret.c.i = 0;
4852 if (tok != ')') {
4853 for(;;) {
4854 expr_eq();
4855 gfunc_param_typed(s, sa);
4856 nb_args++;
4857 if (sa)
4858 sa = sa->next;
4859 if (tok == ')')
4860 break;
4861 skip(',');
4864 if (sa)
4865 tcc_error("too few arguments to function");
4866 skip(')');
4867 gfunc_call(nb_args);
4869 /* return value */
4870 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4871 vsetc(&ret.type, r, &ret.c);
4872 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4875 /* handle packed struct return */
4876 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4877 int addr, offset;
4879 size = type_size(&s->type, &align);
4880 /* We're writing whole regs often, make sure there's enough
4881 space. Assume register size is power of 2. */
4882 if (regsize > align)
4883 align = regsize;
4884 loc = (loc - size) & -align;
4885 addr = loc;
4886 offset = 0;
4887 for (;;) {
4888 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4889 vswap();
4890 vstore();
4891 vtop--;
4892 if (--ret_nregs == 0)
4893 break;
4894 offset += regsize;
4896 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4898 } else {
4899 break;
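/* Binary expressions are parsed by the classic recursive-descent
   precedence chain below:
     expr_eq (assignment) -> expr_cond ('?:') -> expr_lor ('||')
     -> expr_land ('&&') -> expr_or ('|') -> expr_xor ('^')
     -> expr_and ('&') -> expr_cmpeq ('==' '!=') -> expr_cmp (relational)
     -> expr_shift ('<<' '>>') -> expr_sum ('+' '-')
     -> expr_prod ('*' '/' '%') -> unary().
   Each operator level parses the tighter-binding level for its operands
   and combines them with gen_op(). */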
4904 ST_FUNC void expr_prod(void)
4906 int t;
4908 unary();
4909 while (tok == '*' || tok == '/' || tok == '%') {
4910 t = tok;
4911 next();
4912 unary();
4913 gen_op(t);
4917 ST_FUNC void expr_sum(void)
4919 int t;
4921 expr_prod();
4922 while (tok == '+' || tok == '-') {
4923 t = tok;
4924 next();
4925 expr_prod();
4926 gen_op(t);
4930 static void expr_shift(void)
4932 int t;
4934 expr_sum();
4935 while (tok == TOK_SHL || tok == TOK_SAR) {
4936 t = tok;
4937 next();
4938 expr_sum();
4939 gen_op(t);
4943 static void expr_cmp(void)
4945 int t;
4947 expr_shift();
4948 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4949 tok == TOK_ULT || tok == TOK_UGE) {
4950 t = tok;
4951 next();
4952 expr_shift();
4953 gen_op(t);
4957 static void expr_cmpeq(void)
4959 int t;
4961 expr_cmp();
4962 while (tok == TOK_EQ || tok == TOK_NE) {
4963 t = tok;
4964 next();
4965 expr_cmp();
4966 gen_op(t);
4970 static void expr_and(void)
4972 expr_cmpeq();
4973 while (tok == '&') {
4974 next();
4975 expr_cmpeq();
4976 gen_op('&');
4980 static void expr_xor(void)
4982 expr_and();
4983 while (tok == '^') {
4984 next();
4985 expr_and();
4986 gen_op('^');
4990 static void expr_or(void)
4992 expr_xor();
4993 while (tok == '|') {
4994 next();
4995 expr_xor();
4996 gen_op('|');
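/* expr_land/expr_lor below implement short-circuit evaluation: when an
   operand is a compile-time constant that decides the result, the
   remaining operands are parsed with nocode_wanted raised and discarded;
   otherwise each operand is tested with gvtst() and the accumulated
   jumps are resolved into a single 0/1 value. */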
5000 static void expr_land(void)
5002 expr_or();
5003 if (tok == TOK_LAND) {
5004 int t = 0;
5005 for(;;) {
5006 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5007 CType ctb;
5008 ctb.t = VT_BOOL;
5009 gen_cast(&ctb);
5010 if (vtop->c.i) {
5011 vpop();
5012 } else {
5013 nocode_wanted++;
5014 while (tok == TOK_LAND) {
5015 next();
5016 expr_or();
5017 vpop();
5019 nocode_wanted--;
5020 if (t)
5021 gsym(t);
5022 gen_cast(&int_type);
5023 break;
5025 } else {
5026 if (!t)
5027 save_regs(1);
5028 t = gvtst(1, t);
5030 if (tok != TOK_LAND) {
5031 if (t)
5032 vseti(VT_JMPI, t);
5033 else
5034 vpushi(1);
5035 break;
5037 next();
5038 expr_or();
5043 static void expr_lor(void)
5045 expr_land();
5046 if (tok == TOK_LOR) {
5047 int t = 0;
5048 for(;;) {
5049 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5050 CType ctb;
5051 ctb.t = VT_BOOL;
5052 gen_cast(&ctb);
5053 if (!vtop->c.i) {
5054 vpop();
5055 } else {
5056 nocode_wanted++;
5057 while (tok == TOK_LOR) {
5058 next();
5059 expr_land();
5060 vpop();
5062 nocode_wanted--;
5063 if (t)
5064 gsym(t);
5065 gen_cast(&int_type);
5066 break;
5068 } else {
5069 if (!t)
5070 save_regs(1);
5071 t = gvtst(0, t);
5073 if (tok != TOK_LOR) {
5074 if (t)
5075 vseti(VT_JMP, t);
5076 else
5077 vpushi(0);
5078 break;
5080 next();
5081 expr_land();
5086 /* Assuming vtop is a value used in a conditional context
5087 (i.e. compared with zero) return 0 if it's false, 1 if
5088 true and -1 if it can't be statically determined. */
5089 static int condition_3way(void)
5091 int c = -1;
5092 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5093 (!(vtop->r & VT_SYM) ||
5094 !(vtop->sym->type.t & VT_WEAK))) {
5095 CType boolean;
5096 boolean.t = VT_BOOL;
5097 vdup();
5098 gen_cast(&boolean);
5099 c = vtop->c.i;
5100 vpop();
5102 return c;
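/* For '?:' the condition is first classified with condition_3way().
   If it is statically known, the dead arm is parsed with nocode_wanted
   raised so it produces no code; otherwise both arms are generated into
   the same register class and joined with gvtst()/gjmp() jumps. */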
5105 static void expr_cond(void)
5107 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5108 SValue sv;
5109 CType type, type1, type2;
5111 expr_lor();
5112 if (tok == '?') {
5113 next();
5114 c = condition_3way();
5115 g = (tok == ':' && gnu_ext);
5116 if (c < 0) {
5117 /* needed to avoid having different registers saved in
5118 each branch */
5119 if (is_float(vtop->type.t)) {
5120 rc = RC_FLOAT;
5121 #ifdef TCC_TARGET_X86_64
5122 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5123 rc = RC_ST0;
5125 #endif
5126 } else
5127 rc = RC_INT;
5128 gv(rc);
5129 save_regs(1);
5130 if (g)
5131 gv_dup();
5132 tt = gvtst(1, 0);
5134 } else {
5135 if (!g)
5136 vpop();
5137 tt = 0;
5140 if (1) {
5141 if (c == 0)
5142 nocode_wanted++;
5143 if (!g)
5144 gexpr();
5146 type1 = vtop->type;
5147 sv = *vtop; /* save value to handle it later */
5148 vtop--; /* no vpop so that FP stack is not flushed */
5149 skip(':');
5151 u = 0;
5152 if (c < 0)
5153 u = gjmp(0);
5154 gsym(tt);
5156 if (c == 0)
5157 nocode_wanted--;
5158 if (c == 1)
5159 nocode_wanted++;
5160 expr_cond();
5161 if (c == 1)
5162 nocode_wanted--;
5164 type2 = vtop->type;
5165 t1 = type1.t;
5166 bt1 = t1 & VT_BTYPE;
5167 t2 = type2.t;
5168 bt2 = t2 & VT_BTYPE;
5169 /* cast operands to correct type according to ISOC rules */
5170 if (is_float(bt1) || is_float(bt2)) {
5171 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5172 type.t = VT_LDOUBLE;
5174 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5175 type.t = VT_DOUBLE;
5176 } else {
5177 type.t = VT_FLOAT;
5179 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5180 /* cast to biggest op */
5181 type.t = VT_LLONG;
5182 /* convert to unsigned if it does not fit in a long long */
5183 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5184 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5185 type.t |= VT_UNSIGNED;
5186 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5187 /* If one is a null ptr constant the result type
5188 is the other. */
5189 if (is_null_pointer (vtop))
5190 type = type1;
5191 else if (is_null_pointer (&sv))
5192 type = type2;
5193 /* XXX: test pointer compatibility, C99 has more elaborate
5194 rules here. */
5195 else
5196 type = type1;
5197 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5198 /* XXX: test function pointer compatibility */
5199 type = bt1 == VT_FUNC ? type1 : type2;
5200 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5201 /* XXX: test structure compatibility */
5202 type = bt1 == VT_STRUCT ? type1 : type2;
5203 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5204 /* NOTE: as an extension, we accept void on only one side */
5205 type.t = VT_VOID;
5206 } else {
5207 /* integer operations */
5208 type.t = VT_INT;
5209 /* convert to unsigned if it does not fit in an integer */
5210 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5211 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5212 type.t |= VT_UNSIGNED;
5214 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5215 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5216 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5217 islv &= c < 0;
5219 /* now we convert second operand */
5220 if (c != 1) {
5221 gen_cast(&type);
5222 if (islv) {
5223 mk_pointer(&vtop->type);
5224 gaddrof();
5225 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5226 gaddrof();
5229 rc = RC_INT;
5230 if (is_float(type.t)) {
5231 rc = RC_FLOAT;
5232 #ifdef TCC_TARGET_X86_64
5233 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5234 rc = RC_ST0;
5236 #endif
5237 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5238 /* for long longs, we use fixed registers to avoid having
5239 to handle a complicated move */
5240 rc = RC_IRET;
5243 tt = r2 = 0;
5244 if (c < 0) {
5245 r2 = gv(rc);
5246 tt = gjmp(0);
5248 gsym(u);
5250 /* this is horrible, but we must also convert first
5251 operand */
5252 if (c != 0) {
5253 *vtop = sv;
5254 gen_cast(&type);
5255 if (islv) {
5256 mk_pointer(&vtop->type);
5257 gaddrof();
5258 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5259 gaddrof();
5262 if (c < 0) {
5263 r1 = gv(rc);
5264 move_reg(r2, r1, type.t);
5265 vtop->r = r2;
5266 gsym(tt);
5267 if (islv)
5268 indir();
5274 static void expr_eq(void)
5276 int t;
5278 expr_cond();
5279 if (tok == '=' ||
5280 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5281 tok == TOK_A_XOR || tok == TOK_A_OR ||
5282 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5283 test_lvalue();
5284 t = tok;
5285 next();
5286 if (t == '=') {
5287 expr_eq();
5288 } else {
5289 vdup();
5290 expr_eq();
5291 gen_op(t & 0x7f);
5293 vstore();
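/* Note: compound assignment tokens (TOK_A_ADD, TOK_A_SHL, ...) encode
   the plain operator in their low 7 bits, which is why the code above
   can duplicate the lvalue, evaluate the right-hand side and simply
   call gen_op(t & 0x7f) before storing the result back. */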
5297 ST_FUNC void gexpr(void)
5299 while (1) {
5300 expr_eq();
5301 if (tok != ',')
5302 break;
5303 vpop();
5304 next();
5308 /* parse a constant expression and return value in vtop. */
5309 static void expr_const1(void)
5311 const_wanted++;
5312 expr_cond();
5313 const_wanted--;
5316 /* parse an integer constant and return its value. */
5317 static inline int64_t expr_const64(void)
5319 int64_t c;
5320 expr_const1();
5321 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5322 expect("constant expression");
5323 c = vtop->c.i;
5324 vpop();
5325 return c;
5328 /* parse an integer constant and return its value.
5329 Complain if it doesn't fit in 32 bits (signed or unsigned). */
5330 ST_FUNC int expr_const(void)
5332 int c;
5333 int64_t wc = expr_const64();
5334 c = wc;
5335 if (c != wc && (unsigned)c != wc)
5336 tcc_error("constant exceeds 32 bit");
5337 return c;
5340 /* return the label token if the current token is a label, otherwise
5341 return zero */
5342 static int is_label(void)
5344 int last_tok;
5346 /* fast test first */
5347 if (tok < TOK_UIDENT)
5348 return 0;
5349 /* no need to save tokc because tok is an identifier */
5350 last_tok = tok;
5351 next();
5352 if (tok == ':') {
5353 return last_tok;
5354 } else {
5355 unget_tok(last_tok);
5356 return 0;
5360 #ifndef TCC_TARGET_ARM64
5361 static void gfunc_return(CType *func_type)
5363 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5364 CType type, ret_type;
5365 int ret_align, ret_nregs, regsize;
5366 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5367 &ret_align, &regsize);
5368 if (0 == ret_nregs) {
5369 /* if returning structure, must copy it to implicit
5370 first pointer arg location */
5371 type = *func_type;
5372 mk_pointer(&type);
5373 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5374 indir();
5375 vswap();
5376 /* copy structure value to pointer */
5377 vstore();
5378 } else {
5379 /* returning structure packed into registers */
5380 int r, size, addr, align;
5381 size = type_size(func_type,&align);
5382 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5383 (vtop->c.i & (ret_align-1)))
5384 && (align & (ret_align-1))) {
5385 loc = (loc - size) & -ret_align;
5386 addr = loc;
5387 type = *func_type;
5388 vset(&type, VT_LOCAL | VT_LVAL, addr);
5389 vswap();
5390 vstore();
5391 vpop();
5392 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5394 vtop->type = ret_type;
5395 if (is_float(ret_type.t))
5396 r = rc_fret(ret_type.t);
5397 else
5398 r = RC_IRET;
5400 if (ret_nregs == 1)
5401 gv(r);
5402 else {
5403 for (;;) {
5404 vdup();
5405 gv(r);
5406 vpop();
5407 if (--ret_nregs == 0)
5408 break;
5409 /* We assume that when a structure is returned in multiple
5410 registers, their register classes are consecutive values of the
5411 sequence s(n) = 2^n, i.e. successive powers of two */
5412 r <<= 1;
5413 vtop->c.i += regsize;
5417 } else if (is_float(func_type->t)) {
5418 gv(rc_fret(func_type->t));
5419 } else {
5420 gv(RC_IRET);
5422 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5424 #endif
5426 static int case_cmp(const void *pa, const void *pb)
5428 int64_t a = (*(struct case_t**) pa)->v1;
5429 int64_t b = (*(struct case_t**) pb)->v1;
5430 return a < b ? -1 : a > b;
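/* gcase() lowers a switch over the sorted case ranges: while more than
   4 ranges remain it emits a binary search against the middle range's
   bounds, then finishes with a linear scan.  Single-value cases use one
   equality test, GNU case ranges ('case 1 ... 5:') use a
   v1 <= x && x <= v2 pair of tests. */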
5433 static void gcase(struct case_t **base, int len, int *bsym)
5435 struct case_t *p;
5436 int e;
5437 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5438 gv(RC_INT);
5439 while (len > 4) {
5440 /* binary search */
5441 p = base[len/2];
5442 vdup();
5443 if (ll)
5444 vpushll(p->v2);
5445 else
5446 vpushi(p->v2);
5447 gen_op(TOK_LE);
5448 e = gtst(1, 0);
5449 vdup();
5450 if (ll)
5451 vpushll(p->v1);
5452 else
5453 vpushi(p->v1);
5454 gen_op(TOK_GE);
5455 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5456 /* x < v1 */
5457 gcase(base, len/2, bsym);
5458 if (cur_switch->def_sym)
5459 gjmp_addr(cur_switch->def_sym);
5460 else
5461 *bsym = gjmp(*bsym);
5462 /* x > v2 */
5463 gsym(e);
5464 e = len/2 + 1;
5465 base += e; len -= e;
5467 /* linear scan */
5468 while (len--) {
5469 p = *base++;
5470 vdup();
5471 if (ll)
5472 vpushll(p->v2);
5473 else
5474 vpushi(p->v2);
5475 if (p->v1 == p->v2) {
5476 gen_op(TOK_EQ);
5477 gtst_addr(0, p->sym);
5478 } else {
5479 gen_op(TOK_LE);
5480 e = gtst(1, 0);
5481 vdup();
5482 if (ll)
5483 vpushll(p->v1);
5484 else
5485 vpushi(p->v1);
5486 gen_op(TOK_GE);
5487 gtst_addr(0, p->sym);
5488 gsym(e);
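/* In block() below, bit 0x20000000 of nocode_wanted marks statically
   unreachable code: it is set after 'return'/'break' and after a
   statically-false branch, and cleared again at loop heads, labels and
   case/default labels where control can re-enter. */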
5493 static void block(int *bsym, int *csym, int is_expr)
5495 int a, b, c, d, cond;
5496 Sym *s;
5498 /* generate line number info */
5499 if (tcc_state->do_debug)
5500 tcc_debug_line(tcc_state);
5502 if (is_expr) {
5503 /* default return value is (void) */
5504 vpushi(0);
5505 vtop->type.t = VT_VOID;
5508 if (tok == TOK_IF) {
5509 /* if test */
5510 int saved_nocode_wanted = nocode_wanted;
5511 next();
5512 skip('(');
5513 gexpr();
5514 skip(')');
5515 cond = condition_3way();
5516 if (cond == 1)
5517 a = 0, vpop();
5518 else
5519 a = gvtst(1, 0);
5520 if (cond == 0)
5521 nocode_wanted |= 0x20000000;
5522 block(bsym, csym, 0);
5523 if (cond != 1)
5524 nocode_wanted = saved_nocode_wanted;
5525 c = tok;
5526 if (c == TOK_ELSE) {
5527 next();
5528 d = gjmp(0);
5529 gsym(a);
5530 if (cond == 1)
5531 nocode_wanted |= 0x20000000;
5532 block(bsym, csym, 0);
5533 gsym(d); /* patch else jmp */
5534 if (cond != 0)
5535 nocode_wanted = saved_nocode_wanted;
5536 } else
5537 gsym(a);
5538 } else if (tok == TOK_WHILE) {
5539 int saved_nocode_wanted;
5540 nocode_wanted &= ~0x20000000;
5541 next();
5542 d = ind;
5543 vla_sp_restore();
5544 skip('(');
5545 gexpr();
5546 skip(')');
5547 a = gvtst(1, 0);
5548 b = 0;
5549 ++local_scope;
5550 saved_nocode_wanted = nocode_wanted;
5551 block(&a, &b, 0);
5552 nocode_wanted = saved_nocode_wanted;
5553 --local_scope;
5554 gjmp_addr(d);
5555 gsym(a);
5556 gsym_addr(b, d);
5557 } else if (tok == '{') {
5558 Sym *llabel;
5559 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5561 next();
5562 /* record local declaration stack position */
5563 s = local_stack;
5564 llabel = local_label_stack;
5565 ++local_scope;
5567 /* handle local labels declarations */
5568 if (tok == TOK_LABEL) {
5569 next();
5570 for(;;) {
5571 if (tok < TOK_UIDENT)
5572 expect("label identifier");
5573 label_push(&local_label_stack, tok, LABEL_DECLARED);
5574 next();
5575 if (tok == ',') {
5576 next();
5577 } else {
5578 skip(';');
5579 break;
5583 while (tok != '}') {
5584 if ((a = is_label()))
5585 unget_tok(a);
5586 else
5587 decl(VT_LOCAL);
5588 if (tok != '}') {
5589 if (is_expr)
5590 vpop();
5591 block(bsym, csym, is_expr);
5594 /* pop locally defined labels */
5595 label_pop(&local_label_stack, llabel);
5596 /* pop locally defined symbols */
5597 --local_scope;
5598 /* In the is_expr case (a statement expression is finished here),
5599 vtop might refer to symbols on the local_stack. Either via the
5600 type or via vtop->sym. We can't pop those nor any that in turn
5601 might be referred to. To make it easier we don't roll back
5602 any symbols in that case; some upper level call to block() will
5603 do that. We do have to remove such symbols from the lookup
5604 tables, though. sym_pop will do that. */
5605 sym_pop(&local_stack, s, is_expr);
5607 /* Pop VLA frames and restore stack pointer if required */
5608 if (vlas_in_scope > saved_vlas_in_scope) {
5609 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5610 vla_sp_restore();
5612 vlas_in_scope = saved_vlas_in_scope;
5614 next();
5615 } else if (tok == TOK_RETURN) {
5616 next();
5617 if (tok != ';') {
5618 gexpr();
5619 gen_assign_cast(&func_vt);
5620 gfunc_return(&func_vt);
5622 skip(';');
5623 /* jump unless last stmt in top-level block */
5624 if (tok != '}' || local_scope != 1)
5625 rsym = gjmp(rsym);
5626 nocode_wanted |= 0x20000000;
5627 } else if (tok == TOK_BREAK) {
5628 /* compute jump */
5629 if (!bsym)
5630 tcc_error("cannot break");
5631 *bsym = gjmp(*bsym);
5632 next();
5633 skip(';');
5634 nocode_wanted |= 0x20000000;
5635 } else if (tok == TOK_CONTINUE) {
5636 /* compute jump */
5637 if (!csym)
5638 tcc_error("cannot continue");
5639 vla_sp_restore_root();
5640 *csym = gjmp(*csym);
5641 next();
5642 skip(';');
5643 } else if (tok == TOK_FOR) {
5644 int e;
5645 int saved_nocode_wanted;
5646 nocode_wanted &= ~0x20000000;
5647 next();
5648 skip('(');
5649 s = local_stack;
5650 ++local_scope;
5651 if (tok != ';') {
5652 /* c99 for-loop init decl? */
5653 if (!decl0(VT_LOCAL, 1, NULL)) {
5654 /* no, regular for-loop init expr */
5655 gexpr();
5656 vpop();
5659 skip(';');
5660 d = ind;
5661 c = ind;
5662 vla_sp_restore();
5663 a = 0;
5664 b = 0;
5665 if (tok != ';') {
5666 gexpr();
5667 a = gvtst(1, 0);
5669 skip(';');
5670 if (tok != ')') {
5671 e = gjmp(0);
5672 c = ind;
5673 vla_sp_restore();
5674 gexpr();
5675 vpop();
5676 gjmp_addr(d);
5677 gsym(e);
5679 skip(')');
5680 saved_nocode_wanted = nocode_wanted;
5681 block(&a, &b, 0);
5682 nocode_wanted = saved_nocode_wanted;
5683 gjmp_addr(c);
5684 gsym(a);
5685 gsym_addr(b, c);
5686 --local_scope;
5687 sym_pop(&local_stack, s, 0);
5689 } else
5690 if (tok == TOK_DO) {
5691 int saved_nocode_wanted;
5692 nocode_wanted &= ~0x20000000;
5693 next();
5694 a = 0;
5695 b = 0;
5696 d = ind;
5697 vla_sp_restore();
5698 saved_nocode_wanted = nocode_wanted;
5699 block(&a, &b, 0);
5700 skip(TOK_WHILE);
5701 skip('(');
5702 gsym(b);
5703 gexpr();
5704 c = gvtst(0, 0);
5705 gsym_addr(c, d);
5706 nocode_wanted = saved_nocode_wanted;
5707 skip(')');
5708 gsym(a);
5709 skip(';');
5710 } else
5711 if (tok == TOK_SWITCH) {
5712 struct switch_t *saved, sw;
5713 int saved_nocode_wanted = nocode_wanted;
5714 SValue switchval;
5715 next();
5716 skip('(');
5717 gexpr();
5718 skip(')');
5719 switchval = *vtop--;
5720 a = 0;
5721 b = gjmp(0); /* jump to first case */
5722 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5723 saved = cur_switch;
5724 cur_switch = &sw;
5725 block(&a, csym, 0);
5726 nocode_wanted = saved_nocode_wanted;
5727 a = gjmp(a); /* add implicit break */
5728 /* case lookup */
5729 gsym(b);
5730 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5731 for (b = 1; b < sw.n; b++)
5732 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5733 tcc_error("duplicate case value");
5734 /* Our switch table sorting is signed, so the compared
5735 value needs to be as well when it's 64bit. */
5736 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5737 switchval.type.t &= ~VT_UNSIGNED;
5738 vpushv(&switchval);
5739 gcase(sw.p, sw.n, &a);
5740 vpop();
5741 if (sw.def_sym)
5742 gjmp_addr(sw.def_sym);
5743 dynarray_reset(&sw.p, &sw.n);
5744 cur_switch = saved;
5745 /* break label */
5746 gsym(a);
5747 } else
5748 if (tok == TOK_CASE) {
5749 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5750 if (!cur_switch)
5751 expect("switch");
5752 nocode_wanted &= ~0x20000000;
5753 next();
5754 cr->v1 = cr->v2 = expr_const64();
5755 if (gnu_ext && tok == TOK_DOTS) {
5756 next();
5757 cr->v2 = expr_const64();
5758 if (cr->v2 < cr->v1)
5759 tcc_warning("empty case range");
5761 cr->sym = ind;
5762 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
5763 skip(':');
5764 is_expr = 0;
5765 goto block_after_label;
5766 } else
5767 if (tok == TOK_DEFAULT) {
5768 next();
5769 skip(':');
5770 if (!cur_switch)
5771 expect("switch");
5772 if (cur_switch->def_sym)
5773 tcc_error("too many 'default'");
5774 cur_switch->def_sym = ind;
5775 is_expr = 0;
5776 goto block_after_label;
5777 } else
5778 if (tok == TOK_GOTO) {
5779 next();
5780 if (tok == '*' && gnu_ext) {
5781 /* computed goto */
5782 next();
5783 gexpr();
5784 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5785 expect("pointer");
5786 ggoto();
5787 } else if (tok >= TOK_UIDENT) {
5788 s = label_find(tok);
5789 /* put forward definition if needed */
5790 if (!s) {
5791 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5792 } else {
5793 if (s->r == LABEL_DECLARED)
5794 s->r = LABEL_FORWARD;
5796 vla_sp_restore_root();
5797 if (s->r & LABEL_FORWARD)
5798 s->jnext = gjmp(s->jnext);
5799 else
5800 gjmp_addr(s->jnext);
5801 next();
5802 } else {
5803 expect("label identifier");
5805 skip(';');
5806 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5807 asm_instr();
5808 } else {
5809 b = is_label();
5810 if (b) {
5811 /* label case */
5812 next();
5813 s = label_find(b);
5814 if (s) {
5815 if (s->r == LABEL_DEFINED)
5816 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5817 gsym(s->jnext);
5818 s->r = LABEL_DEFINED;
5819 } else {
5820 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5822 s->jnext = ind;
5823 vla_sp_restore();
5824 /* we accept this, but it is a mistake */
5825 block_after_label:
5826 nocode_wanted &= ~0x20000000;
5827 if (tok == '}') {
5828 tcc_warning("deprecated use of label at end of compound statement");
5829 } else {
5830 if (is_expr)
5831 vpop();
5832 block(bsym, csym, is_expr);
5834 } else {
5835 /* expression case */
5836 if (tok != ';') {
5837 if (is_expr) {
5838 vpop();
5839 gexpr();
5840 } else {
5841 gexpr();
5842 vpop();
5845 skip(';');
5850 /* This skips over a stream of tokens containing balanced {} and ()
5851 pairs, stopping at outer ',' ';' and '}'. If STR is non-NULL, it allocates
5852 a token string and stores the skipped tokens in *STR. This doesn't check if
5853 () and {} are nested correctly, i.e. "({)}" is accepted. */
5854 static void skip_or_save_block(TokenString **str)
5856 int level = 0;
5857 if (str)
5858 *str = tok_str_alloc();
5860 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';'))) {
5861 int t;
5862 if (tok == TOK_EOF) {
5863 if (str || level > 0)
5864 tcc_error("unexpected end of file");
5865 else
5866 break;
5868 if (str)
5869 tok_str_add_tok(*str);
5870 t = tok;
5871 next();
5872 if (t == '{' || t == '(') {
5873 level++;
5874 } else if (t == '}' || t == ')') {
5875 level--;
5876 if (level == 0)
5877 break;
5880 if (str) {
5881 tok_str_add(*str, -1);
5882 tok_str_add(*str, 0);
5886 #define EXPR_CONST 1
5887 #define EXPR_ANY 2
5889 static void parse_init_elem(int expr_type)
5891 int saved_global_expr;
5892 switch(expr_type) {
5893 case EXPR_CONST:
5894 /* compound literals must be allocated globally in this case */
5895 saved_global_expr = global_expr;
5896 global_expr = 1;
5897 expr_const1();
5898 global_expr = saved_global_expr;
5899 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5900 (compound literals). */
5901 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5902 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
5903 || vtop->sym->v < SYM_FIRST_ANOM))
5904 #ifdef TCC_TARGET_PE
5905 || (vtop->type.t & VT_IMPORT)
5906 #endif
5908 tcc_error("initializer element is not constant");
5909 break;
5910 case EXPR_ANY:
5911 expr_eq();
5912 break;
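/* For illustration, decl_designator() handles designated initializers
   such as
       struct point pt = { .y = 2 };
       int a[10] = { [2] = 1, [4 ... 6] = 3 };
   (the '...' range form is a GNU extension).  For a range, nb_elems > 1
   and the single parsed value is copied over the whole range further
   below. */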
5916 /* 'type' is the array or struct type. 'c' is the array or struct
5917 address. cur_field points to the current field
5918 symbol; for arrays its 'c' member contains the current start
5919 index and its 'r' member the end index (in case of range init).
5920 'size_only' is true if only size info is needed (only used
5921 in arrays) */
5922 static void decl_designator(CType *type, Section *sec, unsigned long c,
5923 Sym **cur_field, int size_only)
5925 Sym *s, *f;
5926 int index, index_last, align, l, nb_elems, elem_size;
5928 elem_size = 0;
5929 nb_elems = 1;
5930 if (gnu_ext && (l = is_label()) != 0)
5931 goto struct_field;
5932 /* NOTE: we only support ranges for last designator */
5933 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
5934 if (tok == '[') {
5935 if (!(type->t & VT_ARRAY))
5936 expect("array type");
5937 next();
5938 index = index_last = expr_const();
5939 if (tok == TOK_DOTS && gnu_ext) {
5940 next();
5941 index_last = expr_const();
5943 skip(']');
5944 s = type->ref;
5945 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
5946 index_last < index)
5947 tcc_error("invalid index");
5948 if (cur_field) {
5949 (*cur_field)->c = index;
5950 (*cur_field)->r = index_last;
5952 type = pointed_type(type);
5953 elem_size = type_size(type, &align);
5954 c += index * elem_size;
5955 nb_elems = index_last - index + 1;
5956 } else {
5957 next();
5958 l = tok;
5959 struct_field:
5960 next();
5961 if ((type->t & VT_BTYPE) != VT_STRUCT)
5962 expect("struct/union type");
5963 f = find_field(type, l);
5964 if (!f)
5965 expect("field");
5966 if (cur_field)
5967 *cur_field = f;
5968 type = &f->type;
5969 c += f->c;
5971 cur_field = NULL;
5973 if (!cur_field) {
5974 if (tok == '=') {
5975 next();
5976 } else if (!gnu_ext) {
5977 expect("=");
5979 } else {
5980 if (type->t & VT_ARRAY) {
5981 index = (*cur_field)->c;
5982 if (type->ref->c >= 0 && index >= type->ref->c)
5983 tcc_error("index too large");
5984 type = pointed_type(type);
5985 c += index * type_size(type, &align);
5986 } else {
5987 f = *cur_field;
5988 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
5989 *cur_field = f = f->next;
5990 if (!f)
5991 tcc_error("too many field init");
5992 type = &f->type;
5993 c += f->c;
5996 decl_initializer(type, sec, c, 0, size_only);
5998 /* XXX: make it more general */
5999 if (!size_only && nb_elems > 1) {
6000 unsigned long c_end;
6001 uint8_t *src, *dst;
6002 int i;
6004 if (!sec) {
6005 vset(type, VT_LOCAL|VT_LVAL, c);
6006 for (i = 1; i < nb_elems; i++) {
6007 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6008 vswap();
6009 vstore();
6011 vpop();
6012 } else {
6013 c_end = c + nb_elems * elem_size;
6014 if (c_end > sec->data_allocated)
6015 section_realloc(sec, c_end);
6016 src = sec->data + c;
6017 dst = src;
6018 for(i = 1; i < nb_elems; i++) {
6019 dst += elem_size;
6020 memcpy(dst, src, elem_size);
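/* Note on init_putv() below: for Section-based (static) initializers the
   value in vtop is written directly into the section data; bitfield
   members are or-ed in as (value & bit_mask) << bit_pos, and address
   constants get a relocation via greloc()/greloca() instead of a plain
   store. */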
6026 /* store a value or an expression directly in global data or in local array */
6027 static void init_putv(CType *type, Section *sec, unsigned long c)
6029 int bt, bit_pos, bit_size;
6030 void *ptr;
6031 unsigned long long bit_mask;
6032 CType dtype;
6034 dtype = *type;
6035 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6037 if (sec) {
6038 int size, align;
6039 /* XXX: not portable */
6040 /* XXX: generate error if incorrect relocation */
6041 gen_assign_cast(&dtype);
6042 bt = type->t & VT_BTYPE;
6043 size = type_size(type, &align);
6044 section_reserve(sec, c + size);
6045 ptr = sec->data + c;
6046 /* XXX: make code faster ? */
6047 if (!(type->t & VT_BITFIELD)) {
6048 bit_pos = 0;
6049 bit_size = PTR_SIZE * 8;
6050 bit_mask = -1LL;
6051 } else {
6052 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6053 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6054 bit_mask = (1LL << bit_size) - 1;
6056 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6057 vtop->sym->v >= SYM_FIRST_ANOM &&
6058 /* XXX This rejects compound literals like
6059 '(void *){ptr}'. The problem is that '&sym' is
6060 represented the same way, which would be ruled out
6061 by the SYM_FIRST_ANOM check above, but also '"string"'
6062 in 'char *p = "string"' is represented the same
6063 with the type being VT_PTR and the symbol being an
6064 anonymous one. That is, there's no difference in vtop
6065 between '(void *){x}' and '&(void *){x}'. Ignore
6066 pointer typed entities here. Hopefully no real code
6067 will ever use compound literals with scalar type. */
6068 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6069 /* These come from compound literals, memcpy stuff over. */
6070 Section *ssec;
6071 ElfW(Sym) *esym;
6072 ElfW_Rel *rel;
6073 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6074 ssec = tcc_state->sections[esym->st_shndx];
6075 memmove (ptr, ssec->data + esym->st_value, size);
6076 if (ssec->reloc) {
6077 /* We need to copy over all memory contents, and that
6078 includes relocations. Use the fact that relocs are
6079 created in order, so look from the end of relocs
6080 until we hit one before the copied region. */
6081 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6082 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6083 while (num_relocs--) {
6084 rel--;
6085 if (rel->r_offset >= esym->st_value + size)
6086 continue;
6087 if (rel->r_offset < esym->st_value)
6088 break;
6089 /* Note: if the same fields are initialized multiple
6090 times (possible with designators) then we possibly
6091 add multiple relocations for the same offset here.
6092 That would lead to wrong code, the last reloc needs
6093 to win. We clean this up later after the whole
6094 initializer is parsed. */
6095 put_elf_reloca(symtab_section, sec,
6096 c + rel->r_offset - esym->st_value,
6097 ELFW(R_TYPE)(rel->r_info),
6098 ELFW(R_SYM)(rel->r_info),
6099 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6100 rel->r_addend
6101 #else
6103 #endif
6107 } else {
6108 if ((vtop->r & VT_SYM) &&
6109 (bt == VT_BYTE ||
6110 bt == VT_SHORT ||
6111 bt == VT_DOUBLE ||
6112 bt == VT_LDOUBLE ||
6113 #if PTR_SIZE == 8
6114 (bt == VT_LLONG && bit_size != 64) ||
6115 bt == VT_INT
6116 #else
6117 bt == VT_LLONG ||
6118 (bt == VT_INT && bit_size != 32)
6119 #endif
6121 tcc_error("initializer element is not computable at load time");
6122 switch(bt) {
6123 /* XXX: when cross-compiling we assume that each type has the
6124 same representation on host and target, which is likely to
6125 be wrong in the case of long double */
6126 case VT_BOOL:
6127 vtop->c.i = (vtop->c.i != 0);
6128 case VT_BYTE:
6129 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6130 break;
6131 case VT_SHORT:
6132 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6133 break;
6134 case VT_FLOAT:
6135 *(float*)ptr = vtop->c.f;
6136 break;
6137 case VT_DOUBLE:
6138 *(double *)ptr = vtop->c.d;
6139 break;
6140 case VT_LDOUBLE:
6141 if (sizeof(long double) == LDOUBLE_SIZE)
6142 *(long double *)ptr = vtop->c.ld;
6143 else if (sizeof(double) == LDOUBLE_SIZE)
6144 *(double *)ptr = vtop->c.ld;
6145 else
6146 tcc_error("can't cross compile long double constants");
6147 break;
6148 #if PTR_SIZE != 8
6149 case VT_LLONG:
6150 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6151 break;
6152 #else
6153 case VT_LLONG:
6154 #endif
6155 case VT_PTR:
6157 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6158 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6159 if (vtop->r & VT_SYM)
6160 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6161 else
6162 *(addr_t *)ptr |= val;
6163 #else
6164 if (vtop->r & VT_SYM)
6165 greloc(sec, vtop->sym, c, R_DATA_PTR);
6166 *(addr_t *)ptr |= val;
6167 #endif
6168 break;
6170 default:
6172 int val = (vtop->c.i & bit_mask) << bit_pos;
6173 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6174 if (vtop->r & VT_SYM)
6175 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6176 else
6177 *(int *)ptr |= val;
6178 #else
6179 if (vtop->r & VT_SYM)
6180 greloc(sec, vtop->sym, c, R_DATA_PTR);
6181 *(int *)ptr |= val;
6182 #endif
6183 break;
6187 vtop--;
6188 } else {
6189 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6190 vswap();
6191 vstore();
6192 vpop();
6196 /* put zeros for variable based init */
6197 static void init_putz(Section *sec, unsigned long c, int size)
6199 if (sec) {
6200 /* nothing to do because globals are already set to zero */
6201 } else {
6202 vpush_global_sym(&func_old_type, TOK_memset);
6203 vseti(VT_LOCAL, c);
6204 #ifdef TCC_TARGET_ARM
6205 vpushs(size);
6206 vpushi(0);
6207 #else
6208 vpushi(0);
6209 vpushs(size);
6210 #endif
6211 gfunc_call(3);
6215 /* 't' contains the type and storage info. 'c' is the offset of the
6216 object in section 'sec'. If 'sec' is NULL, it means stack based
6217 allocation. 'first' is true if array '{' must be read (multi
6218 dimension implicit array init handling). 'size_only' is true if
6219 size only evaluation is wanted (only for arrays). */
6220 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6221 int first, int size_only)
6223 int index, array_length, n, no_oblock, nb, i;
6224 int size1, align1;
6225 int have_elem;
6226 Sym *s, *f;
6227 Sym indexsym;
6228 CType *t1;
6230 /* If we currently are at an '}' or ',' we have read an initializer
6231 element in one of our callers, and not yet consumed it. */
6232 have_elem = tok == '}' || tok == ',';
6233 if (!have_elem && tok != '{' &&
6234 /* In case of strings we have special handling for arrays, so
6235 don't consume them as initializer value (which would commit them
6236 to some anonymous symbol). */
6237 tok != TOK_LSTR && tok != TOK_STR &&
6238 !size_only) {
6239 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6240 have_elem = 1;
6243 if (have_elem &&
6244 !(type->t & VT_ARRAY) &&
6245 /* Use is_compatible_parameter_types (i_c_parameter_t) to strip toplevel qualifiers.
6246 The source type might have VT_CONSTANT set, which is
6247 of course assignable to non-const elements. */
6248 is_compatible_parameter_types(type, &vtop->type)) {
6249 init_putv(type, sec, c);
6250 } else if (type->t & VT_ARRAY) {
6251 s = type->ref;
6252 n = s->c;
6253 array_length = 0;
6254 t1 = pointed_type(type);
6255 size1 = type_size(t1, &align1);
6257 no_oblock = 1;
6258 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6259 tok == '{') {
6260 if (tok != '{')
6261 tcc_error("character array initializer must be a literal,"
6262 " optionally enclosed in braces");
6263 skip('{');
6264 no_oblock = 0;
6267 /* only parse strings here if correct type (otherwise: handle
6268 them as ((w)char *) expressions) */
6269 if ((tok == TOK_LSTR &&
6270 #ifdef TCC_TARGET_PE
6271 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6272 #else
6273 (t1->t & VT_BTYPE) == VT_INT
6274 #endif
6275 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6276 while (tok == TOK_STR || tok == TOK_LSTR) {
6277 int cstr_len, ch;
6279 /* compute maximum number of chars wanted */
6280 if (tok == TOK_STR)
6281 cstr_len = tokc.str.size;
6282 else
6283 cstr_len = tokc.str.size / sizeof(nwchar_t);
6284 cstr_len--;
6285 nb = cstr_len;
6286 if (n >= 0 && nb > (n - array_length))
6287 nb = n - array_length;
6288 if (!size_only) {
6289 if (cstr_len > nb)
6290 tcc_warning("initializer-string for array is too long");
6291 /* in order to go faster for the common case (char
6292 string in a global variable), we handle it
6293 specifically */
6294 if (sec && tok == TOK_STR && size1 == 1) {
6295 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6296 } else {
6297 for(i=0;i<nb;i++) {
6298 if (tok == TOK_STR)
6299 ch = ((unsigned char *)tokc.str.data)[i];
6300 else
6301 ch = ((nwchar_t *)tokc.str.data)[i];
6302 vpushi(ch);
6303 init_putv(t1, sec, c + (array_length + i) * size1);
6307 array_length += nb;
6308 next();
6310 /* only add trailing zero if enough storage (no
6311 warning in this case since it is standard) */
6312 if (n < 0 || array_length < n) {
6313 if (!size_only) {
6314 vpushi(0);
6315 init_putv(t1, sec, c + (array_length * size1));
6317 array_length++;
6319 } else {
6320 indexsym.c = 0;
6321 indexsym.r = 0;
6322 f = &indexsym;
6324 do_init_list:
6325 while (tok != '}' || have_elem) {
6326 decl_designator(type, sec, c, &f, size_only);
6327 have_elem = 0;
6328 index = f->c;
6329 /* must put zero in holes (note that doing it that way
6330 ensures that it even works with designators) */
6331 if (!size_only && array_length < index) {
6332 init_putz(sec, c + array_length * size1,
6333 (index - array_length) * size1);
6335 if (type->t & VT_ARRAY) {
6336 index = indexsym.c = ++indexsym.r;
6337 } else {
6338 index = index + type_size(&f->type, &align1);
6339 if (s->type.t == TOK_UNION)
6340 f = NULL;
6341 else
6342 f = f->next;
6344 if (index > array_length)
6345 array_length = index;
6347 if (type->t & VT_ARRAY) {
6348 /* special test for multi dimensional arrays (may not
6349 be strictly correct if designators are used at the
6350 same time) */
6351 if (no_oblock && index >= n)
6352 break;
6353 } else {
6354 if (no_oblock && f == NULL)
6355 break;
6357 if (tok == '}')
6358 break;
6359 skip(',');
6362 /* put zeros at the end */
6363 if (!size_only && array_length < n) {
6364 init_putz(sec, c + array_length * size1,
6365 (n - array_length) * size1);
6367 if (!no_oblock)
6368 skip('}');
6369 /* patch type size if needed, which happens only for array types */
6370 if (n < 0)
6371 s->c = array_length;
6372 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6373 size1 = 1;
6374 no_oblock = 1;
6375 if (first || tok == '{') {
6376 skip('{');
6377 no_oblock = 0;
6379 s = type->ref;
6380 f = s->next;
6381 array_length = 0;
6382 n = s->c;
6383 goto do_init_list;
6384 } else if (tok == '{') {
6385 next();
6386 decl_initializer(type, sec, c, first, size_only);
6387 skip('}');
6388 } else if (size_only) {
6389 /* If we supported only ISO C we wouldn't have to accept calling
6390 this on anything other than an array with size_only==1 (and even then
6391 only on the outermost level, so no recursion would be needed),
6392 because initializing a flex array member isn't supported.
6393 But GNU C supports it, so we need to recurse even into
6394 subfields of structs and arrays when size_only is set. */
6395 /* just skip expression */
6396 do {
6397 skip_or_save_block(NULL);
6398 } while (tok != '}' && tok != ',' && tok != -1);
6399 } else {
6400 if (!have_elem) {
6401 /* This should happen only when we haven't parsed
6402 the init element above for fear of committing a
6403 string constant to memory too early. */
6404 if (tok != TOK_STR && tok != TOK_LSTR)
6405 expect("string constant");
6406 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6408 init_putv(type, sec, c);
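/* For illustration: for 'char s[] = "hi";' at file scope,
   decl_initializer_alloc() below first parses the saved initializer with
   size_only set to learn the array length (3, counting the trailing
   zero), then allocates the object in data_section and parses the
   initializer a second time to actually store the bytes. */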
6412 /* parse an initializer for type 't' if 'has_init' is non zero, and
6413 allocate space in local or global data space ('r' is either
6414 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6415 variable 'v' of scope 'scope' is declared before initializers
6416 are parsed. If 'v' is zero, then a reference to the new object
6417 is put in the value stack. If 'has_init' is 2, a special parsing
6418 is done to handle string constants. */
6419 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6420 int has_init, int v, int scope)
6422 int size, align, addr;
6423 ParseState saved_parse_state = {0};
6424 TokenString *init_str = NULL;
6425 Section *sec;
6426 Sym *flexible_array;
6428 flexible_array = NULL;
6429 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6430 Sym *field = type->ref->next;
6431 if (field) {
6432 while (field->next)
6433 field = field->next;
6434 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6435 flexible_array = field;
6439 size = type_size(type, &align);
6440 /* If unknown size, we must evaluate it before
6441 evaluating initializers because
6442 initializers can generate global data too
6443 (e.g. string pointers or ISOC99 compound
6444 literals). It also simplifies local
6445 initializers handling */
6446 if (size < 0 || (flexible_array && has_init)) {
6447 if (!has_init)
6448 tcc_error("unknown type size");
6449 /* get all init string */
6450 if (has_init == 2) {
6451 init_str = tok_str_alloc();
6452 /* only get strings */
6453 while (tok == TOK_STR || tok == TOK_LSTR) {
6454 tok_str_add_tok(init_str);
6455 next();
6457 tok_str_add(init_str, -1);
6458 tok_str_add(init_str, 0);
6459 } else {
6460 skip_or_save_block(&init_str);
6463 /* compute size */
6464 save_parse_state(&saved_parse_state);
6466 begin_macro(init_str, 1);
6467 next();
6468 decl_initializer(type, NULL, 0, 1, 1);
6469 /* prepare second initializer parsing */
6470 macro_ptr = init_str->str;
6471 next();
6473 /* if still unknown size, error */
6474 size = type_size(type, &align);
6475 if (size < 0)
6476 tcc_error("unknown type size");
6478 /* If there's a flex member and it was used in the initializer
6479 adjust the size. */
6480 if (flexible_array &&
6481 flexible_array->type.ref->c > 0)
6482 size += flexible_array->type.ref->c
6483 * pointed_size(&flexible_array->type);
6484 /* take into account specified alignment if bigger */
6485 if (ad->a.aligned) {
6486 int speca = 1 << (ad->a.aligned - 1);
6487 if (speca > align)
6488 align = speca;
6489 } else if (ad->a.packed) {
6490 align = 1;
6492 if ((r & VT_VALMASK) == VT_LOCAL) {
6493 sec = NULL;
6494 #ifdef CONFIG_TCC_BCHECK
6495 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6496 loc--;
6498 #endif
6499 loc = (loc - size) & -align;
6500 addr = loc;
6501 #ifdef CONFIG_TCC_BCHECK
6502 /* handles bounds */
6503 /* XXX: currently, since we do only one pass, we cannot track
6504 '&' operators, so we add only arrays */
6505 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6506 addr_t *bounds_ptr;
6507 /* add padding between regions */
6508 loc--;
6509 /* then add local bound info */
6510 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6511 bounds_ptr[0] = addr;
6512 bounds_ptr[1] = size;
6514 #endif
6515 if (v) {
6516 /* local variable */
6517 #ifdef CONFIG_TCC_ASM
6518 if (ad->asm_label) {
6519 int reg = asm_parse_regvar(ad->asm_label);
6520 if (reg >= 0)
6521 r = (r & ~VT_VALMASK) | reg;
6523 #endif
6524 sym_push(v, type, r, addr);
6525 } else {
6526 /* push local reference */
6527 vset(type, r, addr);
6529 } else {
6530 Sym *sym = NULL;
6531 if (v && scope == VT_CONST) {
6532 /* see if the symbol was already defined */
6533 sym = sym_find(v);
6534 if (sym) {
6535 patch_storage(sym, type);
6536 if (sym->type.t & VT_EXTERN) {
6537 /* if the variable is extern, it was not allocated */
6538 sym->type.t &= ~VT_EXTERN;
6539 /* set array size if it was omitted in extern
6540 declaration */
6541 if ((sym->type.t & VT_ARRAY) &&
6542 sym->type.ref->c < 0 &&
6543 type->ref->c >= 0)
6544 sym->type.ref->c = type->ref->c;
6545 } else if (!has_init) {
6546 /* we accept several definitions of the same
6547 global variable. this is tricky, because we
6548 must play with the SHN_COMMON type of the symbol */
6549 /* no init data, we won't add more to the symbol */
6550 update_storage(sym);
6551 goto no_alloc;
6552 } else if (sym->c) {
6553 ElfW(Sym) *esym;
6554 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6555 if (esym->st_shndx == data_section->sh_num)
6556 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6561 /* allocate symbol in corresponding section */
6562 sec = ad->section;
6563 if (!sec) {
6564 if (has_init)
6565 sec = data_section;
6566 else if (tcc_state->nocommon)
6567 sec = bss_section;
6568 }
6570 if (sec) {
6571 addr = section_add(sec, size, align);
6572 #ifdef CONFIG_TCC_BCHECK
6573 /* add padding if bound check */
6574 if (tcc_state->do_bounds_check)
6575 section_add(sec, 1, 1);
6576 #endif
6577 } else {
6578 addr = align; /* SHN_COMMON is special, symbol value is align */
6579 sec = common_section;
6580 }
6582 if (v) {
6583 if (!sym) {
6584 sym = sym_push(v, type, r | VT_SYM, 0);
6585 sym->asm_label = ad->asm_label;
6586 }
6587 /* update symbol definition */
6588 put_extern_sym(sym, sec, addr, size);
6589 } else {
6590 /* push global reference */
6591 sym = get_sym_ref(type, sec, addr, size);
6592 vpushsym(type, sym);
6593 vtop->r |= r;
6594 }
6596 #ifdef CONFIG_TCC_BCHECK
6597 /* handle bounds now, because the symbol must be defined
6598 before the relocation can be emitted */
6599 if (tcc_state->do_bounds_check) {
6600 addr_t *bounds_ptr;
6602 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6603 /* then add global bound info */
6604 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6605 bounds_ptr[0] = 0; /* relocated */
6606 bounds_ptr[1] = size;
6607 }
6608 #endif
6609 }
6611 if (type->t & VT_VLA) {
6612 int a;
6614 /* save current stack pointer */
6615 if (vlas_in_scope == 0) {
6616 if (vla_sp_root_loc == -1)
6617 vla_sp_root_loc = (loc -= PTR_SIZE);
6618 gen_vla_sp_save(vla_sp_root_loc);
6619 }
6621 vla_runtime_type_size(type, &a);
6622 gen_vla_alloc(type, a);
6623 gen_vla_sp_save(addr);
6624 vla_sp_loc = addr;
6625 vlas_in_scope++;
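/* Illustrative VLA handled by the code above:
       void f(int n) { int a[n]; ... }
   the stack pointer is saved once per scope (vla_sp_root_loc) and again
   after each allocation (addr) so it can be restored when the scope or
   function exits. */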
6627 } else if (has_init) {
6628 size_t oldreloc_offset = 0;
6629 if (sec && sec->reloc)
6630 oldreloc_offset = sec->reloc->data_offset;
6631 decl_initializer(type, sec, addr, 1, 0);
6632 if (sec && sec->reloc)
6633 squeeze_multi_relocs(sec, oldreloc_offset);
6634 /* patch flexible array member size back to -1, */
6635 /* for possible subsequent similar declarations */
6636 if (flexible_array)
6637 flexible_array->type.ref->c = -1;
6638 }
6640 no_alloc:
6641 /* restore parse state if needed */
6642 if (init_str) {
6643 end_macro();
6644 restore_parse_state(&saved_parse_state);
6645 }
6646 }
6648 /* parse a function defined by symbol 'sym' and generate its code in
6649 'cur_text_section' */
6650 static void gen_function(Sym *sym)
6651 {
6652 nocode_wanted = 0;
6653 ind = cur_text_section->data_offset;
6654 /* NOTE: we patch the symbol size later */
6655 put_extern_sym(sym, cur_text_section, ind, 0);
6656 funcname = get_tok_str(sym->v, NULL);
6657 func_ind = ind;
6658 /* Initialize VLA state */
6659 vla_sp_loc = -1;
6660 vla_sp_root_loc = -1;
6661 /* put debug symbol */
6662 tcc_debug_funcstart(tcc_state, sym);
6663 /* push a dummy symbol to enable local sym storage */
6664 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6665 local_scope = 1; /* for function parameters */
6666 gfunc_prolog(&sym->type);
6667 local_scope = 0;
6668 rsym = 0;
6669 block(NULL, NULL, 0);
6670 nocode_wanted = 0;
6671 gsym(rsym);
6672 gfunc_epilog();
6673 cur_text_section->data_offset = ind;
6674 label_pop(&global_label_stack, NULL);
6675 /* reset local stack */
6676 local_scope = 0;
6677 sym_pop(&local_stack, NULL, 0);
6678 /* end of function */
6679 /* patch symbol size */
6680 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6681 ind - func_ind;
6682 tcc_debug_funcend(tcc_state, ind - func_ind);
6683 /* It's better to crash than to generate wrong code */
6684 cur_text_section = NULL;
6685 funcname = ""; /* for safety */
6686 func_vt.t = VT_VOID; /* for safety */
6687 func_var = 0; /* for safety */
6688 ind = 0; /* for safety */
6689 nocode_wanted = 1;
6690 check_vstack();
6691 }
6693 static void gen_inline_functions(TCCState *s)
6694 {
6695 Sym *sym;
6696 int inline_generated, i, ln;
6697 struct InlineFunc *fn;
6699 ln = file->line_num;
6700 /* iterate while inline functions are referenced */
6701 for(;;) {
6702 inline_generated = 0;
6703 for (i = 0; i < s->nb_inline_fns; ++i) {
6704 fn = s->inline_fns[i];
6705 sym = fn->sym;
6706 if (sym && sym->c) {
6707 /* the function was used: generate its code and
6708 convert it to a normal function */
6709 fn->sym = NULL;
6710 if (file)
6711 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6712 sym->type.t &= ~VT_INLINE;
6714 begin_macro(fn->func_str, 1);
6715 next();
6716 cur_text_section = text_section;
6717 gen_function(sym);
6718 end_macro();
6720 inline_generated = 1;
6721 }
6722 }
6723 if (!inline_generated)
6724 break;
6725 }
6726 file->line_num = ln;
6727 }
6729 ST_FUNC void free_inline_functions(TCCState *s)
6730 {
6731 int i;
6732 /* free tokens of unused inline functions */
6733 for (i = 0; i < s->nb_inline_fns; ++i) {
6734 struct InlineFunc *fn = s->inline_fns[i];
6735 if (fn->sym)
6736 tok_str_free(fn->func_str);
6737 }
6738 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6739 }
6741 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
6742 if parsing old style parameter decl list (and FUNC_SYM is set then) */
6743 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
6744 {
6745 int v, has_init, r;
6746 CType type, btype;
6747 Sym *sym;
6748 AttributeDef ad;
6750 while (1) {
6751 if (!parse_btype(&btype, &ad)) {
6752 if (is_for_loop_init)
6753 return 0;
6754 /* skip redundant ';' if not in old parameter decl scope */
6755 if (tok == ';' && l != VT_CMP) {
6756 next();
6757 continue;
6758 }
6759 if (l == VT_CONST &&
6760 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6761 /* global asm block */
6762 asm_global_instr();
6763 continue;
6764 }
6765 /* special test for old K&R protos without explicit int
6766 type. Only accepted when defining global data */
6767 if (l != VT_CONST || tok < TOK_UIDENT)
6768 break;
6769 btype.t = VT_INT;
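/* Illustrative K&R-style case accepted here: a file-scope definition
   such as 'main() { ... }' has no type specifier, so int is assumed. */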
6770 }
6771 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6772 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6773 tok == ';') {
6774 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6775 int v = btype.ref->v;
6776 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6777 tcc_warning("unnamed struct/union that defines no instances");
6778 }
6779 next();
6780 continue;
6781 }
6782 while (1) { /* iterate thru each declaration */
6783 type = btype;
6784 /* If the base type itself was an array type of unspecified
6785 size (like in 'typedef int arr[]; arr x = {1};') then
6786 we will overwrite the unknown size by the real one for
6787 this decl. We need to unshare the ref symbol holding
6788 that size. */
6789 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6790 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6791 }
6792 type_decl(&type, &ad, &v, TYPE_DIRECT);
6793 #if 0
6794 {
6795 char buf[500];
6796 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
6797 printf("type = '%s'\n", buf);
6798 }
6799 #endif
6800 if ((type.t & VT_BTYPE) == VT_FUNC) {
6801 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6802 tcc_error("function without file scope cannot be static");
6803 }
6804 /* if old style function prototype, we accept a
6805 declaration list */
6806 sym = type.ref;
6807 if (sym->c == FUNC_OLD && l == VT_CONST)
6808 decl0(VT_CMP, 0, sym);
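/* Illustrative old-style definition handled by the decl0(VT_CMP, ...)
   call above:
       int copy(dst, src) char *dst; char *src; { ... }
   the declaration list between ')' and '{' supplies the parameter types. */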
6809 }
6811 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6812 ad.asm_label = asm_label_instr();
6813 /* parse one last attribute list, after asm label */
6814 parse_attribute(&ad);
6815 if (tok == '{')
6816 expect(";");
6817 }
6819 if (ad.a.weak)
6820 type.t |= VT_WEAK;
6821 #ifdef TCC_TARGET_PE
6822 if (ad.a.func_import || ad.a.func_export) {
6823 if (type.t & (VT_STATIC|VT_TYPEDEF))
6824 tcc_error("cannot have dll linkage with static or typedef");
6825 if (ad.a.func_export)
6826 type.t |= VT_EXPORT;
6827 else if ((type.t & VT_BTYPE) != VT_FUNC)
6828 type.t |= VT_IMPORT|VT_EXTERN;
6829 }
6830 #endif
6831 type.t |= ad.a.visibility << VT_VIS_SHIFT;
6833 if (tok == '{') {
6834 if (l != VT_CONST)
6835 tcc_error("cannot use local functions");
6836 if ((type.t & VT_BTYPE) != VT_FUNC)
6837 expect("function definition");
6839 /* reject abstract declarators in function definition;
6840 make old style params without decl have int type */
6841 sym = type.ref;
6842 while ((sym = sym->next) != NULL) {
6843 if (!(sym->v & ~SYM_FIELD))
6844 expect("identifier");
6845 if (sym->type.t == VT_VOID)
6846 sym->type = int_type;
6847 }
6849 /* XXX: cannot do better now: convert extern inline to static inline */
6850 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
6851 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
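/* E.g. 'extern inline int sq(int x) { return x * x; }' is treated here
   as if it had been declared 'static inline' (see the XXX note above). */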
6853 sym = sym_find(v);
6854 if (sym) {
6855 Sym *ref;
6856 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
6857 goto func_error1;
6859 ref = sym->type.ref;
6861 /* use func_call from prototype if not defined */
6862 if (ref->a.func_call != FUNC_CDECL
6863 && type.ref->a.func_call == FUNC_CDECL)
6864 type.ref->a.func_call = ref->a.func_call;
6866 /* use static from prototype */
6867 if (sym->type.t & VT_STATIC)
6868 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6870 /* If the definition has no visibility use the
6871 one from prototype. */
6872 if (! (type.t & VT_VIS_MASK))
6873 type.t |= sym->type.t & VT_VIS_MASK;
6875 /* apply other storage attributes from prototype */
6876 type.t |= sym->type.t & (VT_EXPORT|VT_WEAK);
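/* Illustrative merge performed above (sketch):
       static int f(void);           prototype declares f static
       int f(void) { return 0; }     definition keeps static linkage */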
6878 if (!is_compatible_types(&sym->type, &type)) {
6879 func_error1:
6880 tcc_error("incompatible types for redefinition of '%s'",
6881 get_tok_str(v, NULL));
6882 }
6883 if (ref->a.func_body)
6884 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6885 /* if symbol is already defined, then put complete type */
6886 sym->type = type;
6888 } else {
6889 /* put function symbol */
6890 sym = global_identifier_push(v, type.t, 0);
6891 sym->type.ref = type.ref;
6892 }
6894 sym->type.ref->a.func_body = 1;
6895 sym->r = VT_SYM | VT_CONST;
6897 /* static inline functions are just recorded as a kind
6898 of macro. Their code will be emitted at the end of
6899 the compilation unit only if they are used */
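/* E.g. for 'static inline int sq(int x) { return x * x; }' only the
   token string is recorded below; gen_inline_functions() emits the code
   at the end of the unit, and only if sq() was referenced. */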
6900 if ((type.t & (VT_INLINE | VT_STATIC)) ==
6901 (VT_INLINE | VT_STATIC)) {
6902 struct InlineFunc *fn;
6903 const char *filename;
6905 filename = file ? file->filename : "";
6906 fn = tcc_malloc(sizeof *fn + strlen(filename));
6907 strcpy(fn->filename, filename);
6908 fn->sym = sym;
6909 skip_or_save_block(&fn->func_str);
6910 dynarray_add(&tcc_state->inline_fns,
6911 &tcc_state->nb_inline_fns, fn);
6912 } else {
6913 /* compute text section */
6914 cur_text_section = ad.section;
6915 if (!cur_text_section)
6916 cur_text_section = text_section;
6917 gen_function(sym);
6918 }
6919 break;
6920 } else {
6921 if (l == VT_CMP) {
6922 /* find parameter in function parameter list */
6923 for (sym = func_sym->next; sym; sym = sym->next)
6924 if ((sym->v & ~SYM_FIELD) == v)
6925 goto found;
6926 tcc_error("declaration for parameter '%s' but no such parameter",
6927 get_tok_str(v, NULL));
6928 found:
6929 if (type.t & VT_STORAGE) /* 'register' is okay */
6930 tcc_error("storage class specified for '%s'",
6931 get_tok_str(v, NULL));
6932 if (sym->type.t != VT_VOID)
6933 tcc_error("redefinition of parameter '%s'",
6934 get_tok_str(v, NULL));
6935 convert_parameter_type(&type);
6936 sym->type = type;
6937 } else if (type.t & VT_TYPEDEF) {
6938 /* save typedefed type */
6939 /* XXX: test storage specifiers ? */
6940 sym = sym_find(v);
6941 if (sym && sym->scope == local_scope) {
6942 if (!is_compatible_types(&sym->type, &type)
6943 || !(sym->type.t & VT_TYPEDEF))
6944 tcc_error("incompatible redefinition of '%s'",
6945 get_tok_str(v, NULL));
6946 sym->type = type;
6947 } else {
6948 sym = sym_push(v, &type, 0, 0);
6949 }
6950 sym->a = ad.a;
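/* Illustrative typedef cases for the check above:
       typedef int T; typedef int T;      same scope, compatible: ok
       typedef int T; typedef long T;     incompatible: error */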
6951 } else {
6952 r = 0;
6953 if ((type.t & VT_BTYPE) == VT_FUNC) {
6954 /* external function definition */
6955 /* specific case for func_call attribute */
6956 type.ref->a = ad.a;
6957 } else if (!(type.t & VT_ARRAY)) {
6958 /* not lvalue if array */
6959 r |= lvalue_type(type.t);
6960 }
6961 has_init = (tok == '=');
6962 if (has_init && (type.t & VT_VLA))
6963 tcc_error("variable length array cannot be initialized");
6964 if ((type.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
6965 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
6966 !has_init && l == VT_CONST && type.ref->c < 0)) {
6967 /* external variable or function */
6968 /* NOTE: like GCC, uninitialized global static
6969 arrays of unspecified size are considered
6970 extern */
6971 sym = external_sym(v, &type, r);
6972 sym->asm_label = ad.asm_label;
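/* E.g. 'static int tab[];' at file scope: no size and no initializer,
   so it is treated like an extern symbol here (matching GCC's
   behaviour). */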
6973 if (ad.alias_target) {
6974 Section tsec;
6975 ElfW(Sym) *esym;
6976 Sym *alias_target;
6978 alias_target = sym_find(ad.alias_target);
6979 if (!alias_target || !alias_target->c)
6980 tcc_error("unsupported forward __alias__ attribute");
6981 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
6982 tsec.sh_num = esym->st_shndx;
6983 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
6984 }
6985 } else {
6986 if (type.t & VT_STATIC)
6987 r |= VT_CONST;
6988 else
6989 r |= l;
6990 if (has_init)
6991 next();
6992 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
6993 }
6994 }
6995 if (tok != ',') {
6996 if (is_for_loop_init)
6997 return 1;
6998 skip(';');
6999 break;
7000 }
7001 next();
7002 }
7003 ad.a.aligned = 0;
7004 }
7005 }
7006 return 0;
7007 }
7009 ST_FUNC void decl(int l)
7010 {
7011 decl0(l, 0, NULL);
7012 }
7014 /* ------------------------------------------------------------------------- */