i386-gen: fix USE_EBX
[tinycc.git] / tccgen.c
blob 8281d1e21b9e3e957d0ddbfb26cd7c7578f4d4cd
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* location of the stack variable in which the stack pointer is saved whenever the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
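/* How the check works (assuming the little-endian layout noted above): p[1]
   holds the high 32 bits of the double, with the 11 exponent bits at
   positions 20..30. OR-ing with 0x800fffff sets every other bit, so the +1
   carries out of bit 31 only when the exponent is all ones (Inf or NaN).
   E.g. d = 1.0: p[1] = 0x3ff00000 -> 0xbfffffff + 1 = 0xc0000000, >>31 = 1
   (finite); d = Inf: p[1] = 0x7ff00000 -> 0xffffffff + 1 = 0, >>31 = 0. */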
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
142 /* ------------------------------------------------------------------------- */
143 ST_FUNC void tccgen_start(TCCState *s1)
145 cur_text_section = NULL;
146 funcname = "";
147 anon_sym = SYM_FIRST_ANOM;
148 section_sym = 0;
149 const_wanted = 0;
150 nocode_wanted = 1;
152 /* define some often used types */
153 int_type.t = VT_INT;
154 char_pointer_type.t = VT_BYTE;
155 mk_pointer(&char_pointer_type);
156 #if PTR_SIZE == 4
157 size_type.t = VT_INT;
158 #else
159 size_type.t = VT_LLONG;
160 #endif
161 func_old_type.t = VT_FUNC;
162 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
164 if (s1->do_debug) {
165 char buf[512];
167 /* file info: full path + filename */
168 section_sym = put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
170 text_section->sh_num, NULL);
171 getcwd(buf, sizeof(buf));
172 #ifdef _WIN32
173 normalize_slashes(buf);
174 #endif
175 pstrcat(buf, sizeof(buf), "/");
176 put_stabs_r(buf, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
178 put_stabs_r(file->filename, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
181 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
182 symbols can be safely used */
183 put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
185 SHN_ABS, file->filename);
187 #ifdef TCC_TARGET_ARM
188 arm_init(s1);
189 #endif
192 ST_FUNC void tccgen_end(TCCState *s1)
194 gen_inline_functions(s1);
195 check_vstack();
196 /* end of translation unit info */
197 if (s1->do_debug) {
198 put_stabs_r(NULL, N_SO, 0, 0,
199 text_section->data_offset, text_section, section_sym);
203 /* ------------------------------------------------------------------------- */
204 /* update sym->c so that it points to an external symbol in section
205 'section' with value 'value' */
207 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
208 addr_t value, unsigned long size,
209 int can_add_underscore)
211 int sym_type, sym_bind, sh_num, info, other;
212 ElfW(Sym) *esym;
213 const char *name;
214 char buf1[256];
216 #ifdef CONFIG_TCC_BCHECK
217 char buf[32];
218 #endif
220 if (section == NULL)
221 sh_num = SHN_UNDEF;
222 else if (section == SECTION_ABS)
223 sh_num = SHN_ABS;
224 else
225 sh_num = section->sh_num;
227 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
228 sym_type = STT_FUNC;
229 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
230 sym_type = STT_NOTYPE;
231 } else {
232 sym_type = STT_OBJECT;
235 if (sym->type.t & VT_STATIC)
236 sym_bind = STB_LOCAL;
237 else {
238 if (sym->type.t & VT_WEAK)
239 sym_bind = STB_WEAK;
240 else
241 sym_bind = STB_GLOBAL;
244 if (!sym->c) {
245 name = get_tok_str(sym->v, NULL);
246 #ifdef CONFIG_TCC_BCHECK
247 if (tcc_state->do_bounds_check) {
248 /* XXX: avoid doing that for statics ? */
249 /* if bounds checking is activated, we change some function
250 names by adding the "__bound" prefix */
251 switch(sym->v) {
252 #ifdef TCC_TARGET_PE
253 /* XXX: we rely only on malloc hooks */
254 case TOK_malloc:
255 case TOK_free:
256 case TOK_realloc:
257 case TOK_memalign:
258 case TOK_calloc:
259 #endif
260 case TOK_memcpy:
261 case TOK_memmove:
262 case TOK_memset:
263 case TOK_strlen:
264 case TOK_strcpy:
265 case TOK_alloca:
266 strcpy(buf, "__bound_");
267 strcat(buf, name);
268 name = buf;
269 break;
272 #endif
273 other = 0;
275 #ifdef TCC_TARGET_PE
276 if (sym->type.t & VT_EXPORT)
277 other |= ST_PE_EXPORT;
278 if (sym_type == STT_FUNC && sym->type.ref) {
279 Sym *ref = sym->type.ref;
280 if (ref->a.func_export)
281 other |= ST_PE_EXPORT;
282 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
283 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
284 name = buf1;
285 other |= ST_PE_STDCALL;
286 can_add_underscore = 0;
288 } else {
289 if (find_elf_sym(tcc_state->dynsymtab_section, name))
290 other |= ST_PE_IMPORT;
291 if (sym->type.t & VT_IMPORT)
292 other |= ST_PE_IMPORT;
294 #else
295 if (! (sym->type.t & VT_STATIC))
296 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
297 #endif
298 if (tcc_state->leading_underscore && can_add_underscore) {
299 buf1[0] = '_';
300 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
301 name = buf1;
303 if (sym->asm_label) {
304 name = get_tok_str(sym->asm_label, NULL);
306 info = ELFW(ST_INFO)(sym_bind, sym_type);
307 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
308 } else {
309 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
310 esym->st_value = value;
311 esym->st_size = size;
312 esym->st_shndx = sh_num;
316 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
317 addr_t value, unsigned long size)
319 put_extern_sym2(sym, section, value, size, 1);
322 /* add a new relocation entry to symbol 'sym' in section 's' */
323 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
324 addr_t addend)
326 int c = 0;
328 if (nocode_wanted && s == cur_text_section)
329 return;
331 if (sym) {
332 if (0 == sym->c)
333 put_extern_sym(sym, NULL, 0, 0);
334 c = sym->c;
337 /* now we can add ELF relocation info */
338 put_elf_reloca(symtab_section, s, offset, type, c, addend);
341 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
343 greloca(s, sym, offset, type, 0);
346 /* ------------------------------------------------------------------------- */
347 /* symbol allocator */
348 static Sym *__sym_malloc(void)
350 Sym *sym_pool, *sym, *last_sym;
351 int i;
353 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
354 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
356 last_sym = sym_free_first;
357 sym = sym_pool;
358 for(i = 0; i < SYM_POOL_NB; i++) {
359 sym->next = last_sym;
360 last_sym = sym;
361 sym++;
363 sym_free_first = last_sym;
364 return last_sym;
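/* Note: symbols are carved out of SYM_POOL_NB-sized pools kept in sym_pools;
   each new pool is threaded onto the sym_free_first free list, and sym_free()
   below simply pushes a Sym back onto that list, so allocating or freeing a
   symbol normally costs one pointer update rather than a malloc/free per
   symbol (unless SYM_DEBUG is defined, in which case plain tcc_malloc and
   tcc_free are used instead). */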
367 static inline Sym *sym_malloc(void)
369 Sym *sym;
370 #ifndef SYM_DEBUG
371 sym = sym_free_first;
372 if (!sym)
373 sym = __sym_malloc();
374 sym_free_first = sym->next;
375 return sym;
376 #else
377 sym = tcc_malloc(sizeof(Sym));
378 return sym;
379 #endif
382 ST_INLN void sym_free(Sym *sym)
384 #ifndef SYM_DEBUG
385 sym->next = sym_free_first;
386 sym_free_first = sym;
387 #else
388 tcc_free(sym);
389 #endif
392 /* push, without hashing */
393 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
395 Sym *s;
397 s = sym_malloc();
398 s->scope = 0;
399 s->v = v;
400 s->type.t = t;
401 s->type.ref = NULL;
402 #ifdef _WIN64
403 s->d = NULL;
404 #endif
405 s->c = c;
406 s->next = NULL;
407 /* add in stack */
408 s->prev = *ps;
409 *ps = s;
410 return s;
413 /* find a symbol and return its associated structure. 's' is the top
414 of the symbol stack */
415 ST_FUNC Sym *sym_find2(Sym *s, int v)
417 while (s) {
418 if (s->v == v)
419 return s;
420 else if (s->v == -1)
421 return NULL;
422 s = s->prev;
424 return NULL;
427 /* structure lookup */
428 ST_INLN Sym *struct_find(int v)
430 v -= TOK_IDENT;
431 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
432 return NULL;
433 return table_ident[v]->sym_struct;
436 /* find an identifier */
437 ST_INLN Sym *sym_find(int v)
439 v -= TOK_IDENT;
440 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
441 return NULL;
442 return table_ident[v]->sym_identifier;
445 /* push a given symbol on the symbol stack */
446 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
448 Sym *s, **ps;
449 TokenSym *ts;
451 if (local_stack)
452 ps = &local_stack;
453 else
454 ps = &global_stack;
455 s = sym_push2(ps, v, type->t, c);
456 s->type.ref = type->ref;
457 s->r = r;
458 /* don't record fields or anonymous symbols */
459 /* XXX: simplify */
460 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
461 /* record symbol in token array */
462 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
463 if (v & SYM_STRUCT)
464 ps = &ts->sym_struct;
465 else
466 ps = &ts->sym_identifier;
467 s->prev_tok = *ps;
468 *ps = s;
469 s->scope = local_scope;
470 if (s->prev_tok && s->prev_tok->scope == s->scope)
471 tcc_error("redeclaration of '%s'",
472 get_tok_str(v & ~SYM_STRUCT, NULL));
474 return s;
477 /* push a global identifier */
478 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
480 Sym *s, **ps;
481 s = sym_push2(&global_stack, v, t, c);
482 /* don't record anonymous symbol */
483 if (v < SYM_FIRST_ANOM) {
484 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
485 /* modify the topmost local identifier, so that
486 sym_identifier will point to 's' when popped */
487 while (*ps != NULL)
488 ps = &(*ps)->prev_tok;
489 s->prev_tok = NULL;
490 *ps = s;
492 return s;
495 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
496 pop them yet from the list, but do remove them from the token array. */
497 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
499 Sym *s, *ss, **ps;
500 TokenSym *ts;
501 int v;
503 s = *ptop;
504 while(s != b) {
505 ss = s->prev;
506 v = s->v;
507 /* remove symbol in token array */
508 /* XXX: simplify */
509 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
510 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
511 if (v & SYM_STRUCT)
512 ps = &ts->sym_struct;
513 else
514 ps = &ts->sym_identifier;
515 *ps = s->prev_tok;
517 if (!keep)
518 sym_free(s);
519 s = ss;
521 if (!keep)
522 *ptop = b;
525 static void weaken_symbol(Sym *sym)
527 sym->type.t |= VT_WEAK;
528 if (sym->c > 0) {
529 int esym_type;
530 ElfW(Sym) *esym;
532 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
533 esym_type = ELFW(ST_TYPE)(esym->st_info);
534 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
538 static void apply_visibility(Sym *sym, CType *type)
540 int vis = sym->type.t & VT_VIS_MASK;
541 int vis2 = type->t & VT_VIS_MASK;
542 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
543 vis = vis2;
544 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
546 else
547 vis = (vis < vis2) ? vis : vis2;
548 sym->type.t &= ~VT_VIS_MASK;
549 sym->type.t |= vis;
551 if (sym->c > 0) {
552 ElfW(Sym) *esym;
554 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
555 vis >>= VT_VIS_SHIFT;
556 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
560 /* ------------------------------------------------------------------------- */
562 ST_FUNC void swap(int *p, int *q)
564 int t;
565 t = *p;
566 *p = *q;
567 *q = t;
570 static void vsetc(CType *type, int r, CValue *vc)
572 int v;
574 if (vtop >= vstack + (VSTACK_SIZE - 1))
575 tcc_error("memory full (vstack)");
576 /* cannot leave cpu flags if other instructions are generated. Also
577 avoid leaving VT_JMP anywhere except on the top of the stack
578 because it would complicate the code generator. */
579 if (vtop >= vstack) {
580 v = vtop->r & VT_VALMASK;
581 if (v == VT_CMP || (v & ~1) == VT_JMP)
582 gv(RC_INT);
584 vtop++;
585 vtop->type = *type;
586 vtop->r = r;
587 vtop->r2 = VT_CONST;
588 vtop->c = *vc;
589 vtop->sym = NULL;
592 /* push a constant of type "type" with an unspecified value */
593 ST_FUNC void vpush(CType *type)
595 CValue cval;
596 vsetc(type, VT_CONST, &cval);
599 /* push integer constant */
600 ST_FUNC void vpushi(int v)
602 CValue cval;
603 cval.i = v;
604 vsetc(&int_type, VT_CONST, &cval);
607 /* push a pointer sized constant */
608 static void vpushs(addr_t v)
610 CValue cval;
611 cval.i = v;
612 vsetc(&size_type, VT_CONST, &cval);
615 /* push arbitrary 64bit constant */
616 ST_FUNC void vpush64(int ty, unsigned long long v)
618 CValue cval;
619 CType ctype;
620 ctype.t = ty;
621 ctype.ref = NULL;
622 cval.i = v;
623 vsetc(&ctype, VT_CONST, &cval);
626 /* push long long constant */
627 static inline void vpushll(long long v)
629 vpush64(VT_LLONG, v);
632 /* push a symbol value of TYPE */
633 static inline void vpushsym(CType *type, Sym *sym)
635 CValue cval;
636 cval.i = 0;
637 vsetc(type, VT_CONST | VT_SYM, &cval);
638 vtop->sym = sym;
641 /* Return a static symbol pointing to a section */
642 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
644 int v;
645 Sym *sym;
647 v = anon_sym++;
648 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
649 sym->type.ref = type->ref;
650 sym->r = VT_CONST | VT_SYM;
651 put_extern_sym(sym, sec, offset, size);
652 return sym;
655 /* push a reference to a section offset by adding a dummy symbol */
656 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
658 vpushsym(type, get_sym_ref(type, sec, offset, size));
661 /* define a new external reference to a symbol 'v' of type 'u' */
662 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
664 Sym *s;
666 s = sym_find(v);
667 if (!s) {
668 /* push forward reference */
669 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
670 s->type.ref = type->ref;
671 s->r = r | VT_CONST | VT_SYM;
673 return s;
676 /* define a new external reference to a symbol 'v' */
677 static Sym *external_sym(int v, CType *type, int r)
679 Sym *s;
681 s = sym_find(v);
682 if (!s) {
683 /* push forward reference */
684 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
685 s->type.t |= VT_EXTERN;
686 } else if (s->type.ref == func_old_type.ref) {
687 s->type.ref = type->ref;
688 s->r = r | VT_CONST | VT_SYM;
689 s->type.t |= VT_EXTERN;
690 } else if (!is_compatible_types(&s->type, type)) {
691 tcc_error("incompatible types for redefinition of '%s'",
692 get_tok_str(v, NULL));
694 /* Merge some storage attributes. */
695 if (type->t & VT_WEAK)
696 weaken_symbol(s);
698 if (type->t & VT_VIS_MASK)
699 apply_visibility(s, type);
701 return s;
704 /* push a reference to global symbol v */
705 ST_FUNC void vpush_global_sym(CType *type, int v)
707 vpushsym(type, external_global_sym(v, type, 0));
710 ST_FUNC void vset(CType *type, int r, long v)
712 CValue cval;
714 cval.i = v;
715 vsetc(type, r, &cval);
718 static void vseti(int r, int v)
720 CType type;
721 type.t = VT_INT;
722 type.ref = 0;
723 vset(&type, r, v);
726 ST_FUNC void vswap(void)
728 SValue tmp;
729 /* cannot leave cpu flags if other instructions are generated. Also
730 avoid leaving VT_JMP anywhere except on the top of the stack
731 because it would complicate the code generator. */
732 if (vtop >= vstack) {
733 int v = vtop->r & VT_VALMASK;
734 if (v == VT_CMP || (v & ~1) == VT_JMP)
735 gv(RC_INT);
737 tmp = vtop[0];
738 vtop[0] = vtop[-1];
739 vtop[-1] = tmp;
741 /* XXX: +2% overall speed possible with optimized memswap
743 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
747 ST_FUNC void vpushv(SValue *v)
749 if (vtop >= vstack + (VSTACK_SIZE - 1))
750 tcc_error("memory full (vstack)");
751 vtop++;
752 *vtop = *v;
755 static void vdup(void)
757 vpushv(vtop);
760 /* save registers up to (vtop - n) stack entry */
761 ST_FUNC void save_regs(int n)
763 SValue *p, *p1;
764 for(p = vstack, p1 = vtop - n; p <= p1; p++)
765 save_reg(p->r);
768 /* save r to the memory stack, and mark it as being free */
769 ST_FUNC void save_reg(int r)
771 save_reg_upstack(r, 0);
774 /* save r to the memory stack, and mark it as being free,
775 if seen up to (vtop - n) stack entry */
776 ST_FUNC void save_reg_upstack(int r, int n)
778 int l, saved, size, align;
779 SValue *p, *p1, sv;
780 CType *type;
782 if ((r &= VT_VALMASK) >= VT_CONST)
783 return;
784 if (nocode_wanted)
785 return;
787 /* modify all stack values */
788 saved = 0;
789 l = 0;
790 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
791 if ((p->r & VT_VALMASK) == r ||
792 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
793 /* must save value on stack if not already done */
794 if (!saved) {
795 /* NOTE: must reload 'r' because r might be equal to r2 */
796 r = p->r & VT_VALMASK;
797 /* store register in the stack */
798 type = &p->type;
799 if ((p->r & VT_LVAL) ||
800 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
801 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
802 type = &char_pointer_type;
803 #else
804 type = &int_type;
805 #endif
806 size = type_size(type, &align);
807 loc = (loc - size) & -align;
808 sv.type.t = type->t;
809 sv.r = VT_LOCAL | VT_LVAL;
810 sv.c.i = loc;
811 store(r, &sv);
812 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
813 /* x86 specific: need to pop fp register ST0 if saved */
814 if (r == TREG_ST0) {
815 o(0xd8dd); /* fstp %st(0) */
817 #endif
818 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
819 /* special long long case */
820 if ((type->t & VT_BTYPE) == VT_LLONG) {
821 sv.c.i += 4;
822 store(p->r2, &sv);
824 #endif
825 l = loc;
826 saved = 1;
828 /* mark that stack entry as being saved on the stack */
829 if (p->r & VT_LVAL) {
830 /* also clear the bounded flag because the
831 relocation address of the function was stored in
832 p->c.i */
833 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
834 } else {
835 p->r = lvalue_type(p->type.t) | VT_LOCAL;
837 p->r2 = VT_CONST;
838 p->c.i = l;
843 #ifdef TCC_TARGET_ARM
844 /* find a register of class 'rc2' with at most one reference on stack.
845 * If none, call get_reg(rc) */
846 ST_FUNC int get_reg_ex(int rc, int rc2)
848 int r;
849 SValue *p;
851 for(r=0;r<NB_REGS;r++) {
852 if (reg_classes[r] & rc2) {
853 int n;
854 n=0;
855 for(p = vstack; p <= vtop; p++) {
856 if ((p->r & VT_VALMASK) == r ||
857 (p->r2 & VT_VALMASK) == r)
858 n++;
860 if (n <= 1)
861 return r;
864 return get_reg(rc);
866 #endif
868 /* find a free register of class 'rc'. If none, save one register */
869 ST_FUNC int get_reg(int rc)
871 int r;
872 SValue *p;
874 /* find a free register */
875 for(r=0;r<NB_REGS;r++) {
876 if (reg_classes[r] & rc) {
877 if (nocode_wanted)
878 return r;
879 for(p=vstack;p<=vtop;p++) {
880 if ((p->r & VT_VALMASK) == r ||
881 (p->r2 & VT_VALMASK) == r)
882 goto notfound;
884 return r;
886 notfound: ;
889 /* no register left : free the first one on the stack (VERY
890 IMPORTANT to start from the bottom to ensure that we don't
891 spill registers used in gen_opi()) */
892 for(p=vstack;p<=vtop;p++) {
893 /* look at second register (if long long) */
894 r = p->r2 & VT_VALMASK;
895 if (r < VT_CONST && (reg_classes[r] & rc))
896 goto save_found;
897 r = p->r & VT_VALMASK;
898 if (r < VT_CONST && (reg_classes[r] & rc)) {
899 save_found:
900 save_reg(r);
901 return r;
904 /* Should never come here */
905 return -1;
908 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
909 if needed */
910 static void move_reg(int r, int s, int t)
912 SValue sv;
914 if (r != s) {
915 save_reg(r);
916 sv.type.t = t;
917 sv.type.ref = NULL;
918 sv.r = s;
919 sv.c.i = 0;
920 load(r, &sv);
924 /* get address of vtop (vtop MUST BE an lvalue) */
925 ST_FUNC void gaddrof(void)
927 if (vtop->r & VT_REF)
928 gv(RC_INT);
929 vtop->r &= ~VT_LVAL;
930 /* tricky: if saved lvalue, then we can go back to lvalue */
931 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
932 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
937 #ifdef CONFIG_TCC_BCHECK
938 /* generate lvalue bound code */
939 static void gbound(void)
941 int lval_type;
942 CType type1;
944 vtop->r &= ~VT_MUSTBOUND;
945 /* if lvalue, then use checking code before dereferencing */
946 if (vtop->r & VT_LVAL) {
947 /* if not VT_BOUNDED value, then make one */
948 if (!(vtop->r & VT_BOUNDED)) {
949 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
950 /* must save type because we must set it to int to get pointer */
951 type1 = vtop->type;
952 vtop->type.t = VT_PTR;
953 gaddrof();
954 vpushi(0);
955 gen_bounded_ptr_add();
956 vtop->r |= lval_type;
957 vtop->type = type1;
959 /* then check for dereferencing */
960 gen_bounded_ptr_deref();
963 #endif
965 /* store vtop in a register belonging to class 'rc'. lvalues are
966 converted to values. Cannot be used for values that cannot be
967 converted to a register value (such as structures). */
968 ST_FUNC int gv(int rc)
970 int r, bit_pos, bit_size, size, align, i;
971 int rc2;
973 /* NOTE: get_reg can modify vstack[] */
974 if (vtop->type.t & VT_BITFIELD) {
975 CType type;
976 int bits = 32;
977 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
978 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
979 /* remove bit field info to avoid loops */
980 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
981 /* cast to int to propagate signedness in following ops */
982 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
983 type.t = VT_LLONG;
984 bits = 64;
985 } else
986 type.t = VT_INT;
987 if((vtop->type.t & VT_UNSIGNED) ||
988 (vtop->type.t & VT_BTYPE) == VT_BOOL)
989 type.t |= VT_UNSIGNED;
990 gen_cast(&type);
991 /* generate shifts */
992 vpushi(bits - (bit_pos + bit_size));
993 gen_op(TOK_SHL);
994 vpushi(bits - bit_size);
995 /* NOTE: transformed to SHR if unsigned */
996 gen_op(TOK_SAR);
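/* e.g. for a signed 5-bit field at bit_pos 3 in a 32-bit word: the value is
   shifted left by 32 - (3 + 5) = 24 so the field occupies the top bits, then
   arithmetically shifted right by 32 - 5 = 27, which moves it back down to
   bits 0..4 with the sign bit replicated; unsigned fields use SHR instead. */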
997 r = gv(rc);
998 } else {
999 if (is_float(vtop->type.t) &&
1000 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1001 Sym *sym;
1002 int *ptr;
1003 unsigned long offset;
1004 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1005 CValue check;
1006 #endif
1008 /* XXX: unify with initializers handling ? */
1009 /* CPUs usually cannot use float constants, so we store them
1010 generically in the data segment */
1011 size = type_size(&vtop->type, &align);
1012 offset = (data_section->data_offset + align - 1) & -align;
1013 data_section->data_offset = offset;
1014 /* XXX: not portable yet */
1015 #if defined(__i386__) || defined(__x86_64__)
1016 /* Zero pad x87 tenbyte long doubles */
1017 if (size == LDOUBLE_SIZE) {
1018 vtop->c.tab[2] &= 0xffff;
1019 #if LDOUBLE_SIZE == 16
1020 vtop->c.tab[3] = 0;
1021 #endif
1023 #endif
1024 ptr = section_ptr_add(data_section, size);
1025 size = size >> 2;
1026 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1027 check.d = 1;
1028 if(check.tab[0])
1029 for(i=0;i<size;i++)
1030 ptr[i] = vtop->c.tab[size-1-i];
1031 else
1032 #endif
1033 for(i=0;i<size;i++)
1034 ptr[i] = vtop->c.tab[i];
1035 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1036 vtop->r |= VT_LVAL | VT_SYM;
1037 vtop->sym = sym;
1038 vtop->c.i = 0;
1040 #ifdef CONFIG_TCC_BCHECK
1041 if (vtop->r & VT_MUSTBOUND)
1042 gbound();
1043 #endif
1045 r = vtop->r & VT_VALMASK;
1046 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1047 #ifndef TCC_TARGET_ARM64
1048 if (rc == RC_IRET)
1049 rc2 = RC_LRET;
1050 #ifdef TCC_TARGET_X86_64
1051 else if (rc == RC_FRET)
1052 rc2 = RC_QRET;
1053 #endif
1054 #endif
1056 /* need to reload if:
1057 - constant
1058 - lvalue (need to dereference pointer)
1059 - already a register, but not in the right class */
1060 if (r >= VT_CONST
1061 || (vtop->r & VT_LVAL)
1062 || !(reg_classes[r] & rc)
1063 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1064 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1065 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1066 #else
1067 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1068 #endif
1071 r = get_reg(rc);
1072 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1073 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1074 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1075 #else
1076 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1077 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1078 unsigned long long ll;
1079 #endif
1080 int r2, original_type;
1081 original_type = vtop->type.t;
1082 /* two register type load : expand to two words
1083 temporarily */
1084 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1085 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1086 /* load constant */
1087 ll = vtop->c.i;
1088 vtop->c.i = ll; /* first word */
1089 load(r, vtop);
1090 vtop->r = r; /* save register value */
1091 vpushi(ll >> 32); /* second word */
1092 } else
1093 #endif
1094 if (vtop->r & VT_LVAL) {
1095 /* We do not want to modify the long long
1096 pointer here, so the safest (and least
1097 efficient) approach is to save all the other
1098 registers on the stack. XXX: totally inefficient. */
1099 #if 0
1100 save_regs(1);
1101 #else
1102 /* lvalue_save: save only if used further down the stack */
1103 save_reg_upstack(vtop->r, 1);
1104 #endif
1105 /* load from memory */
1106 vtop->type.t = load_type;
1107 load(r, vtop);
1108 vdup();
1109 vtop[-1].r = r; /* save register value */
1110 /* increment pointer to get second word */
1111 vtop->type.t = addr_type;
1112 gaddrof();
1113 vpushi(load_size);
1114 gen_op('+');
1115 vtop->r |= VT_LVAL;
1116 vtop->type.t = load_type;
1117 } else {
1118 /* move registers */
1119 load(r, vtop);
1120 vdup();
1121 vtop[-1].r = r; /* save register value */
1122 vtop->r = vtop[-1].r2;
1124 /* Allocate second register. Here we rely on the fact that
1125 get_reg() tries first to free r2 of an SValue. */
1126 r2 = get_reg(rc2);
1127 load(r2, vtop);
1128 vpop();
1129 /* write second register */
1130 vtop->r2 = r2;
1131 vtop->type.t = original_type;
1132 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1133 int t1, t;
1134 /* lvalue of scalar type : need to use lvalue type
1135 because of possible cast */
1136 t = vtop->type.t;
1137 t1 = t;
1138 /* compute memory access type */
1139 if (vtop->r & VT_REF)
1140 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1141 t = VT_PTR;
1142 #else
1143 t = VT_INT;
1144 #endif
1145 else if (vtop->r & VT_LVAL_BYTE)
1146 t = VT_BYTE;
1147 else if (vtop->r & VT_LVAL_SHORT)
1148 t = VT_SHORT;
1149 if (vtop->r & VT_LVAL_UNSIGNED)
1150 t |= VT_UNSIGNED;
1151 vtop->type.t = t;
1152 load(r, vtop);
1153 /* restore wanted type */
1154 vtop->type.t = t1;
1155 } else {
1156 /* one register type load */
1157 load(r, vtop);
1160 vtop->r = r;
1161 #ifdef TCC_TARGET_C67
1162 /* uses register pairs for doubles */
1163 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1164 vtop->r2 = r+1;
1165 #endif
1167 return r;
1170 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1171 ST_FUNC void gv2(int rc1, int rc2)
1173 int v;
1175 /* generate more generic register first. But VT_JMP or VT_CMP
1176 values must be generated first in all cases to avoid possible
1177 reload errors */
1178 v = vtop[0].r & VT_VALMASK;
1179 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1180 vswap();
1181 gv(rc1);
1182 vswap();
1183 gv(rc2);
1184 /* test if reload is needed for first register */
1185 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1186 vswap();
1187 gv(rc1);
1188 vswap();
1190 } else {
1191 gv(rc2);
1192 vswap();
1193 gv(rc1);
1194 vswap();
1195 /* test if reload is needed for first register */
1196 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1197 gv(rc2);
1202 #ifndef TCC_TARGET_ARM64
1203 /* wrapper around RC_FRET to return a register by type */
1204 static int rc_fret(int t)
1206 #ifdef TCC_TARGET_X86_64
1207 if (t == VT_LDOUBLE) {
1208 return RC_ST0;
1210 #endif
1211 return RC_FRET;
1213 #endif
1215 /* wrapper around REG_FRET to return a register by type */
1216 static int reg_fret(int t)
1218 #ifdef TCC_TARGET_X86_64
1219 if (t == VT_LDOUBLE) {
1220 return TREG_ST0;
1222 #endif
1223 return REG_FRET;
1226 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1227 /* expand 64bit on stack in two ints */
1228 static void lexpand(void)
1230 int u, v;
1231 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1232 v = vtop->r & (VT_VALMASK | VT_LVAL);
1233 if (v == VT_CONST) {
1234 vdup();
1235 vtop[0].c.i >>= 32;
1236 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1237 vdup();
1238 vtop[0].c.i += 4;
1239 } else {
1240 gv(RC_INT);
1241 vdup();
1242 vtop[0].r = vtop[-1].r2;
1243 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1245 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
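/* lexpand leaves the low 32-bit half below the high half on the vstack: a
   constant is duplicated and the copy shifted right by 32, an lvalue at a
   known address is duplicated with its offset bumped by 4 (assuming the
   usual little-endian layout where the high word lives at the higher
   address), and anything else is loaded into a register pair and split
   via r/r2. */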
1247 #endif
1249 #ifdef TCC_TARGET_ARM
1250 /* expand long long on stack */
1251 ST_FUNC void lexpand_nr(void)
1253 int u,v;
1255 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1256 vdup();
1257 vtop->r2 = VT_CONST;
1258 vtop->type.t = VT_INT | u;
1259 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1260 if (v == VT_CONST) {
1261 vtop[-1].c.i = vtop->c.i;
1262 vtop->c.i = vtop->c.i >> 32;
1263 vtop->r = VT_CONST;
1264 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1265 vtop->c.i += 4;
1266 vtop->r = vtop[-1].r;
1267 } else if (v > VT_CONST) {
1268 vtop--;
1269 lexpand();
1270 } else
1271 vtop->r = vtop[-1].r2;
1272 vtop[-1].r2 = VT_CONST;
1273 vtop[-1].type.t = VT_INT | u;
1275 #endif
1277 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1278 /* build a long long from two ints */
1279 static void lbuild(int t)
1281 gv2(RC_INT, RC_INT);
1282 vtop[-1].r2 = vtop[0].r;
1283 vtop[-1].type.t = t;
1284 vpop();
1286 #endif
1288 /* rotate n first stack elements to the bottom
1289 I1 ... In -> I2 ... In I1 [top is right]
1291 ST_FUNC void vrotb(int n)
1293 int i;
1294 SValue tmp;
1296 tmp = vtop[-n + 1];
1297 for(i=-n+1;i!=0;i++)
1298 vtop[i] = vtop[i+1];
1299 vtop[0] = tmp;
1302 /* rotate the n elements before entry e towards the top
1303 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1305 ST_FUNC void vrote(SValue *e, int n)
1307 int i;
1308 SValue tmp;
1310 tmp = *e;
1311 for(i = 0;i < n - 1; i++)
1312 e[-i] = e[-i - 1];
1313 e[-n + 1] = tmp;
1316 /* rotate n first stack elements to the top
1317 I1 ... In -> In I1 ... I(n-1) [top is right]
1319 ST_FUNC void vrott(int n)
1321 vrote(vtop, n);
1324 /* pop stack value */
1325 ST_FUNC void vpop(void)
1327 int v;
1328 v = vtop->r & VT_VALMASK;
1329 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1330 /* for x86, we need to pop the FP stack */
1331 if (v == TREG_ST0) {
1332 o(0xd8dd); /* fstp %st(0) */
1333 } else
1334 #endif
1335 if (v == VT_JMP || v == VT_JMPI) {
1336 /* need to put correct jump if && or || without test */
1337 gsym(vtop->c.i);
1339 vtop--;
1342 /* convert stack entry to register and duplicate its value in another
1343 register */
1344 static void gv_dup(void)
1346 int rc, t, r, r1;
1347 SValue sv;
1349 t = vtop->type.t;
1350 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1351 if ((t & VT_BTYPE) == VT_LLONG) {
1352 lexpand();
1353 gv_dup();
1354 vswap();
1355 vrotb(3);
1356 gv_dup();
1357 vrotb(4);
1358 /* stack: H L L1 H1 */
1359 lbuild(t);
1360 vrotb(3);
1361 vrotb(3);
1362 vswap();
1363 lbuild(t);
1364 vswap();
1365 } else
1366 #endif
1368 /* duplicate value */
1369 rc = RC_INT;
1370 sv.type.t = VT_INT;
1371 if (is_float(t)) {
1372 rc = RC_FLOAT;
1373 #ifdef TCC_TARGET_X86_64
1374 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1375 rc = RC_ST0;
1377 #endif
1378 sv.type.t = t;
1380 r = gv(rc);
1381 r1 = get_reg(rc);
1382 sv.r = r;
1383 sv.c.i = 0;
1384 load(r1, &sv); /* move r to r1 */
1385 vdup();
1386 /* duplicates value */
1387 if (r != r1)
1388 vtop->r = r1;
1392 /* Generate value test
1394 * Generate a test for any value (jump, comparison and integers) */
1395 ST_FUNC int gvtst(int inv, int t)
1397 int v = vtop->r & VT_VALMASK;
1398 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1399 vpushi(0);
1400 gen_op(TOK_NE);
1402 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1403 /* constant jmp optimization */
1404 if ((vtop->c.i != 0) != inv)
1405 t = gjmp(t);
1406 vtop--;
1407 return t;
1409 return gtst(inv, t);
1412 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1413 /* generate CPU independent (unsigned) long long operations */
1414 static void gen_opl(int op)
1416 int t, a, b, op1, c, i;
1417 int func;
1418 unsigned short reg_iret = REG_IRET;
1419 unsigned short reg_lret = REG_LRET;
1420 SValue tmp;
1422 switch(op) {
1423 case '/':
1424 case TOK_PDIV:
1425 func = TOK___divdi3;
1426 goto gen_func;
1427 case TOK_UDIV:
1428 func = TOK___udivdi3;
1429 goto gen_func;
1430 case '%':
1431 func = TOK___moddi3;
1432 goto gen_mod_func;
1433 case TOK_UMOD:
1434 func = TOK___umoddi3;
1435 gen_mod_func:
1436 #ifdef TCC_ARM_EABI
1437 reg_iret = TREG_R2;
1438 reg_lret = TREG_R3;
1439 #endif
1440 gen_func:
1441 /* call generic long long function */
1442 vpush_global_sym(&func_old_type, func);
1443 vrott(3);
1444 gfunc_call(2);
1445 vpushi(0);
1446 vtop->r = reg_iret;
1447 vtop->r2 = reg_lret;
1448 break;
1449 case '^':
1450 case '&':
1451 case '|':
1452 case '*':
1453 case '+':
1454 case '-':
1455 //pv("gen_opl A",0,2);
1456 t = vtop->type.t;
1457 vswap();
1458 lexpand();
1459 vrotb(3);
1460 lexpand();
1461 /* stack: L1 H1 L2 H2 */
1462 tmp = vtop[0];
1463 vtop[0] = vtop[-3];
1464 vtop[-3] = tmp;
1465 tmp = vtop[-2];
1466 vtop[-2] = vtop[-3];
1467 vtop[-3] = tmp;
1468 vswap();
1469 /* stack: H1 H2 L1 L2 */
1470 //pv("gen_opl B",0,4);
1471 if (op == '*') {
1472 vpushv(vtop - 1);
1473 vpushv(vtop - 1);
1474 gen_op(TOK_UMULL);
1475 lexpand();
1476 /* stack: H1 H2 L1 L2 ML MH */
1477 for(i=0;i<4;i++)
1478 vrotb(6);
1479 /* stack: ML MH H1 H2 L1 L2 */
1480 tmp = vtop[0];
1481 vtop[0] = vtop[-2];
1482 vtop[-2] = tmp;
1483 /* stack: ML MH H1 L2 H2 L1 */
1484 gen_op('*');
1485 vrotb(3);
1486 vrotb(3);
1487 gen_op('*');
1488 /* stack: ML MH M1 M2 */
1489 gen_op('+');
1490 gen_op('+');
1491 } else if (op == '+' || op == '-') {
1492 /* XXX: add non carry method too (for MIPS or alpha) */
1493 if (op == '+')
1494 op1 = TOK_ADDC1;
1495 else
1496 op1 = TOK_SUBC1;
1497 gen_op(op1);
1498 /* stack: H1 H2 (L1 op L2) */
1499 vrotb(3);
1500 vrotb(3);
1501 gen_op(op1 + 1); /* TOK_xxxC2 */
1502 } else {
1503 gen_op(op);
1504 /* stack: H1 H2 (L1 op L2) */
1505 vrotb(3);
1506 vrotb(3);
1507 /* stack: (L1 op L2) H1 H2 */
1508 gen_op(op);
1509 /* stack: (L1 op L2) (H1 op H2) */
1511 /* stack: L H */
1512 lbuild(t);
1513 break;
1514 case TOK_SAR:
1515 case TOK_SHR:
1516 case TOK_SHL:
1517 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1518 t = vtop[-1].type.t;
1519 vswap();
1520 lexpand();
1521 vrotb(3);
1522 /* stack: L H shift */
1523 c = (int)vtop->c.i;
1524 /* constant: simpler */
1525 /* NOTE: all comments are for SHL. the other cases are
1526 done by swapping words */
1527 vpop();
1528 if (op != TOK_SHL)
1529 vswap();
1530 if (c >= 32) {
1531 /* stack: L H */
1532 vpop();
1533 if (c > 32) {
1534 vpushi(c - 32);
1535 gen_op(op);
1537 if (op != TOK_SAR) {
1538 vpushi(0);
1539 } else {
1540 gv_dup();
1541 vpushi(31);
1542 gen_op(TOK_SAR);
1544 vswap();
1545 } else {
1546 vswap();
1547 gv_dup();
1548 /* stack: H L L */
1549 vpushi(c);
1550 gen_op(op);
1551 vswap();
1552 vpushi(32 - c);
1553 if (op == TOK_SHL)
1554 gen_op(TOK_SHR);
1555 else
1556 gen_op(TOK_SHL);
1557 vrotb(3);
1558 /* stack: L L H */
1559 vpushi(c);
1560 if (op == TOK_SHL)
1561 gen_op(TOK_SHL);
1562 else
1563 gen_op(TOK_SHR);
1564 gen_op('|');
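/* e.g. a constant SHL by c < 32 has just computed the pair
   low = L << c, high = (H << c) | (L >> (32 - c)); for SHR/SAR the two
   words were swapped above so the same sequence shifts the other way, and
   the c >= 32 branch keeps only the surviving word shifted by c - 32,
   filling the other word with zeros (or with copies of the sign bit for SAR). */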
1566 if (op != TOK_SHL)
1567 vswap();
1568 lbuild(t);
1569 } else {
1570 /* XXX: should provide a faster fallback on x86 ? */
1571 switch(op) {
1572 case TOK_SAR:
1573 func = TOK___ashrdi3;
1574 goto gen_func;
1575 case TOK_SHR:
1576 func = TOK___lshrdi3;
1577 goto gen_func;
1578 case TOK_SHL:
1579 func = TOK___ashldi3;
1580 goto gen_func;
1583 break;
1584 default:
1585 /* compare operations */
1586 t = vtop->type.t;
1587 vswap();
1588 lexpand();
1589 vrotb(3);
1590 lexpand();
1591 /* stack: L1 H1 L2 H2 */
1592 tmp = vtop[-1];
1593 vtop[-1] = vtop[-2];
1594 vtop[-2] = tmp;
1595 /* stack: L1 L2 H1 H2 */
1596 /* compare high */
1597 op1 = op;
1598 /* when values are equal, we need to compare low words. since
1599 the jump is inverted, we invert the test too. */
1600 if (op1 == TOK_LT)
1601 op1 = TOK_LE;
1602 else if (op1 == TOK_GT)
1603 op1 = TOK_GE;
1604 else if (op1 == TOK_ULT)
1605 op1 = TOK_ULE;
1606 else if (op1 == TOK_UGT)
1607 op1 = TOK_UGE;
1608 a = 0;
1609 b = 0;
1610 gen_op(op1);
1611 if (op1 != TOK_NE) {
1612 a = gvtst(1, 0);
1614 if (op != TOK_EQ) {
1615 /* generate non equal test */
1616 /* XXX: NOT PORTABLE yet */
1617 if (a == 0) {
1618 b = gvtst(0, 0);
1619 } else {
1620 #if defined(TCC_TARGET_I386)
1621 b = gjmp2(0x850f, 0);
1622 #elif defined(TCC_TARGET_ARM)
1623 b = ind;
1624 o(0x1A000000 | encbranch(ind, 0, 1));
1625 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1626 tcc_error("not implemented");
1627 #else
1628 #error not supported
1629 #endif
1632 /* compare low. Always unsigned */
1633 op1 = op;
1634 if (op1 == TOK_LT)
1635 op1 = TOK_ULT;
1636 else if (op1 == TOK_LE)
1637 op1 = TOK_ULE;
1638 else if (op1 == TOK_GT)
1639 op1 = TOK_UGT;
1640 else if (op1 == TOK_GE)
1641 op1 = TOK_UGE;
1642 gen_op(op1);
1643 a = gvtst(1, a);
1644 gsym(b);
1645 vseti(VT_JMPI, a);
1646 break;
1649 #endif
1651 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1653 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1654 return (a ^ b) >> 63 ? -x : x;
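/* Truncating signed 64-bit division carried out on uint64_t values: each
   operand is negated to its absolute value when its sign bit (bit 63) is
   set, the unsigned quotient is computed, and the sign is put back if the
   operand signs differ. E.g. gen_opic_sdiv(-7, 2) == -3; doing it in
   unsigned arithmetic also keeps INT64_MIN / -1 from trapping (it wraps).
   gen_opic() below reuses it for '%' as l1 - l2 * (l1 / l2). */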
1657 static int gen_opic_lt(uint64_t a, uint64_t b)
1659 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
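/* Signed "less than" on uint64_t operands: XOR-ing both sides with 1 << 63
   flips the sign bit, which maps the signed ordering onto the unsigned one
   (negative values end up below non-negative ones). E.g. gen_opic_lt(-1, 0)
   compares 0x7fffffffffffffff < 0x8000000000000000 and yields 1. */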
1662 /* handle integer constant optimizations and various machine
1663 independent opt */
1664 static void gen_opic(int op)
1666 SValue *v1 = vtop - 1;
1667 SValue *v2 = vtop;
1668 int t1 = v1->type.t & VT_BTYPE;
1669 int t2 = v2->type.t & VT_BTYPE;
1670 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1671 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1672 uint64_t l1 = c1 ? v1->c.i : 0;
1673 uint64_t l2 = c2 ? v2->c.i : 0;
1674 int shm = (t1 == VT_LLONG) ? 63 : 31;
1676 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1677 l1 = ((uint32_t)l1 |
1678 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1679 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1680 l2 = ((uint32_t)l2 |
1681 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1683 if (c1 && c2) {
1684 switch(op) {
1685 case '+': l1 += l2; break;
1686 case '-': l1 -= l2; break;
1687 case '&': l1 &= l2; break;
1688 case '^': l1 ^= l2; break;
1689 case '|': l1 |= l2; break;
1690 case '*': l1 *= l2; break;
1692 case TOK_PDIV:
1693 case '/':
1694 case '%':
1695 case TOK_UDIV:
1696 case TOK_UMOD:
1697 /* if division by zero, generate explicit division */
1698 if (l2 == 0) {
1699 if (const_wanted)
1700 tcc_error("division by zero in constant");
1701 goto general_case;
1703 switch(op) {
1704 default: l1 = gen_opic_sdiv(l1, l2); break;
1705 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1706 case TOK_UDIV: l1 = l1 / l2; break;
1707 case TOK_UMOD: l1 = l1 % l2; break;
1709 break;
1710 case TOK_SHL: l1 <<= (l2 & shm); break;
1711 case TOK_SHR: l1 >>= (l2 & shm); break;
1712 case TOK_SAR:
1713 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1714 break;
1715 /* tests */
1716 case TOK_ULT: l1 = l1 < l2; break;
1717 case TOK_UGE: l1 = l1 >= l2; break;
1718 case TOK_EQ: l1 = l1 == l2; break;
1719 case TOK_NE: l1 = l1 != l2; break;
1720 case TOK_ULE: l1 = l1 <= l2; break;
1721 case TOK_UGT: l1 = l1 > l2; break;
1722 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1723 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1724 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1725 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1726 /* logical */
1727 case TOK_LAND: l1 = l1 && l2; break;
1728 case TOK_LOR: l1 = l1 || l2; break;
1729 default:
1730 goto general_case;
1732 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1733 l1 = ((uint32_t)l1 |
1734 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1735 v1->c.i = l1;
1736 vtop--;
1737 } else {
1738 /* if commutative ops, put c2 as constant */
1739 if (c1 && (op == '+' || op == '&' || op == '^' ||
1740 op == '|' || op == '*')) {
1741 vswap();
1742 c2 = c1; //c = c1, c1 = c2, c2 = c;
1743 l2 = l1; //l = l1, l1 = l2, l2 = l;
1745 if (!const_wanted &&
1746 c1 && ((l1 == 0 &&
1747 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1748 (l1 == -1 && op == TOK_SAR))) {
1749 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1750 vtop--;
1751 } else if (!const_wanted &&
1752 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1753 (l2 == -1 && op == '|') ||
1754 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1755 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1756 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1757 if (l2 == 1)
1758 vtop->c.i = 0;
1759 vswap();
1760 vtop--;
1761 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1762 op == TOK_PDIV) &&
1763 l2 == 1) ||
1764 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1765 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1766 l2 == 0) ||
1767 (op == '&' &&
1768 l2 == -1))) {
1769 /* filter out NOP operations like x*1, x-0, x&-1... */
1770 vtop--;
1771 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1772 /* try to use shifts instead of muls or divs */
1773 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1774 int n = -1;
1775 while (l2) {
1776 l2 >>= 1;
1777 n++;
1779 vtop->c.i = n;
1780 if (op == '*')
1781 op = TOK_SHL;
1782 else if (op == TOK_PDIV)
1783 op = TOK_SAR;
1784 else
1785 op = TOK_SHR;
1787 goto general_case;
1788 } else if (c2 && (op == '+' || op == '-') &&
1789 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1790 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1791 /* symbol + constant case */
1792 if (op == '-')
1793 l2 = -l2;
1794 l2 += vtop[-1].c.i;
1795 /* The backends can't always deal with addends to symbols
1796 larger than +-1<<31. Don't construct such. */
1797 if ((int)l2 != l2)
1798 goto general_case;
1799 vtop--;
1800 vtop->c.i = l2;
1801 } else {
1802 general_case:
1803 /* call low level op generator */
1804 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1805 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1806 gen_opl(op);
1807 else
1808 gen_opi(op);
1813 /* generate a floating point operation with constant propagation */
1814 static void gen_opif(int op)
1816 int c1, c2;
1817 SValue *v1, *v2;
1818 long double f1, f2;
1820 v1 = vtop - 1;
1821 v2 = vtop;
1822 /* currently, we cannot do computations with forward symbols */
1823 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1824 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 if (c1 && c2) {
1826 if (v1->type.t == VT_FLOAT) {
1827 f1 = v1->c.f;
1828 f2 = v2->c.f;
1829 } else if (v1->type.t == VT_DOUBLE) {
1830 f1 = v1->c.d;
1831 f2 = v2->c.d;
1832 } else {
1833 f1 = v1->c.ld;
1834 f2 = v2->c.ld;
1837 /* NOTE: we only do constant propagation with finite numbers (not
1838 NaN or infinity) (ANSI spec) */
1839 if (!ieee_finite(f1) || !ieee_finite(f2))
1840 goto general_case;
1842 switch(op) {
1843 case '+': f1 += f2; break;
1844 case '-': f1 -= f2; break;
1845 case '*': f1 *= f2; break;
1846 case '/':
1847 if (f2 == 0.0) {
1848 if (const_wanted)
1849 tcc_error("division by zero in constant");
1850 goto general_case;
1852 f1 /= f2;
1853 break;
1854 /* XXX: also handles tests ? */
1855 default:
1856 goto general_case;
1858 /* XXX: overflow test ? */
1859 if (v1->type.t == VT_FLOAT) {
1860 v1->c.f = f1;
1861 } else if (v1->type.t == VT_DOUBLE) {
1862 v1->c.d = f1;
1863 } else {
1864 v1->c.ld = f1;
1866 vtop--;
1867 } else {
1868 general_case:
1869 gen_opf(op);
1873 static int pointed_size(CType *type)
1875 int align;
1876 return type_size(pointed_type(type), &align);
1879 static void vla_runtime_pointed_size(CType *type)
1881 int align;
1882 vla_runtime_type_size(pointed_type(type), &align);
1885 static inline int is_null_pointer(SValue *p)
1887 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1888 return 0;
1889 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1890 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1891 ((p->type.t & VT_BTYPE) == VT_PTR &&
1892 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1895 static inline int is_integer_btype(int bt)
1897 return (bt == VT_BYTE || bt == VT_SHORT ||
1898 bt == VT_INT || bt == VT_LLONG);
1901 /* check types for comparison or subtraction of pointers */
1902 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1904 CType *type1, *type2, tmp_type1, tmp_type2;
1905 int bt1, bt2;
1907 /* null pointers are accepted for all comparisons, as in gcc */
1908 if (is_null_pointer(p1) || is_null_pointer(p2))
1909 return;
1910 type1 = &p1->type;
1911 type2 = &p2->type;
1912 bt1 = type1->t & VT_BTYPE;
1913 bt2 = type2->t & VT_BTYPE;
1914 /* accept comparison between pointer and integer with a warning */
1915 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1916 if (op != TOK_LOR && op != TOK_LAND )
1917 tcc_warning("comparison between pointer and integer");
1918 return;
1921 /* both must be pointers or implicit function pointers */
1922 if (bt1 == VT_PTR) {
1923 type1 = pointed_type(type1);
1924 } else if (bt1 != VT_FUNC)
1925 goto invalid_operands;
1927 if (bt2 == VT_PTR) {
1928 type2 = pointed_type(type2);
1929 } else if (bt2 != VT_FUNC) {
1930 invalid_operands:
1931 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1933 if ((type1->t & VT_BTYPE) == VT_VOID ||
1934 (type2->t & VT_BTYPE) == VT_VOID)
1935 return;
1936 tmp_type1 = *type1;
1937 tmp_type2 = *type2;
1938 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1939 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1940 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1941 /* gcc-like error if '-' is used */
1942 if (op == '-')
1943 goto invalid_operands;
1944 else
1945 tcc_warning("comparison of distinct pointer types lacks a cast");
1949 /* generic gen_op: handles type problems */
1950 ST_FUNC void gen_op(int op)
1952 int u, t1, t2, bt1, bt2, t;
1953 CType type1;
1955 redo:
1956 t1 = vtop[-1].type.t;
1957 t2 = vtop[0].type.t;
1958 bt1 = t1 & VT_BTYPE;
1959 bt2 = t2 & VT_BTYPE;
1961 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1962 tcc_error("operation on a struct");
1963 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1964 if (bt2 == VT_FUNC) {
1965 mk_pointer(&vtop->type);
1966 gaddrof();
1968 if (bt1 == VT_FUNC) {
1969 vswap();
1970 mk_pointer(&vtop->type);
1971 gaddrof();
1972 vswap();
1974 goto redo;
1975 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1976 /* at least one operand is a pointer */
1977 /* relational op: both operands must be pointers */
1978 if (op >= TOK_ULT && op <= TOK_LOR) {
1979 check_comparison_pointer_types(vtop - 1, vtop, op);
1980 /* pointers are handled as unsigned */
1981 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1982 t = VT_LLONG | VT_UNSIGNED;
1983 #else
1984 t = VT_INT | VT_UNSIGNED;
1985 #endif
1986 goto std_op;
1988 /* if both pointers, then it must be the '-' op */
1989 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1990 if (op != '-')
1991 tcc_error("cannot use pointers here");
1992 check_comparison_pointer_types(vtop - 1, vtop, op);
1993 /* XXX: check that types are compatible */
1994 if (vtop[-1].type.t & VT_VLA) {
1995 vla_runtime_pointed_size(&vtop[-1].type);
1996 } else {
1997 vpushi(pointed_size(&vtop[-1].type));
1999 vrott(3);
2000 gen_opic(op);
2001 /* set to integer type */
2002 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2003 vtop->type.t = VT_LLONG;
2004 #else
2005 vtop->type.t = VT_INT;
2006 #endif
2007 vswap();
2008 gen_op(TOK_PDIV);
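/* e.g. for two int pointers p and q, q - p first subtracts the raw
   addresses and then divides the byte difference by sizeof(int) with
   TOK_PDIV, a division that may assume the dividend is an exact multiple. */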
2009 } else {
2010 /* exactly one pointer : must be '+' or '-'. */
2011 if (op != '-' && op != '+')
2012 tcc_error("cannot use pointers here");
2013 /* Put pointer as first operand */
2014 if (bt2 == VT_PTR) {
2015 vswap();
2016 swap(&t1, &t2);
2018 #if PTR_SIZE == 4
2019 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2020 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2021 gen_cast(&int_type);
2022 #endif
2023 type1 = vtop[-1].type;
2024 type1.t &= ~VT_ARRAY;
2025 if (vtop[-1].type.t & VT_VLA)
2026 vla_runtime_pointed_size(&vtop[-1].type);
2027 else {
2028 u = pointed_size(&vtop[-1].type);
2029 if (u < 0)
2030 tcc_error("unknown array element size");
2031 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2032 vpushll(u);
2033 #else
2034 /* XXX: cast to int ? (long long case) */
2035 vpushi(u);
2036 #endif
2038 gen_op('*');
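/* e.g. for int *p on a 32-bit target, p + 3 multiplies 3 by the pointed
   size (4 here) before the addition below, so the result points three
   ints further on. */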
2039 #if 0
2040 /* #ifdef CONFIG_TCC_BCHECK
2041 The main reason for removing this code:
2042 #include <stdio.h>
2043 int main ()
2045 int v[10];
2046 int i = 10;
2047 int j = 9;
2048 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2049 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2051 When this code is on, the output looks like:
2052 v+i-j = 0xfffffffe
2053 v+(i-j) = 0xbff84000
2055 /* if evaluating constant expression, no code should be
2056 generated, so no bound check */
2057 if (tcc_state->do_bounds_check && !const_wanted) {
2058 /* if bounded pointers, we generate a special code to
2059 test bounds */
2060 if (op == '-') {
2061 vpushi(0);
2062 vswap();
2063 gen_op('-');
2065 gen_bounded_ptr_add();
2066 } else
2067 #endif
2069 gen_opic(op);
2071 /* restore the type in case gen_opic() swapped the operands */
2072 vtop->type = type1;
2074 } else if (is_float(bt1) || is_float(bt2)) {
2075 /* compute bigger type and do implicit casts */
2076 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2077 t = VT_LDOUBLE;
2078 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2079 t = VT_DOUBLE;
2080 } else {
2081 t = VT_FLOAT;
2083 /* floats can only be used for a few operations */
2084 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2085 (op < TOK_ULT || op > TOK_GT))
2086 tcc_error("invalid operands for binary operation");
2087 goto std_op;
2088 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2089 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2090 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2091 t |= VT_UNSIGNED;
2092 goto std_op;
2093 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2094 /* cast to biggest op */
2095 t = VT_LLONG;
2096 /* convert to unsigned if it does not fit in a long long */
2097 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2098 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2099 t |= VT_UNSIGNED;
2100 goto std_op;
2101 } else {
2102 /* integer operations */
2103 t = VT_INT;
2104 /* convert to unsigned if it does not fit in an integer */
2105 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2106 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2107 t |= VT_UNSIGNED;
2108 std_op:
2109 /* XXX: currently, some unsigned operations are explicit, so
2110 we modify them here */
2111 if (t & VT_UNSIGNED) {
2112 if (op == TOK_SAR)
2113 op = TOK_SHR;
2114 else if (op == '/')
2115 op = TOK_UDIV;
2116 else if (op == '%')
2117 op = TOK_UMOD;
2118 else if (op == TOK_LT)
2119 op = TOK_ULT;
2120 else if (op == TOK_GT)
2121 op = TOK_UGT;
2122 else if (op == TOK_LE)
2123 op = TOK_ULE;
2124 else if (op == TOK_GE)
2125 op = TOK_UGE;
2127 vswap();
2128 type1.t = t;
2129 gen_cast(&type1);
2130 vswap();
2131 /* special case for shifts and long long: we keep the shift as
2132 an integer */
2133 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2134 type1.t = VT_INT;
2135 gen_cast(&type1);
2136 if (is_float(t))
2137 gen_opif(op);
2138 else
2139 gen_opic(op);
2140 if (op >= TOK_ULT && op <= TOK_GT) {
2141 /* relational op: the result is an int */
2142 vtop->type.t = VT_INT;
2143 } else {
2144 vtop->type.t = t;
2147 // Make sure that we have converted to an rvalue:
2148 if (vtop->r & VT_LVAL)
2149 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2152 #ifndef TCC_TARGET_ARM
2153 /* generic itof for unsigned long long case */
2154 static void gen_cvt_itof1(int t)
2156 #ifdef TCC_TARGET_ARM64
2157 gen_cvt_itof(t);
2158 #else
2159 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2160 (VT_LLONG | VT_UNSIGNED)) {
2162 if (t == VT_FLOAT)
2163 vpush_global_sym(&func_old_type, TOK___floatundisf);
2164 #if LDOUBLE_SIZE != 8
2165 else if (t == VT_LDOUBLE)
2166 vpush_global_sym(&func_old_type, TOK___floatundixf);
2167 #endif
2168 else
2169 vpush_global_sym(&func_old_type, TOK___floatundidf);
2170 vrott(2);
2171 gfunc_call(1);
2172 vpushi(0);
2173 vtop->r = reg_fret(t);
2174 } else {
2175 gen_cvt_itof(t);
2177 #endif
2179 #endif
2181 /* generic ftoi for unsigned long long case */
2182 static void gen_cvt_ftoi1(int t)
2184 #ifdef TCC_TARGET_ARM64
2185 gen_cvt_ftoi(t);
2186 #else
2187 int st;
2189 if (t == (VT_LLONG | VT_UNSIGNED)) {
2190 /* not handled natively */
2191 st = vtop->type.t & VT_BTYPE;
2192 if (st == VT_FLOAT)
2193 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2194 #if LDOUBLE_SIZE != 8
2195 else if (st == VT_LDOUBLE)
2196 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2197 #endif
2198 else
2199 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2200 vrott(2);
2201 gfunc_call(1);
2202 vpushi(0);
2203 vtop->r = REG_IRET;
2204 vtop->r2 = REG_LRET;
2205 } else {
2206 gen_cvt_ftoi(t);
2208 #endif
2211 /* force char or short cast */
2212 static void force_charshort_cast(int t)
2214 int bits, dbt;
2215 dbt = t & VT_BTYPE;
2216 /* XXX: add optimization if lvalue : just change type and offset */
2217 if (dbt == VT_BYTE)
2218 bits = 8;
2219 else
2220 bits = 16;
2221 if (t & VT_UNSIGNED) {
2222 vpushi((1 << bits) - 1);
2223 gen_op('&');
2224 } else {
2225 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2226 bits = 64 - bits;
2227 else
2228 bits = 32 - bits;
2229 vpushi(bits);
2230 gen_op(TOK_SHL);
2231 /* result must be signed or the SAR is converted to an SHL
2232 This was not the case when "t" was a signed short
2233 and the last value on the stack was an unsigned int */
2234 vtop->type.t &= ~VT_UNSIGNED;
2235 vpushi(bits);
2236 gen_op(TOK_SAR);
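/* Note: the delayed char/short cast is done purely with integer ops on
   the value stack: unsigned targets are masked, signed targets are
   sign-extended with a shift-left/shift-right pair.  Sketch (32-bit
   value on the stack):
       (unsigned char)0x1ff  ->  0x1ff & 0xff        == 0xff
       (signed char)0x1ff    ->  (0x1ff << 24) >> 24 == -1
*/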
2240 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2241 static void gen_cast(CType *type)
2243 int sbt, dbt, sf, df, c, p;
2245 /* special delayed cast for char/short */
2246 /* XXX: in some cases (multiple cascaded casts), it may still
2247 be incorrect */
2248 if (vtop->r & VT_MUSTCAST) {
2249 vtop->r &= ~VT_MUSTCAST;
2250 force_charshort_cast(vtop->type.t);
2253 /* bitfields first get cast to ints */
2254 if (vtop->type.t & VT_BITFIELD) {
2255 gv(RC_INT);
2258 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2259 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2261 if (sbt != dbt) {
2262 sf = is_float(sbt);
2263 df = is_float(dbt);
2264 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2265 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2266 if (c) {
2267 /* constant case: we can do it now */
2268 /* XXX: in ISO C, this cannot be done if the conversion would be an error */
2269 if (sbt == VT_FLOAT)
2270 vtop->c.ld = vtop->c.f;
2271 else if (sbt == VT_DOUBLE)
2272 vtop->c.ld = vtop->c.d;
2274 if (df) {
2275 if ((sbt & VT_BTYPE) == VT_LLONG) {
2276 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2277 vtop->c.ld = vtop->c.i;
2278 else
2279 vtop->c.ld = -(long double)-vtop->c.i;
2280 } else if(!sf) {
2281 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2282 vtop->c.ld = (uint32_t)vtop->c.i;
2283 else
2284 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2287 if (dbt == VT_FLOAT)
2288 vtop->c.f = (float)vtop->c.ld;
2289 else if (dbt == VT_DOUBLE)
2290 vtop->c.d = (double)vtop->c.ld;
2291 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2292 vtop->c.i = vtop->c.ld;
2293 } else if (sf && dbt == VT_BOOL) {
2294 vtop->c.i = (vtop->c.ld != 0);
2295 } else {
2296 if(sf)
2297 vtop->c.i = vtop->c.ld;
2298 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2300 else if (sbt & VT_UNSIGNED)
2301 vtop->c.i = (uint32_t)vtop->c.i;
2302 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2303 else if (sbt == VT_PTR)
2305 #endif
2306 else if (sbt != VT_LLONG)
2307 vtop->c.i = ((uint32_t)vtop->c.i |
2308 -(vtop->c.i & 0x80000000));
2310 if (dbt == (VT_LLONG|VT_UNSIGNED))
2312 else if (dbt == VT_BOOL)
2313 vtop->c.i = (vtop->c.i != 0);
2314 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2315 else if (dbt == VT_PTR)
2317 #endif
2318 else if (dbt != VT_LLONG) {
2319 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2320 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2321 0xffffffff);
2322 vtop->c.i &= m;
2323 if (!(dbt & VT_UNSIGNED))
2324 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2327 } else if (p && dbt == VT_BOOL) {
2328 vtop->r = VT_CONST;
2329 vtop->c.i = 1;
2330 } else {
2331 /* non constant case: generate code */
2332 if (sf && df) {
2333 /* convert from fp to fp */
2334 gen_cvt_ftof(dbt);
2335 } else if (df) {
2336 /* convert int to fp */
2337 gen_cvt_itof1(dbt);
2338 } else if (sf) {
2339 /* convert fp to int */
2340 if (dbt == VT_BOOL) {
2341 vpushi(0);
2342 gen_op(TOK_NE);
2343 } else {
2344 /* we handle char/short/etc... with generic code */
2345 if (dbt != (VT_INT | VT_UNSIGNED) &&
2346 dbt != (VT_LLONG | VT_UNSIGNED) &&
2347 dbt != VT_LLONG)
2348 dbt = VT_INT;
2349 gen_cvt_ftoi1(dbt);
2350 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2351 /* additional cast for char/short... */
2352 vtop->type.t = dbt;
2353 gen_cast(type);
2356 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2357 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2358 if ((sbt & VT_BTYPE) != VT_LLONG) {
2359 /* scalar to long long */
2360 /* machine independent conversion */
2361 gv(RC_INT);
2362 /* generate high word */
2363 if (sbt == (VT_INT | VT_UNSIGNED)) {
2364 vpushi(0);
2365 gv(RC_INT);
2366 } else {
2367 if (sbt == VT_PTR) {
2368 /* cast from pointer to int before we apply
2369 shift operation, which pointers don't support */
2370 gen_cast(&int_type);
2372 gv_dup();
2373 vpushi(31);
2374 gen_op(TOK_SAR);
2376 /* patch second register */
2377 vtop[-1].r2 = vtop->r;
2378 vpop();
2380 #else
2381 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2382 (dbt & VT_BTYPE) == VT_PTR ||
2383 (dbt & VT_BTYPE) == VT_FUNC) {
2384 if ((sbt & VT_BTYPE) != VT_LLONG &&
2385 (sbt & VT_BTYPE) != VT_PTR &&
2386 (sbt & VT_BTYPE) != VT_FUNC) {
2387 /* need to convert from 32bit to 64bit */
2388 gv(RC_INT);
2389 if (sbt != (VT_INT | VT_UNSIGNED)) {
2390 #if defined(TCC_TARGET_ARM64)
2391 gen_cvt_sxtw();
2392 #elif defined(TCC_TARGET_X86_64)
2393 int r = gv(RC_INT);
2394 /* x86_64 specific: movslq */
2395 o(0x6348);
2396 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2397 #else
2398 #error
2399 #endif
2402 #endif
2403 } else if (dbt == VT_BOOL) {
2404 /* scalar to bool */
2405 vpushi(0);
2406 gen_op(TOK_NE);
2407 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2408 (dbt & VT_BTYPE) == VT_SHORT) {
2409 if (sbt == VT_PTR) {
2410 vtop->type.t = VT_INT;
2411 tcc_warning("nonportable conversion from pointer to char/short");
2413 force_charshort_cast(dbt);
2414 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2415 } else if ((dbt & VT_BTYPE) == VT_INT) {
2416 /* scalar to int */
2417 if ((sbt & VT_BTYPE) == VT_LLONG) {
2418 /* from long long: just take low order word */
2419 lexpand();
2420 vpop();
2422 /* if lvalue and single word type, nothing to do because
2423 the lvalue already contains the real type size (see
2424 VT_LVAL_xxx constants) */
2425 #endif
2428 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2429 /* if we are casting between pointer types,
2430 we must update the VT_LVAL_xxx size */
2431 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2432 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2434 vtop->type = *type;
2437 /* return type size as known at compile time. Put alignment at 'a' */
2438 ST_FUNC int type_size(CType *type, int *a)
2440 Sym *s;
2441 int bt;
2443 bt = type->t & VT_BTYPE;
2444 if (bt == VT_STRUCT) {
2445 /* struct/union */
2446 s = type->ref;
2447 *a = s->r;
2448 return s->c;
2449 } else if (bt == VT_PTR) {
2450 if (type->t & VT_ARRAY) {
2451 int ts;
2453 s = type->ref;
2454 ts = type_size(&s->type, a);
2456 if (ts < 0 && s->c < 0)
2457 ts = -ts;
2459 return ts * s->c;
2460 } else {
2461 *a = PTR_SIZE;
2462 return PTR_SIZE;
2464 } else if (bt == VT_LDOUBLE) {
2465 *a = LDOUBLE_ALIGN;
2466 return LDOUBLE_SIZE;
2467 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2468 #ifdef TCC_TARGET_I386
2469 #ifdef TCC_TARGET_PE
2470 *a = 8;
2471 #else
2472 *a = 4;
2473 #endif
2474 #elif defined(TCC_TARGET_ARM)
2475 #ifdef TCC_ARM_EABI
2476 *a = 8;
2477 #else
2478 *a = 4;
2479 #endif
2480 #else
2481 *a = 8;
2482 #endif
2483 return 8;
2484 } else if (bt == VT_INT || bt == VT_FLOAT) {
2485 *a = 4;
2486 return 4;
2487 } else if (bt == VT_SHORT) {
2488 *a = 2;
2489 return 2;
2490 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2491 *a = 8;
2492 return 16;
2493 } else if (bt == VT_ENUM) {
2494 *a = 4;
2495 /* Enums might be incomplete, so don't just return '4' here. */
2496 return type->ref->c;
2497 } else {
2498 /* char, void, function, _Bool */
2499 *a = 1;
2500 return 1;
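/* Note: type_size() returns the size and stores the alignment through
   'a'; the two can differ.  With the defaults above, for instance, a
   'long long' is 8 bytes but only 4-byte aligned on i386 (non-PE) and
   ARM (non-EABI), while it is 8-byte aligned elsewhere.  Incomplete
   types (an undefined struct, an array of unknown length) come back
   with a negative size, which callers are expected to check. */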
2504 /* push type size as known at run time on top of value stack. Put
2505 alignment at 'a' */
2506 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2508 if (type->t & VT_VLA) {
2509 type_size(&type->ref->type, a);
2510 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2511 } else {
2512 vpushi(type_size(type, a));
2516 static void vla_sp_restore(void) {
2517 if (vlas_in_scope) {
2518 gen_vla_sp_restore(vla_sp_loc);
2522 static void vla_sp_restore_root(void) {
2523 if (vlas_in_scope) {
2524 gen_vla_sp_restore(vla_sp_root_loc);
2528 /* return the type pointed to by 'type' */
2529 static inline CType *pointed_type(CType *type)
2531 return &type->ref->type;
2534 /* modify type so that it becomes a pointer to the original type. */
2535 ST_FUNC void mk_pointer(CType *type)
2537 Sym *s;
2538 s = sym_push(SYM_FIELD, type, 0, -1);
2539 type->t = VT_PTR | (type->t & ~VT_TYPE);
2540 type->ref = s;
2543 /* compare function types. OLD functions match any new functions */
2544 static int is_compatible_func(CType *type1, CType *type2)
2546 Sym *s1, *s2;
2548 s1 = type1->ref;
2549 s2 = type2->ref;
2550 if (!is_compatible_types(&s1->type, &s2->type))
2551 return 0;
2552 /* check func_call */
2553 if (s1->a.func_call != s2->a.func_call)
2554 return 0;
2555 /* XXX: not complete */
2556 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2557 return 1;
2558 if (s1->c != s2->c)
2559 return 0;
2560 while (s1 != NULL) {
2561 if (s2 == NULL)
2562 return 0;
2563 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2564 return 0;
2565 s1 = s1->next;
2566 s2 = s2->next;
2568 if (s2)
2569 return 0;
2570 return 1;
2573 /* return true if type1 and type2 are the same. If unqualified is
2574 true, qualifiers on the types are ignored.
2576 - enums are not checked, as with gcc's __builtin_types_compatible_p ()
2578 static int compare_types(CType *type1, CType *type2, int unqualified)
2580 int bt1, t1, t2;
2582 t1 = type1->t & VT_TYPE;
2583 t2 = type2->t & VT_TYPE;
2584 if (unqualified) {
2585 /* strip qualifiers before comparing */
2586 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2587 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2589 /* Default vs. explicit signedness only matters for char */
2590 if ((t1 & VT_BTYPE) != VT_BYTE) {
2591 t1 &= ~VT_DEFSIGN;
2592 t2 &= ~VT_DEFSIGN;
2594 /* An enum is compatible with (unsigned) int. Ideally we would
2595 store the enum's signedness in type->ref.a.<some_bit> and
2596 only accept unsigned enums with unsigned int and vice versa.
2597 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2598 from pointer target types, so we can't add it here either. */
2599 if ((t1 & VT_BTYPE) == VT_ENUM) {
2600 t1 = VT_INT;
2601 if (type1->ref->a.unsigned_enum)
2602 t1 |= VT_UNSIGNED;
2604 if ((t2 & VT_BTYPE) == VT_ENUM) {
2605 t2 = VT_INT;
2606 if (type2->ref->a.unsigned_enum)
2607 t2 |= VT_UNSIGNED;
2609 /* XXX: bitfields ? */
2610 if (t1 != t2)
2611 return 0;
2612 /* test more complicated cases */
2613 bt1 = t1 & VT_BTYPE;
2614 if (bt1 == VT_PTR) {
2615 type1 = pointed_type(type1);
2616 type2 = pointed_type(type2);
2617 return is_compatible_types(type1, type2);
2618 } else if (bt1 == VT_STRUCT) {
2619 return (type1->ref == type2->ref);
2620 } else if (bt1 == VT_FUNC) {
2621 return is_compatible_func(type1, type2);
2622 } else {
2623 return 1;
2627 /* return true if type1 and type2 are exactly the same (including
2628 qualifiers).
2630 static int is_compatible_types(CType *type1, CType *type2)
2632 return compare_types(type1,type2,0);
2635 /* return true if type1 and type2 are the same (ignoring qualifiers).
2637 static int is_compatible_parameter_types(CType *type1, CType *type2)
2639 return compare_types(type1,type2,1);
2642 /* print a type. If 'varstr' is not NULL, then the variable is also
2643 printed in the type */
2644 /* XXX: union */
2645 /* XXX: add array and function pointers */
2646 static void type_to_str(char *buf, int buf_size,
2647 CType *type, const char *varstr)
2649 int bt, v, t;
2650 Sym *s, *sa;
2651 char buf1[256];
2652 const char *tstr;
2654 t = type->t & VT_TYPE;
2655 bt = t & VT_BTYPE;
2656 buf[0] = '\0';
2657 if (t & VT_CONSTANT)
2658 pstrcat(buf, buf_size, "const ");
2659 if (t & VT_VOLATILE)
2660 pstrcat(buf, buf_size, "volatile ");
2661 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2662 pstrcat(buf, buf_size, "unsigned ");
2663 else if (t & VT_DEFSIGN)
2664 pstrcat(buf, buf_size, "signed ");
2665 switch(bt) {
2666 case VT_VOID:
2667 tstr = "void";
2668 goto add_tstr;
2669 case VT_BOOL:
2670 tstr = "_Bool";
2671 goto add_tstr;
2672 case VT_BYTE:
2673 tstr = "char";
2674 goto add_tstr;
2675 case VT_SHORT:
2676 tstr = "short";
2677 goto add_tstr;
2678 case VT_INT:
2679 tstr = "int";
2680 goto add_tstr;
2681 case VT_LONG:
2682 tstr = "long";
2683 goto add_tstr;
2684 case VT_LLONG:
2685 tstr = "long long";
2686 goto add_tstr;
2687 case VT_FLOAT:
2688 tstr = "float";
2689 goto add_tstr;
2690 case VT_DOUBLE:
2691 tstr = "double";
2692 goto add_tstr;
2693 case VT_LDOUBLE:
2694 tstr = "long double";
2695 add_tstr:
2696 pstrcat(buf, buf_size, tstr);
2697 break;
2698 case VT_ENUM:
2699 case VT_STRUCT:
2700 if (bt == VT_STRUCT)
2701 tstr = "struct ";
2702 else
2703 tstr = "enum ";
2704 pstrcat(buf, buf_size, tstr);
2705 v = type->ref->v & ~SYM_STRUCT;
2706 if (v >= SYM_FIRST_ANOM)
2707 pstrcat(buf, buf_size, "<anonymous>");
2708 else
2709 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2710 break;
2711 case VT_FUNC:
2712 s = type->ref;
2713 type_to_str(buf, buf_size, &s->type, varstr);
2714 pstrcat(buf, buf_size, "(");
2715 sa = s->next;
2716 while (sa != NULL) {
2717 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2718 pstrcat(buf, buf_size, buf1);
2719 sa = sa->next;
2720 if (sa)
2721 pstrcat(buf, buf_size, ", ");
2723 pstrcat(buf, buf_size, ")");
2724 goto no_var;
2725 case VT_PTR:
2726 s = type->ref;
2727 if (t & VT_ARRAY) {
2728 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2729 type_to_str(buf, buf_size, &s->type, buf1);
2730 goto no_var;
2732 pstrcpy(buf1, sizeof(buf1), "*");
2733 if (t & VT_CONSTANT)
2734 pstrcat(buf1, buf_size, "const ");
2735 if (t & VT_VOLATILE)
2736 pstrcat(buf1, buf_size, "volatile ");
2737 if (varstr)
2738 pstrcat(buf1, sizeof(buf1), varstr);
2739 type_to_str(buf, buf_size, &s->type, buf1);
2740 goto no_var;
2742 if (varstr) {
2743 pstrcat(buf, buf_size, " ");
2744 pstrcat(buf, buf_size, varstr);
2746 no_var: ;
2749 /* verify type compatibility to store vtop in 'dt' type, and generate
2750 casts if needed. */
2751 static void gen_assign_cast(CType *dt)
2753 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2754 char buf1[256], buf2[256];
2755 int dbt, sbt;
2757 st = &vtop->type; /* source type */
2758 dbt = dt->t & VT_BTYPE;
2759 sbt = st->t & VT_BTYPE;
2760 if (sbt == VT_VOID || dbt == VT_VOID) {
2761 if (sbt == VT_VOID && dbt == VT_VOID)
2762 ; /*
2763 It is Ok if both are void
2764 A test program:
2765 void func1() {}
2766 void func2() {
2767 return func1();
2769 gcc accepts this program
2771 else
2772 tcc_error("cannot cast from/to void");
2774 if (dt->t & VT_CONSTANT)
2775 tcc_warning("assignment of read-only location");
2776 switch(dbt) {
2777 case VT_PTR:
2778 /* special cases for pointers */
2779 /* '0' can also be a pointer */
2780 if (is_null_pointer(vtop))
2781 goto type_ok;
2782 /* accept implicit pointer to integer cast with warning */
2783 if (is_integer_btype(sbt)) {
2784 tcc_warning("assignment makes pointer from integer without a cast");
2785 goto type_ok;
2787 type1 = pointed_type(dt);
2788 /* a function is implicitly a function pointer */
2789 if (sbt == VT_FUNC) {
2790 if ((type1->t & VT_BTYPE) != VT_VOID &&
2791 !is_compatible_types(pointed_type(dt), st))
2792 tcc_warning("assignment from incompatible pointer type");
2793 goto type_ok;
2795 if (sbt != VT_PTR)
2796 goto error;
2797 type2 = pointed_type(st);
2798 if ((type1->t & VT_BTYPE) == VT_VOID ||
2799 (type2->t & VT_BTYPE) == VT_VOID) {
2800 /* void * can match anything */
2801 } else {
2802 /* exact type match, except for qualifiers */
2803 tmp_type1 = *type1;
2804 tmp_type2 = *type2;
2805 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2806 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2807 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2808 /* Like GCC, don't warn by default for mere changes
2809 in pointer target signedness. Do warn for different
2810 base types, though, in particular for unsigned enums
2811 and signed int targets. */
2812 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2813 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2814 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2816 else
2817 tcc_warning("assignment from incompatible pointer type");
2820 /* check const and volatile */
2821 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2822 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2823 tcc_warning("assignment discards qualifiers from pointer target type");
2824 break;
2825 case VT_BYTE:
2826 case VT_SHORT:
2827 case VT_INT:
2828 case VT_LLONG:
2829 if (sbt == VT_PTR || sbt == VT_FUNC) {
2830 tcc_warning("assignment makes integer from pointer without a cast");
2831 } else if (sbt == VT_STRUCT) {
2832 goto case_VT_STRUCT;
2834 /* XXX: more tests */
2835 break;
2836 case VT_STRUCT:
2837 case_VT_STRUCT:
2838 tmp_type1 = *dt;
2839 tmp_type2 = *st;
2840 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2841 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2842 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2843 error:
2844 type_to_str(buf1, sizeof(buf1), st, NULL);
2845 type_to_str(buf2, sizeof(buf2), dt, NULL);
2846 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2848 break;
2850 type_ok:
2851 gen_cast(dt);
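/* Note: gen_assign_cast() is deliberately permissive where gcc merely
   warns.  For example,
       int *p = 3;   // "assignment makes pointer from integer without a cast"
       int   i = p;  // "assignment makes integer from pointer without a cast"
   are both accepted with a warning, whereas assigning between
   incompatible struct types is a hard error via the 'error:' label. */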
2854 /* store vtop in lvalue pushed on stack */
2855 ST_FUNC void vstore(void)
2857 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2859 ft = vtop[-1].type.t;
2860 sbt = vtop->type.t & VT_BTYPE;
2861 dbt = ft & VT_BTYPE;
2862 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2863 (sbt == VT_INT && dbt == VT_SHORT))
2864 && !(vtop->type.t & VT_BITFIELD)) {
2865 /* optimize char/short casts */
2866 delayed_cast = VT_MUSTCAST;
2867 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2868 ((1 << VT_STRUCT_SHIFT) - 1));
2869 /* XXX: factorize */
2870 if (ft & VT_CONSTANT)
2871 tcc_warning("assignment of read-only location");
2872 } else {
2873 delayed_cast = 0;
2874 if (!(ft & VT_BITFIELD))
2875 gen_assign_cast(&vtop[-1].type);
2878 if (sbt == VT_STRUCT) {
2879 /* if structure, only generate pointer */
2880 /* structure assignment : generate memcpy */
2881 /* XXX: optimize if small size */
2882 size = type_size(&vtop->type, &align);
2884 /* destination */
2885 vswap();
2886 vtop->type.t = VT_PTR;
2887 gaddrof();
2889 /* address of memcpy() */
2890 #ifdef TCC_ARM_EABI
2891 if(!(align & 7))
2892 vpush_global_sym(&func_old_type, TOK_memcpy8);
2893 else if(!(align & 3))
2894 vpush_global_sym(&func_old_type, TOK_memcpy4);
2895 else
2896 #endif
2897 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2898 vpush_global_sym(&func_old_type, TOK_memmove);
2900 vswap();
2901 /* source */
2902 vpushv(vtop - 2);
2903 vtop->type.t = VT_PTR;
2904 gaddrof();
2905 /* type size */
2906 vpushi(size);
2907 gfunc_call(3);
2909 /* leave source on stack */
2910 } else if (ft & VT_BITFIELD) {
2911 /* bitfield store handling */
2913 /* save lvalue as expression result (example: s.b = s.a = n;) */
2914 vdup(), vtop[-1] = vtop[-2];
2916 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2917 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2918 /* remove bit field info to avoid loops */
2919 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2921 if((ft & VT_BTYPE) == VT_BOOL) {
2922 gen_cast(&vtop[-1].type);
2923 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2926 /* duplicate destination */
2927 vdup();
2928 vtop[-1] = vtop[-2];
2930 /* mask and shift source */
2931 if((ft & VT_BTYPE) != VT_BOOL) {
2932 if((ft & VT_BTYPE) == VT_LLONG) {
2933 vpushll((1ULL << bit_size) - 1ULL);
2934 } else {
2935 vpushi((1 << bit_size) - 1);
2937 gen_op('&');
2939 vpushi(bit_pos);
2940 gen_op(TOK_SHL);
2941 /* load destination, mask and or with source */
2942 vswap();
2943 if((ft & VT_BTYPE) == VT_LLONG) {
2944 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2945 } else {
2946 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2948 gen_op('&');
2949 gen_op('|');
2950 /* store result */
2951 vstore();
2952 /* ... and discard */
2953 vpop();
2955 } else {
2956 #ifdef CONFIG_TCC_BCHECK
2957 /* bound check case */
2958 if (vtop[-1].r & VT_MUSTBOUND) {
2959 vswap();
2960 gbound();
2961 vswap();
2963 #endif
2964 rc = RC_INT;
2965 if (is_float(ft)) {
2966 rc = RC_FLOAT;
2967 #ifdef TCC_TARGET_X86_64
2968 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2969 rc = RC_ST0;
2970 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2971 rc = RC_FRET;
2973 #endif
2975 r = gv(rc); /* generate value */
2976 /* if lvalue was saved on stack, must read it */
2977 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2978 SValue sv;
2979 t = get_reg(RC_INT);
2980 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2981 sv.type.t = VT_PTR;
2982 #else
2983 sv.type.t = VT_INT;
2984 #endif
2985 sv.r = VT_LOCAL | VT_LVAL;
2986 sv.c.i = vtop[-1].c.i;
2987 load(t, &sv);
2988 vtop[-1].r = t | VT_LVAL;
2990 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2991 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2992 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
2993 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
2994 #else
2995 if ((ft & VT_BTYPE) == VT_LLONG) {
2996 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
2997 #endif
2998 vtop[-1].type.t = load_type;
2999 store(r, vtop - 1);
3000 vswap();
3001 /* convert to int to increment easily */
3002 vtop->type.t = addr_type;
3003 gaddrof();
3004 vpushi(load_size);
3005 gen_op('+');
3006 vtop->r |= VT_LVAL;
3007 vswap();
3008 vtop[-1].type.t = load_type;
3009 /* XXX: it works because r2 is spilled last ! */
3010 store(vtop->r2, vtop - 1);
3011 } else {
3012 store(r, vtop - 1);
3015 vswap();
3016 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3017 vtop->r |= delayed_cast;
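/* Note: the bit-field branch of vstore() is a read-modify-write built
   from ordinary value-stack operations: the source is masked to
   bit_size bits and shifted to bit_pos, the destination word is masked
   with the complement, and the two are OR-ed and stored back.
   Roughly, for a field of width w at bit position p:
       dest = (dest & ~(((1 << w) - 1) << p)) | ((src & ((1 << w) - 1)) << p);
   The recursive vstore() call then performs the final plain store. */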
3021 /* post selects post- vs. pre-increment; c is the token ++ or -- */
3022 ST_FUNC void inc(int post, int c)
3024 test_lvalue();
3025 vdup(); /* save lvalue */
3026 if (post) {
3027 gv_dup(); /* duplicate value */
3028 vrotb(3);
3029 vrotb(3);
3031 /* add constant */
3032 vpushi(c - TOK_MID);
3033 gen_op('+');
3034 vstore(); /* store value */
3035 if (post)
3036 vpop(); /* if post op, return saved value */
3039 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3041 /* read the string */
3042 if (tok != TOK_STR)
3043 expect(msg);
3044 cstr_new(astr);
3045 while (tok == TOK_STR) {
3046 /* XXX: add \0 handling too ? */
3047 cstr_cat(astr, tokc.str.data, -1);
3048 next();
3050 cstr_ccat(astr, '\0');
3053 /* If I is >= 1 and a power of two, returns log2(i)+1.
3054 If I is 0 returns 0. */
3055 static int exact_log2p1(int i)
3057 int ret;
3058 if (!i)
3059 return 0;
3060 for (ret = 1; i >= 1 << 8; ret += 8)
3061 i >>= 8;
3062 if (i >= 1 << 4)
3063 ret += 4, i >>= 4;
3064 if (i >= 1 << 2)
3065 ret += 2, i >>= 2;
3066 if (i >= 1 << 1)
3067 ret++;
3068 return ret;
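/* For example: exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(8) == 4 and exact_log2p1(256) == 9, i.e. log2(n) + 1
   for a power of two n.  parse_attribute() below uses it to store
   aligned(n) compactly in ad->a.aligned. */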
3071 /* Parse GNUC __attribute__ extension. Currently, the following
3072 extensions are recognized:
3073 - aligned(n) : set data/function alignment.
3074 - packed : force data alignment to 1
3075 - section(x) : generate data/code in this section.
3076 - unused : currently ignored, but may be used someday.
3077 - regparm(n) : pass function parameters in registers (i386 only)
3079 static void parse_attribute(AttributeDef *ad)
3081 int t, n;
3082 CString astr;
3084 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3085 next();
3086 skip('(');
3087 skip('(');
3088 while (tok != ')') {
3089 if (tok < TOK_IDENT)
3090 expect("attribute name");
3091 t = tok;
3092 next();
3093 switch(t) {
3094 case TOK_SECTION1:
3095 case TOK_SECTION2:
3096 skip('(');
3097 parse_mult_str(&astr, "section name");
3098 ad->section = find_section(tcc_state, (char *)astr.data);
3099 skip(')');
3100 cstr_free(&astr);
3101 break;
3102 case TOK_ALIAS1:
3103 case TOK_ALIAS2:
3104 skip('(');
3105 parse_mult_str(&astr, "alias(\"target\")");
3106 ad->alias_target = /* save string as token, for later */
3107 tok_alloc((char*)astr.data, astr.size-1)->tok;
3108 skip(')');
3109 cstr_free(&astr);
3110 break;
3111 case TOK_VISIBILITY1:
3112 case TOK_VISIBILITY2:
3113 skip('(');
3114 parse_mult_str(&astr,
3115 "visibility(\"default|hidden|internal|protected\")");
3116 if (!strcmp (astr.data, "default"))
3117 ad->a.visibility = STV_DEFAULT;
3118 else if (!strcmp (astr.data, "hidden"))
3119 ad->a.visibility = STV_HIDDEN;
3120 else if (!strcmp (astr.data, "internal"))
3121 ad->a.visibility = STV_INTERNAL;
3122 else if (!strcmp (astr.data, "protected"))
3123 ad->a.visibility = STV_PROTECTED;
3124 else
3125 expect("visibility(\"default|hidden|internal|protected\")");
3126 skip(')');
3127 cstr_free(&astr);
3128 break;
3129 case TOK_ALIGNED1:
3130 case TOK_ALIGNED2:
3131 if (tok == '(') {
3132 next();
3133 n = expr_const();
3134 if (n <= 0 || (n & (n - 1)) != 0)
3135 tcc_error("alignment must be a positive power of two");
3136 skip(')');
3137 } else {
3138 n = MAX_ALIGN;
3140 ad->a.aligned = exact_log2p1(n);
3141 if (n != 1 << (ad->a.aligned - 1))
3142 tcc_error("alignment of %d is larger than implemented", n);
3143 break;
3144 case TOK_PACKED1:
3145 case TOK_PACKED2:
3146 ad->a.packed = 1;
3147 break;
3148 case TOK_WEAK1:
3149 case TOK_WEAK2:
3150 ad->a.weak = 1;
3151 break;
3152 case TOK_UNUSED1:
3153 case TOK_UNUSED2:
3154 /* currently, no need to handle it because tcc does not
3155 track unused objects */
3156 break;
3157 case TOK_NORETURN1:
3158 case TOK_NORETURN2:
3159 /* currently ignored: tcc makes no use of noreturn
3160 information */
3161 break;
3162 case TOK_CDECL1:
3163 case TOK_CDECL2:
3164 case TOK_CDECL3:
3165 ad->a.func_call = FUNC_CDECL;
3166 break;
3167 case TOK_STDCALL1:
3168 case TOK_STDCALL2:
3169 case TOK_STDCALL3:
3170 ad->a.func_call = FUNC_STDCALL;
3171 break;
3172 #ifdef TCC_TARGET_I386
3173 case TOK_REGPARM1:
3174 case TOK_REGPARM2:
3175 skip('(');
3176 n = expr_const();
3177 if (n > 3)
3178 n = 3;
3179 else if (n < 0)
3180 n = 0;
3181 if (n > 0)
3182 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3183 skip(')');
3184 break;
3185 case TOK_FASTCALL1:
3186 case TOK_FASTCALL2:
3187 case TOK_FASTCALL3:
3188 ad->a.func_call = FUNC_FASTCALLW;
3189 break;
3190 #endif
3191 case TOK_MODE:
3192 skip('(');
3193 switch(tok) {
3194 case TOK_MODE_DI:
3195 ad->a.mode = VT_LLONG + 1;
3196 break;
3197 case TOK_MODE_QI:
3198 ad->a.mode = VT_BYTE + 1;
3199 break;
3200 case TOK_MODE_HI:
3201 ad->a.mode = VT_SHORT + 1;
3202 break;
3203 case TOK_MODE_SI:
3204 case TOK_MODE_word:
3205 ad->a.mode = VT_INT + 1;
3206 break;
3207 default:
3208 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3209 break;
3211 next();
3212 skip(')');
3213 break;
3214 case TOK_DLLEXPORT:
3215 ad->a.func_export = 1;
3216 break;
3217 case TOK_DLLIMPORT:
3218 ad->a.func_import = 1;
3219 break;
3220 default:
3221 if (tcc_state->warn_unsupported)
3222 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3223 /* skip parameters */
3224 if (tok == '(') {
3225 int parenthesis = 0;
3226 do {
3227 if (tok == '(')
3228 parenthesis++;
3229 else if (tok == ')')
3230 parenthesis--;
3231 next();
3232 } while (parenthesis && tok != -1);
3234 break;
3236 if (tok != ',')
3237 break;
3238 next();
3240 skip(')');
3241 skip(')');
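/* A short example of what parse_attribute() accepts:
       int buf[32] __attribute__((aligned(16), unused));
       void fatal(const char *msg) __attribute__((noreturn));
   'aligned(16)' is stored as exact_log2p1(16) == 5 in ad->a.aligned,
   'unused' and 'noreturn' are recognized but otherwise ignored, and an
   unknown attribute is skipped together with its parameter list (with
   a warning when warn_unsupported is set). */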
3245 static Sym * find_field (CType *type, int v)
3247 Sym *s = type->ref;
3248 v |= SYM_FIELD;
3249 while ((s = s->next) != NULL) {
3250 if ((s->v & SYM_FIELD) &&
3251 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3252 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3253 Sym *ret = find_field (&s->type, v);
3254 if (ret)
3255 return ret;
3257 if (s->v == v)
3258 break;
3260 return s;
3263 static void struct_add_offset (Sym *s, int offset)
3265 while ((s = s->next) != NULL) {
3266 if ((s->v & SYM_FIELD) &&
3267 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3268 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3269 struct_add_offset(s->type.ref, offset);
3270 } else
3271 s->c += offset;
3275 static void struct_layout(CType *type, AttributeDef *ad)
3277 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3278 int pcc = !tcc_state->ms_bitfields;
3279 Sym *f;
3280 if (ad->a.aligned)
3281 maxalign = 1 << (ad->a.aligned - 1);
3282 else
3283 maxalign = 1;
3284 offset = 0;
3285 c = 0;
3286 bit_pos = 0;
3287 prevbt = VT_STRUCT; /* make it never match */
3288 prev_bit_size = 0;
3289 for (f = type->ref->next; f; f = f->next) {
3290 int typealign, bit_size;
3291 int size = type_size(&f->type, &typealign);
3292 if (f->type.t & VT_BITFIELD)
3293 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3294 else
3295 bit_size = -1;
3296 if (bit_size == 0 && pcc) {
3297 /* Zero-width bit-fields in PCC mode aren't affected
3298 by any packing (attribute or pragma). */
3299 align = typealign;
3300 } else if (f->r > 1) {
3301 align = f->r;
3302 } else if (ad->a.packed || f->r == 1) {
3303 align = 1;
3304 /* Packed fields or packed records don't let the base type
3305 influence the record's type alignment. */
3306 typealign = 1;
3307 } else {
3308 align = typealign;
3310 if (type->ref->type.t != TOK_STRUCT) {
3311 if (pcc && bit_size >= 0)
3312 size = (bit_size + 7) >> 3;
3313 /* Bit position is already zero from our caller. */
3314 offset = 0;
3315 if (size > c)
3316 c = size;
3317 } else if (bit_size < 0) {
3318 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3319 prevbt = VT_STRUCT;
3320 prev_bit_size = 0;
3321 c = (c + addbytes + align - 1) & -align;
3322 offset = c;
3323 if (size > 0)
3324 c += size;
3325 bit_pos = 0;
3326 } else {
3327 /* A bit-field. Layout is more complicated. There are two
3328 options TCC implements: PCC compatible and MS compatible
3329 (PCC compatible is what GCC uses for almost all targets).
3330 In PCC layout the overall size of the struct (in c) is
3331 _excluding_ the current run of bit-fields (that is,
3332 there are at least bit_pos additional bits after c). In
3333 MS layout c does include the current run of bit-fields.
3335 This matters for calculating the natural alignment buckets
3336 in PCC mode. */
3338 /* 'align' will be used to influence the record's alignment,
3339 so it's the max of specified and type alignment, except
3340 in certain cases that depend on the mode. */
3341 if (align < typealign)
3342 align = typealign;
3343 if (pcc) {
3344 /* In PCC layout a non-packed bit-field is placed adjacent
3345 to the preceding bit-fields, except if it would overflow
3346 its container (depending on base type) or it's a zero-width
3347 bit-field. Packed non-zero-width bit-fields always are
3348 placed adjacent. */
3349 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3350 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3351 if (bit_size == 0 ||
3352 (typealign != 1 &&
3353 (ofs2 / (typealign * 8)) > (size/typealign))) {
3354 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3355 bit_pos = 0;
3357 offset = c;
3358 /* In PCC layout named bit-fields influence the alignment
3359 of the containing struct using the base type's alignment,
3360 except for packed fields (which here have correct
3361 align/typealign). */
3362 if ((f->v & SYM_FIRST_ANOM))
3363 align = 1;
3364 } else {
3365 bt = f->type.t & VT_BTYPE;
3366 if ((bit_pos + bit_size > size * 8) ||
3367 (bit_size > 0) == (bt != prevbt)) {
3368 c = (c + typealign - 1) & -typealign;
3369 offset = c;
3370 bit_pos = 0;
3371 /* In MS bitfield mode a bit-field run always uses
3372 at least as many bits as the underlying type.
3373 To start a new run it's also required that this
3374 or the last bit-field had non-zero width. */
3375 if (bit_size || prev_bit_size)
3376 c += size;
3378 /* In MS layout the record's alignment is normally
3379 influenced by the field, except for a zero-width
3380 field at the start of a run (but by further zero-width
3381 fields it is again). */
3382 if (bit_size == 0 && prevbt != bt)
3383 align = 1;
3384 prevbt = bt;
3385 prev_bit_size = bit_size;
3387 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3388 | (bit_pos << VT_STRUCT_SHIFT);
3389 bit_pos += bit_size;
3390 if (pcc && bit_pos >= size * 8) {
3391 c += size;
3392 bit_pos -= size * 8;
3395 if (align > maxalign)
3396 maxalign = align;
3397 #if 0
3398 printf("set field %s offset=%d c=%d",
3399 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3400 if (f->type.t & VT_BITFIELD) {
3401 printf(" pos=%d size=%d",
3402 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3403 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3405 printf("\n");
3406 #endif
3408 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3409 Sym *ass;
3410 /* An anonymous struct/union. Adjust member offsets
3411 to reflect the real offset of our containing struct.
3412 Also set the offset of this anon member inside
3413 the outer struct to be zero. Via this it
3414 works when accessing the field offset directly
3415 (from base object), as well as when recursing
3416 members in initializer handling. */
3417 int v2 = f->type.ref->v;
3418 if (!(v2 & SYM_FIELD) &&
3419 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3420 Sym **pps;
3421 /* This happens only with MS extensions. The
3422 anon member has a named struct type, so it
3423 potentially is shared with other references.
3424 We need to unshare members so we can modify
3425 them. */
3426 ass = f->type.ref;
3427 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3428 &f->type.ref->type, 0,
3429 f->type.ref->c);
3430 pps = &f->type.ref->next;
3431 while ((ass = ass->next) != NULL) {
3432 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3433 pps = &((*pps)->next);
3435 *pps = NULL;
3437 struct_add_offset(f->type.ref, offset);
3438 f->c = 0;
3439 } else {
3440 f->c = offset;
3443 f->r = 0;
3445 /* store size and alignment */
3446 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3447 + maxalign - 1) & -maxalign;
3448 type->ref->r = maxalign;
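/* A small illustration of the default (PCC/gcc compatible) layout
   computed above:
       struct s { int a : 1; int b : 2; char c; };
   'a' and 'b' share one int-sized container at offset 0 (bit positions
   0 and 1), 'c' lands in the following byte, and the whole struct is
   padded to the int alignment, so sizeof(struct s) is 4 on the usual
   ILP32/LP64 targets.  The MS-compatible branch can lay out mixed-type
   bit-field runs differently. */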
3451 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3452 static void struct_decl(CType *type, AttributeDef *ad, int u)
3454 int a, v, size, align, flexible, alignoverride;
3455 long c;
3456 int bit_size, bsize, bt;
3457 Sym *s, *ss, **ps;
3458 AttributeDef ad1;
3459 CType type1, btype;
3461 a = tok; /* save decl type */
3462 next();
3463 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3464 parse_attribute(ad);
3465 if (tok != '{') {
3466 v = tok;
3467 next();
3468 /* struct already defined ? return it */
3469 if (v < TOK_IDENT)
3470 expect("struct/union/enum name");
3471 s = struct_find(v);
3472 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3473 if (s->type.t != a)
3474 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3475 goto do_decl;
3477 } else {
3478 v = anon_sym++;
3480 /* Record the original enum/struct/union token. */
3481 type1.t = a;
3482 type1.ref = NULL;
3483 /* we put an undefined size for struct/union */
3484 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3485 s->r = 0; /* default alignment is zero as gcc */
3486 /* put struct/union/enum name in type */
3487 do_decl:
3488 type->t = u;
3489 type->ref = s;
3491 if (tok == '{') {
3492 next();
3493 if (s->c != -1)
3494 tcc_error("struct/union/enum already defined");
3495 /* cannot be empty */
3496 c = 0;
3497 /* empty enums are not allowed */
3498 if (a == TOK_ENUM) {
3499 int seen_neg = 0;
3500 int seen_wide = 0;
3501 for(;;) {
3502 CType *t = &int_type;
3503 v = tok;
3504 if (v < TOK_UIDENT)
3505 expect("identifier");
3506 ss = sym_find(v);
3507 if (ss && !local_stack)
3508 tcc_error("redefinition of enumerator '%s'",
3509 get_tok_str(v, NULL));
3510 next();
3511 if (tok == '=') {
3512 next();
3513 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3514 c = expr_const64();
3515 #else
3516 /* We really want to support long long enums
3517 on i386 as well, but the Sym structure only
3518 holds a 'long' for associated constants,
3519 and enlarging it would bump its size (no
3520 available padding). So punt for now. */
3521 c = expr_const();
3522 #endif
3524 if (c < 0)
3525 seen_neg = 1;
3526 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3527 seen_wide = 1, t = &size_type;
3528 /* enum symbols have static storage */
3529 ss = sym_push(v, t, VT_CONST, c);
3530 ss->type.t |= VT_STATIC;
3531 if (tok != ',')
3532 break;
3533 next();
3534 c++;
3535 /* NOTE: we accept a trailing comma */
3536 if (tok == '}')
3537 break;
3539 if (!seen_neg)
3540 s->a.unsigned_enum = 1;
3541 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3542 skip('}');
3543 } else {
3544 ps = &s->next;
3545 flexible = 0;
3546 while (tok != '}') {
3547 if (!parse_btype(&btype, &ad1)) {
3548 skip(';');
3549 continue;
3551 while (1) {
3552 if (flexible)
3553 tcc_error("flexible array member '%s' not at the end of struct",
3554 get_tok_str(v, NULL));
3555 bit_size = -1;
3556 v = 0;
3557 type1 = btype;
3558 if (tok != ':') {
3559 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3560 if (v == 0) {
3561 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3562 expect("identifier");
3563 else {
3564 int v = btype.ref->v;
3565 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3566 if (tcc_state->ms_extensions == 0)
3567 expect("identifier");
3571 if (type_size(&type1, &align) < 0) {
3572 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3573 flexible = 1;
3574 else
3575 tcc_error("field '%s' has incomplete type",
3576 get_tok_str(v, NULL));
3578 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3579 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3580 tcc_error("invalid type for '%s'",
3581 get_tok_str(v, NULL));
3583 if (tok == ':') {
3584 next();
3585 bit_size = expr_const();
3586 /* XXX: handle v = 0 case for messages */
3587 if (bit_size < 0)
3588 tcc_error("negative width in bit-field '%s'",
3589 get_tok_str(v, NULL));
3590 if (v && bit_size == 0)
3591 tcc_error("zero width for bit-field '%s'",
3592 get_tok_str(v, NULL));
3593 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3594 parse_attribute(&ad1);
3596 size = type_size(&type1, &align);
3597 /* Only remember non-default alignment. */
3598 alignoverride = 0;
3599 if (ad1.a.aligned) {
3600 int speca = 1 << (ad1.a.aligned - 1);
3601 alignoverride = speca;
3602 } else if (ad1.a.packed || ad->a.packed) {
3603 alignoverride = 1;
3604 } else if (*tcc_state->pack_stack_ptr) {
3605 if (align > *tcc_state->pack_stack_ptr)
3606 alignoverride = *tcc_state->pack_stack_ptr;
3608 if (bit_size >= 0) {
3609 bt = type1.t & VT_BTYPE;
3610 if (bt != VT_INT &&
3611 bt != VT_BYTE &&
3612 bt != VT_SHORT &&
3613 bt != VT_BOOL &&
3614 bt != VT_ENUM &&
3615 bt != VT_LLONG)
3616 tcc_error("bitfields must have scalar type");
3617 bsize = size * 8;
3618 if (bit_size > bsize) {
3619 tcc_error("width of '%s' exceeds its type",
3620 get_tok_str(v, NULL));
3621 } else if (bit_size == bsize) {
3622 /* no need for bit fields */
3624 } else {
3625 type1.t |= VT_BITFIELD |
3626 (0 << VT_STRUCT_SHIFT) |
3627 (bit_size << (VT_STRUCT_SHIFT + 6));
3630 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3631 /* Remember we've seen a real field to check
3632 for placement of flexible array member. */
3633 c = 1;
3635 /* If member is a struct or bit-field, enforce
3636 placing into the struct (as anonymous). */
3637 if (v == 0 &&
3638 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3639 bit_size >= 0)) {
3640 v = anon_sym++;
3642 if (v) {
3643 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3644 *ps = ss;
3645 ps = &ss->next;
3647 if (tok == ';' || tok == TOK_EOF)
3648 break;
3649 skip(',');
3651 skip(';');
3653 skip('}');
3654 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3655 parse_attribute(ad);
3656 struct_layout(type, ad);
3661 /* return 1 if the basic type is a size specifier (short, long, long long) */
3662 ST_FUNC int is_btype_size(int bt)
3664 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3667 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3668 are added to the element type, copied because it could be a typedef. */
3669 static void parse_btype_qualify(CType *type, int qualifiers)
3671 while (type->t & VT_ARRAY) {
3672 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3673 type = &type->ref->type;
3675 type->t |= qualifiers;
3678 /* return 0 if no type declaration. otherwise, return the basic type
3679 and skip it.
3681 static int parse_btype(CType *type, AttributeDef *ad)
3683 int t, u, bt_size, complete, type_found, typespec_found;
3684 Sym *s;
3685 CType type1;
3687 memset(ad, 0, sizeof(AttributeDef));
3688 complete = 0;
3689 type_found = 0;
3690 typespec_found = 0;
3691 t = 0;
3692 while(1) {
3693 switch(tok) {
3694 case TOK_EXTENSION:
3695 /* currently, we really ignore extension */
3696 next();
3697 continue;
3699 /* basic types */
3700 case TOK_CHAR:
3701 u = VT_BYTE;
3702 basic_type:
3703 next();
3704 basic_type1:
3705 if (complete)
3706 tcc_error("too many basic types");
3707 t |= u;
3708 bt_size = is_btype_size (u & VT_BTYPE);
3709 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3710 complete = 1;
3711 typespec_found = 1;
3712 break;
3713 case TOK_VOID:
3714 u = VT_VOID;
3715 goto basic_type;
3716 case TOK_SHORT:
3717 u = VT_SHORT;
3718 goto basic_type;
3719 case TOK_INT:
3720 u = VT_INT;
3721 goto basic_type;
3722 case TOK_LONG:
3723 next();
3724 if ((t & VT_BTYPE) == VT_DOUBLE) {
3725 #ifndef TCC_TARGET_PE
3726 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3727 #endif
3728 } else if ((t & VT_BTYPE) == VT_LONG) {
3729 t = (t & ~VT_BTYPE) | VT_LLONG;
3730 } else {
3731 u = VT_LONG;
3732 goto basic_type1;
3734 break;
3735 #ifdef TCC_TARGET_ARM64
3736 case TOK_UINT128:
3737 /* GCC's __uint128_t appears in some Linux header files. Make it a
3738 synonym for long double to get the size and alignment right. */
3739 u = VT_LDOUBLE;
3740 goto basic_type;
3741 #endif
3742 case TOK_BOOL:
3743 u = VT_BOOL;
3744 goto basic_type;
3745 case TOK_FLOAT:
3746 u = VT_FLOAT;
3747 goto basic_type;
3748 case TOK_DOUBLE:
3749 next();
3750 if ((t & VT_BTYPE) == VT_LONG) {
3751 #ifdef TCC_TARGET_PE
3752 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3753 #else
3754 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3755 #endif
3756 } else {
3757 u = VT_DOUBLE;
3758 goto basic_type1;
3760 break;
3761 case TOK_ENUM:
3762 struct_decl(&type1, ad, VT_ENUM);
3763 basic_type2:
3764 u = type1.t;
3765 type->ref = type1.ref;
3766 goto basic_type1;
3767 case TOK_STRUCT:
3768 case TOK_UNION:
3769 struct_decl(&type1, ad, VT_STRUCT);
3770 goto basic_type2;
3772 /* type modifiers */
3773 case TOK_CONST1:
3774 case TOK_CONST2:
3775 case TOK_CONST3:
3776 type->t = t;
3777 parse_btype_qualify(type, VT_CONSTANT);
3778 t = type->t;
3779 next();
3780 break;
3781 case TOK_VOLATILE1:
3782 case TOK_VOLATILE2:
3783 case TOK_VOLATILE3:
3784 type->t = t;
3785 parse_btype_qualify(type, VT_VOLATILE);
3786 t = type->t;
3787 next();
3788 break;
3789 case TOK_SIGNED1:
3790 case TOK_SIGNED2:
3791 case TOK_SIGNED3:
3792 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3793 tcc_error("signed and unsigned modifier");
3794 typespec_found = 1;
3795 t |= VT_DEFSIGN;
3796 next();
3797 break;
3798 case TOK_REGISTER:
3799 case TOK_AUTO:
3800 case TOK_RESTRICT1:
3801 case TOK_RESTRICT2:
3802 case TOK_RESTRICT3:
3803 next();
3804 break;
3805 case TOK_UNSIGNED:
3806 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3807 tcc_error("signed and unsigned modifier");
3808 t |= VT_DEFSIGN | VT_UNSIGNED;
3809 next();
3810 typespec_found = 1;
3811 break;
3813 /* storage */
3814 case TOK_EXTERN:
3815 t |= VT_EXTERN;
3816 next();
3817 break;
3818 case TOK_STATIC:
3819 t |= VT_STATIC;
3820 next();
3821 break;
3822 case TOK_TYPEDEF:
3823 t |= VT_TYPEDEF;
3824 next();
3825 break;
3826 case TOK_INLINE1:
3827 case TOK_INLINE2:
3828 case TOK_INLINE3:
3829 t |= VT_INLINE;
3830 next();
3831 break;
3833 /* GNUC attribute */
3834 case TOK_ATTRIBUTE1:
3835 case TOK_ATTRIBUTE2:
3836 parse_attribute(ad);
3837 if (ad->a.mode) {
3838 u = ad->a.mode -1;
3839 t = (t & ~VT_BTYPE) | u;
3841 break;
3842 /* GNUC typeof */
3843 case TOK_TYPEOF1:
3844 case TOK_TYPEOF2:
3845 case TOK_TYPEOF3:
3846 next();
3847 parse_expr_type(&type1);
3848 /* remove all storage modifiers except typedef */
3849 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3850 goto basic_type2;
3851 default:
3852 if (typespec_found)
3853 goto the_end;
3854 s = sym_find(tok);
3855 if (!s || !(s->type.t & VT_TYPEDEF))
3856 goto the_end;
3858 type->t = ((s->type.t & ~VT_TYPEDEF) |
3859 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3860 type->ref = s->type.ref;
3861 if (t & (VT_CONSTANT | VT_VOLATILE))
3862 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3863 t = type->t;
3865 if (s->r) {
3866 /* get attributes from typedef */
3867 if (0 == ad->a.aligned)
3868 ad->a.aligned = s->a.aligned;
3869 if (0 == ad->a.func_call)
3870 ad->a.func_call = s->a.func_call;
3871 ad->a.packed |= s->a.packed;
3873 next();
3874 typespec_found = 1;
3875 break;
3877 type_found = 1;
3879 the_end:
3880 if (tcc_state->char_is_unsigned) {
3881 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3882 t |= VT_UNSIGNED;
3885 /* 'long' is never used as a type by itself */
3886 if ((t & VT_BTYPE) == VT_LONG)
3887 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3888 defined TCC_TARGET_PE
3889 t = (t & ~VT_BTYPE) | VT_INT;
3890 #else
3891 t = (t & ~VT_BTYPE) | VT_LLONG;
3892 #endif
3893 type->t = t;
3894 return type_found;
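/* Note: VT_LONG only exists while the declaration specifiers are being
   collected; the #if at the end of parse_btype() maps it to VT_INT on
   32-bit targets and on PE, and to VT_LLONG on non-PE x86-64/arm64.
   So 'unsigned long x;' ends up as an unsigned int on i386 but as an
   unsigned long long on x86-64 Linux. */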
3897 /* convert a function parameter type (array to pointer and function to
3898 function pointer) */
3899 static inline void convert_parameter_type(CType *pt)
3901 /* remove const and volatile qualifiers (XXX: const could be used
3902 to indicate a const function parameter) */
3903 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3904 /* array must be transformed to pointer according to ANSI C */
3905 pt->t &= ~VT_ARRAY;
3906 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3907 mk_pointer(pt);
3911 ST_FUNC void parse_asm_str(CString *astr)
3913 skip('(');
3914 parse_mult_str(astr, "string constant");
3917 /* Parse an asm label and return the token */
3918 static int asm_label_instr(void)
3920 int v;
3921 CString astr;
3923 next();
3924 parse_asm_str(&astr);
3925 skip(')');
3926 #ifdef ASM_DEBUG
3927 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3928 #endif
3929 v = tok_alloc(astr.data, astr.size - 1)->tok;
3930 cstr_free(&astr);
3931 return v;
3934 static void post_type(CType *type, AttributeDef *ad, int storage)
3936 int n, l, t1, arg_size, align;
3937 Sym **plast, *s, *first;
3938 AttributeDef ad1;
3939 CType pt;
3941 if (tok == '(') {
3942 /* function declaration */
3943 next();
3944 l = 0;
3945 first = NULL;
3946 plast = &first;
3947 arg_size = 0;
3948 if (tok != ')') {
3949 for(;;) {
3950 /* read param name and compute offset */
3951 if (l != FUNC_OLD) {
3952 if (!parse_btype(&pt, &ad1)) {
3953 if (l) {
3954 tcc_error("invalid type");
3955 } else {
3956 l = FUNC_OLD;
3957 goto old_proto;
3960 l = FUNC_NEW;
3961 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3962 break;
3963 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3964 if ((pt.t & VT_BTYPE) == VT_VOID)
3965 tcc_error("parameter declared as void");
3966 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3967 } else {
3968 old_proto:
3969 n = tok;
3970 if (n < TOK_UIDENT)
3971 expect("identifier");
3972 pt.t = VT_INT;
3973 next();
3975 convert_parameter_type(&pt);
3976 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3977 *plast = s;
3978 plast = &s->next;
3979 if (tok == ')')
3980 break;
3981 skip(',');
3982 if (l == FUNC_NEW && tok == TOK_DOTS) {
3983 l = FUNC_ELLIPSIS;
3984 next();
3985 break;
3989 /* if no parameters, then old type prototype */
3990 if (l == 0)
3991 l = FUNC_OLD;
3992 skip(')');
3993 /* NOTE: const is ignored in returned type as it has a special
3994 meaning in gcc / C++ */
3995 type->t &= ~VT_CONSTANT;
3996 /* some ancient pre-K&R C allows a function to return an array
3997 and the array brackets to be put after the arguments, such
3998 that "int c()[]" means something like "int[] c()" */
3999 if (tok == '[') {
4000 next();
4001 skip(']'); /* only handle simple "[]" */
4002 type->t |= VT_PTR;
4004 /* we push an anonymous symbol which will contain the function prototype */
4005 ad->a.func_args = arg_size;
4006 s = sym_push(SYM_FIELD, type, 0, l);
4007 s->a = ad->a;
4008 s->next = first;
4009 type->t = VT_FUNC;
4010 type->ref = s;
4011 } else if (tok == '[') {
4012 int saved_nocode_wanted = nocode_wanted;
4013 /* array definition */
4014 next();
4015 if (tok == TOK_RESTRICT1)
4016 next();
4017 n = -1;
4018 t1 = 0;
4019 if (tok != ']') {
4020 if (!local_stack || (storage & VT_STATIC))
4021 vpushi(expr_const());
4022 else {
4023 /* A VLA's length (VLAs can only happen with local_stack && !VT_STATIC)
4024 must always be evaluated, even under nocode_wanted,
4025 so that its size slot is initialized (e.g. under sizeof
4026 or typeof). */
4027 nocode_wanted = 0;
4028 gexpr();
4030 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4031 n = vtop->c.i;
4032 if (n < 0)
4033 tcc_error("invalid array size");
4034 } else {
4035 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4036 tcc_error("size of variable length array should be an integer");
4037 t1 = VT_VLA;
4040 skip(']');
4041 /* parse next post type */
4042 post_type(type, ad, storage);
4043 if (type->t == VT_FUNC)
4044 tcc_error("declaration of an array of functions");
4045 t1 |= type->t & VT_VLA;
4047 if (t1 & VT_VLA) {
4048 loc -= type_size(&int_type, &align);
4049 loc &= -align;
4050 n = loc;
4052 vla_runtime_type_size(type, &align);
4053 gen_op('*');
4054 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4055 vswap();
4056 vstore();
4058 if (n != -1)
4059 vpop();
4060 nocode_wanted = saved_nocode_wanted;
4062 /* we push an anonymous symbol which will contain the array
4063 element type */
4064 s = sym_push(SYM_FIELD, type, 0, n);
4065 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4066 type->ref = s;
4070 /* Parse a type declaration (except basic type), and return the type
4071 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4072 expected. 'type' should contain the basic type. 'ad' is the
4073 attribute definition of the basic type. It can be modified by
4074 type_decl().
4076 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4078 Sym *s;
4079 CType type1, *type2;
4080 int qualifiers, storage;
4082 while (tok == '*') {
4083 qualifiers = 0;
4084 redo:
4085 next();
4086 switch(tok) {
4087 case TOK_CONST1:
4088 case TOK_CONST2:
4089 case TOK_CONST3:
4090 qualifiers |= VT_CONSTANT;
4091 goto redo;
4092 case TOK_VOLATILE1:
4093 case TOK_VOLATILE2:
4094 case TOK_VOLATILE3:
4095 qualifiers |= VT_VOLATILE;
4096 goto redo;
4097 case TOK_RESTRICT1:
4098 case TOK_RESTRICT2:
4099 case TOK_RESTRICT3:
4100 goto redo;
4101 /* XXX: clarify attribute handling */
4102 case TOK_ATTRIBUTE1:
4103 case TOK_ATTRIBUTE2:
4104 parse_attribute(ad);
4105 break;
4107 mk_pointer(type);
4108 type->t |= qualifiers;
4111 /* recursive type */
4112 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4113 type1.t = 0; /* XXX: same as int */
4114 if (tok == '(') {
4115 next();
4116 /* XXX: it is not correct to modify 'ad' at this point, but
4117 the syntax is not clear */
4118 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4119 parse_attribute(ad);
4120 type_decl(&type1, ad, v, td);
4121 skip(')');
4122 } else {
4123 /* type identifier */
4124 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4125 *v = tok;
4126 next();
4127 } else {
4128 if (!(td & TYPE_ABSTRACT))
4129 expect("identifier");
4130 *v = 0;
4133 storage = type->t & VT_STORAGE;
4134 type->t &= ~VT_STORAGE;
4135 post_type(type, ad, storage);
4136 type->t |= storage;
4137 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4138 parse_attribute(ad);
4140 if (!type1.t)
4141 return;
4142 /* append type at the end of type1 */
4143 type2 = &type1;
4144 for(;;) {
4145 s = type2->ref;
4146 type2 = &s->type;
4147 if (!type2->t) {
4148 *type2 = *type;
4149 break;
4152 *type = type1;
4155 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4156 ST_FUNC int lvalue_type(int t)
4158 int bt, r;
4159 r = VT_LVAL;
4160 bt = t & VT_BTYPE;
4161 if (bt == VT_BYTE || bt == VT_BOOL)
4162 r |= VT_LVAL_BYTE;
4163 else if (bt == VT_SHORT)
4164 r |= VT_LVAL_SHORT;
4165 else
4166 return r;
4167 if (t & VT_UNSIGNED)
4168 r |= VT_LVAL_UNSIGNED;
4169 return r;
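/* For example, lvalue_type(VT_BYTE | VT_UNSIGNED) yields
   VT_LVAL | VT_LVAL_BYTE | VT_LVAL_UNSIGNED, so the load can be done
   as a zero-extending byte access, whereas a plain int lvalue is just
   VT_LVAL. */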
4172 /* indirection with full error checking and bound check */
4173 ST_FUNC void indir(void)
4175 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4176 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4177 return;
4178 expect("pointer");
4180 if (vtop->r & VT_LVAL)
4181 gv(RC_INT);
4182 vtop->type = *pointed_type(&vtop->type);
4183 /* Arrays and functions are never lvalues */
4184 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4185 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4186 vtop->r |= lvalue_type(vtop->type.t);
4187 /* if bound checking, the referenced pointer must be checked */
4188 #ifdef CONFIG_TCC_BCHECK
4189 if (tcc_state->do_bounds_check)
4190 vtop->r |= VT_MUSTBOUND;
4191 #endif
4195 /* pass a parameter to a function and do type checking and casting */
4196 static void gfunc_param_typed(Sym *func, Sym *arg)
4198 int func_type;
4199 CType type;
4201 func_type = func->c;
4202 if (func_type == FUNC_OLD ||
4203 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4204 /* default casting : only need to convert float to double */
4205 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4206 type.t = VT_DOUBLE;
4207 gen_cast(&type);
4208 } else if (vtop->type.t & VT_BITFIELD) {
4209 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4210 type.ref = vtop->type.ref;
4211 gen_cast(&type);
4213 } else if (arg == NULL) {
4214 tcc_error("too many arguments to function");
4215 } else {
4216 type = arg->type;
4217 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4218 gen_assign_cast(&type);
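/* Note: for old-style prototypes and for arguments matching the "..."
   of a variadic function, only the default conversion handled here is
   applied: float becomes double (and a bit-field value is widened to
   its underlying type).  A prototyped parameter instead goes through
   gen_assign_cast(), exactly as an assignment would.  E.g. the float
   argument in printf("%f", f) is really passed as a double. */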
4222 /* parse an expression of the form '(type)' or '(expr)' and return its
4223 type */
4224 static void parse_expr_type(CType *type)
4226 int n;
4227 AttributeDef ad;
4229 skip('(');
4230 if (parse_btype(type, &ad)) {
4231 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4232 } else {
4233 expr_type(type);
4235 skip(')');
4238 static void parse_type(CType *type)
4240 AttributeDef ad;
4241 int n;
4243 if (!parse_btype(type, &ad)) {
4244 expect("type");
4246 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4249 static void vpush_tokc(int t)
4251 CType type;
4252 type.t = t;
4253 type.ref = 0;
4254 vsetc(&type, VT_CONST, &tokc);
4257 ST_FUNC void unary(void)
4259 int n, t, align, size, r, sizeof_caller;
4260 CType type;
4261 Sym *s;
4262 AttributeDef ad;
4264 sizeof_caller = in_sizeof;
4265 in_sizeof = 0;
4266 /* XXX: GCC 2.95.3 does not generate a jump table for this switch although
4267 it would be better here */
4268 tok_next:
4269 switch(tok) {
4270 case TOK_EXTENSION:
4271 next();
4272 goto tok_next;
4273 case TOK_CINT:
4274 case TOK_CCHAR:
4275 case TOK_LCHAR:
4276 vpushi(tokc.i);
4277 next();
4278 break;
4279 case TOK_CUINT:
4280 vpush_tokc(VT_INT | VT_UNSIGNED);
4281 next();
4282 break;
4283 case TOK_CLLONG:
4284 vpush_tokc(VT_LLONG);
4285 next();
4286 break;
4287 case TOK_CULLONG:
4288 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4289 next();
4290 break;
4291 case TOK_CFLOAT:
4292 vpush_tokc(VT_FLOAT);
4293 next();
4294 break;
4295 case TOK_CDOUBLE:
4296 vpush_tokc(VT_DOUBLE);
4297 next();
4298 break;
4299 case TOK_CLDOUBLE:
4300 vpush_tokc(VT_LDOUBLE);
4301 next();
4302 break;
4303 case TOK___FUNCTION__:
4304 if (!gnu_ext)
4305 goto tok_identifier;
4306 /* fall thru */
4307 case TOK___FUNC__:
4309 void *ptr;
4310 int len;
4311 /* special function name identifier */
4312 len = strlen(funcname) + 1;
4313 /* generate char[len] type */
4314 type.t = VT_BYTE;
4315 mk_pointer(&type);
4316 type.t |= VT_ARRAY;
4317 type.ref->c = len;
4318 vpush_ref(&type, data_section, data_section->data_offset, len);
4319 ptr = section_ptr_add(data_section, len);
4320 memcpy(ptr, funcname, len);
4321 next();
4323 break;
4324 case TOK_LSTR:
4325 #ifdef TCC_TARGET_PE
4326 t = VT_SHORT | VT_UNSIGNED;
4327 #else
4328 t = VT_INT;
4329 #endif
4330 goto str_init;
4331 case TOK_STR:
4332 /* string parsing */
4333 t = VT_BYTE;
4334 str_init:
4335 if (tcc_state->warn_write_strings)
4336 t |= VT_CONSTANT;
4337 type.t = t;
4338 mk_pointer(&type);
4339 type.t |= VT_ARRAY;
4340 memset(&ad, 0, sizeof(AttributeDef));
4341 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4342 break;
4343 case '(':
4344 next();
4345 /* cast ? */
4346 if (parse_btype(&type, &ad)) {
4347 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4348 skip(')');
4349 /* check ISOC99 compound literal */
4350 if (tok == '{') {
4351 /* data is allocated locally by default */
4352 if (global_expr)
4353 r = VT_CONST;
4354 else
4355 r = VT_LOCAL;
4356 /* all except arrays are lvalues */
4357 if (!(type.t & VT_ARRAY))
4358 r |= lvalue_type(type.t);
4359 memset(&ad, 0, sizeof(AttributeDef));
4360 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4361 } else {
4362 if (sizeof_caller) {
4363 vpush(&type);
4364 return;
4366 unary();
4367 gen_cast(&type);
4369 } else if (tok == '{') {
4370 if (const_wanted)
4371 tcc_error("expected constant");
4372 /* save all registers */
4373 save_regs(0);
4374 /* statement expression : we do not accept break/continue
4375 inside as GCC does */
4376 block(NULL, NULL, 1);
4377 skip(')');
4378 } else {
4379 gexpr();
4380 skip(')');
4382 break;
4383 case '*':
4384 next();
4385 unary();
4386 indir();
4387 break;
4388 case '&':
4389 next();
4390 unary();
4391 /* function names must be treated as function pointers,
4392 except for unary '&' and sizeof. Since we consider that
4393 functions are not lvalues, we only have to handle this
4394 here and in function calls. */
4395 /* arrays can also be used although they are not lvalues */
4396 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4397 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4398 test_lvalue();
4399 mk_pointer(&vtop->type);
4400 gaddrof();
4401 break;
4402 case '!':
4403 next();
4404 unary();
4405 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4406 CType boolean;
4407 boolean.t = VT_BOOL;
4408 gen_cast(&boolean);
4409 vtop->c.i = !vtop->c.i;
4410 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4411 vtop->c.i ^= 1;
4412 else {
4413 save_regs(1);
4414 vseti(VT_JMP, gvtst(1, 0));
4416 break;
4417 case '~':
4418 next();
4419 unary();
4420 vpushi(-1);
4421 gen_op('^');
4422 break;
4423 case '+':
4424 next();
4425 unary();
4426 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4427 tcc_error("pointer not accepted for unary plus");
4428 /* In order to force a cast, we add zero, except for floating point
4429 where we really need a no-op (otherwise -0.0 would be transformed
4430 into +0.0). */
4431 if (!is_float(vtop->type.t)) {
4432 vpushi(0);
4433 gen_op('+');
4435 break;
4436 case TOK_SIZEOF:
4437 case TOK_ALIGNOF1:
4438 case TOK_ALIGNOF2:
4439 t = tok;
4440 next();
4441 in_sizeof++;
4442 unary_type(&type); // this in effect performs in_sizeof = 0
4443 size = type_size(&type, &align);
4444 if (t == TOK_SIZEOF) {
4445 if (!(type.t & VT_VLA)) {
4446 if (size < 0)
4447 tcc_error("sizeof applied to an incomplete type");
4448 vpushs(size);
4449 } else {
4450 vla_runtime_type_size(&type, &align);
4452 } else {
4453 vpushs(align);
4455 vtop->type.t |= VT_UNSIGNED;
4456 break;
4458 case TOK_builtin_expect:
4460 /* __builtin_expect is a no-op for now */
4461 next();
4462 skip('(');
4463 expr_eq();
4464 skip(',');
4465 nocode_wanted++;
4466 expr_lor_const();
4467 vpop();
4468 nocode_wanted--;
4469 skip(')');
4471 break;
4472 case TOK_builtin_types_compatible_p:
4474 CType type1, type2;
4475 next();
4476 skip('(');
4477 parse_type(&type1);
4478 skip(',');
4479 parse_type(&type2);
4480 skip(')');
4481 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4482 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4483 vpushi(is_compatible_types(&type1, &type2));
4485 break;
4486 case TOK_builtin_choose_expr:
4488 int64_t c;
4489 next();
4490 skip('(');
4491 c = expr_const64();
4492 skip(',');
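/* only the selected operand generates code: the other one is still
   parsed, but with nocode_wanted set, and its value is popped */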
4493 if (!c) {
4494 nocode_wanted++;
4496 expr_eq();
4497 if (!c) {
4498 vpop();
4499 nocode_wanted--;
4501 skip(',');
4502 if (c) {
4503 nocode_wanted++;
4505 expr_eq();
4506 if (c) {
4507 vpop();
4508 nocode_wanted--;
4510 skip(')');
4512 break;
4513 case TOK_builtin_constant_p:
4515 int res;
4516 next();
4517 skip('(');
4518 nocode_wanted++;
4519 gexpr();
4520 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4521 vpop();
4522 nocode_wanted--;
4523 skip(')');
4524 vpushi(res);
4526 break;
4527 case TOK_builtin_frame_address:
4528 case TOK_builtin_return_address:
4530 int tok1 = tok;
4531 int level;
4532 CType type;
4533 next();
4534 skip('(');
4535 if (tok != TOK_CINT) {
4536 tcc_error("%s only takes positive integers",
4537 tok1 == TOK_builtin_return_address ?
4538 "__builtin_return_address" :
4539 "__builtin_frame_address");
4541 level = (uint32_t)tokc.i;
4542 next();
4543 skip(')');
4544 type.t = VT_VOID;
4545 mk_pointer(&type);
4546 vset(&type, VT_LOCAL, 0); /* local frame */
4547 while (level--) {
4548 mk_pointer(&vtop->type);
4549 indir(); /* -> parent frame */
4551 if (tok1 == TOK_builtin_return_address) {
4552 // assume return address is just above frame pointer on stack
4553 vpushi(PTR_SIZE);
4554 gen_op('+');
4555 mk_pointer(&vtop->type);
4556 indir();
4559 break;
4560 #ifdef TCC_TARGET_X86_64
4561 #ifdef TCC_TARGET_PE
4562 case TOK_builtin_va_start:
4564 next();
4565 skip('(');
4566 expr_eq();
4567 skip(',');
4568 expr_eq();
4569 skip(')');
4570 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4571 tcc_error("__builtin_va_start expects a local variable");
4572 vtop->r &= ~(VT_LVAL | VT_REF);
4573 vtop->type = char_pointer_type;
4574 vtop->c.i += 8;
4575 vstore();
4577 break;
4578 #else
4579 case TOK_builtin_va_arg_types:
4581 CType type;
4582 next();
4583 skip('(');
4584 parse_type(&type);
4585 skip(')');
4586 vpushi(classify_x86_64_va_arg(&type));
4588 break;
4589 #endif
4590 #endif
4592 #ifdef TCC_TARGET_ARM64
4593 case TOK___va_start: {
4594 next();
4595 skip('(');
4596 expr_eq();
4597 skip(',');
4598 expr_eq();
4599 skip(')');
4600 //xx check types
4601 gen_va_start();
4602 vpushi(0);
4603 vtop->type.t = VT_VOID;
4604 break;
4606 case TOK___va_arg: {
4607 CType type;
4608 next();
4609 skip('(');
4610 expr_eq();
4611 skip(',');
4612 parse_type(&type);
4613 skip(')');
4614 //xx check types
4615 gen_va_arg(&type);
4616 vtop->type = type;
4617 break;
4619 case TOK___arm64_clear_cache: {
4620 next();
4621 skip('(');
4622 expr_eq();
4623 skip(',');
4624 expr_eq();
4625 skip(')');
4626 gen_clear_cache();
4627 vpushi(0);
4628 vtop->type.t = VT_VOID;
4629 break;
4631 #endif
4632 /* pre operations */
4633 case TOK_INC:
4634 case TOK_DEC:
4635 t = tok;
4636 next();
4637 unary();
4638 inc(0, t);
4639 break;
4640 case '-':
4641 next();
4642 unary();
4643 t = vtop->type.t & VT_BTYPE;
4644 if (is_float(t)) {
4645 /* In IEEE negate(x) isn't subtract(0,x), but rather
4646 subtract(-0, x). */
4647 vpush(&vtop->type);
4648 if (t == VT_FLOAT)
4649 vtop->c.f = -0.0f;
4650 else if (t == VT_DOUBLE)
4651 vtop->c.d = -0.0;
4652 else
4653 vtop->c.ld = -0.0;
4654 } else
4655 vpushi(0);
4656 vswap();
4657 gen_op('-');
4658 break;
4659 case TOK_LAND:
4660 if (!gnu_ext)
4661 goto tok_identifier;
4662 next();
4663 /* allow taking the address of a label */
4664 if (tok < TOK_UIDENT)
4665 expect("label identifier");
4666 s = label_find(tok);
4667 if (!s) {
4668 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4669 } else {
4670 if (s->r == LABEL_DECLARED)
4671 s->r = LABEL_FORWARD;
4673 if (!s->type.t) {
4674 s->type.t = VT_VOID;
4675 mk_pointer(&s->type);
4676 s->type.t |= VT_STATIC;
4678 vpushsym(&s->type, s);
4679 next();
4680 break;
4682 // special qnan , snan and infinity values
4683 case TOK___NAN__:
4684 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4685 next();
4686 break;
4687 case TOK___SNAN__:
4688 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4689 next();
4690 break;
4691 case TOK___INF__:
4692 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4693 next();
4694 break;
4696 default:
4697 tok_identifier:
4698 t = tok;
4699 next();
4700 if (t < TOK_UIDENT)
4701 expect("identifier");
4702 s = sym_find(t);
4703 if (!s) {
4704 const char *name = get_tok_str(t, NULL);
4705 if (tok != '(')
4706 tcc_error("'%s' undeclared", name);
4707 /* for simple function calls, we tolerate an undeclared
4708 external reference to an int() function */
4709 if (tcc_state->warn_implicit_function_declaration
4710 #ifdef TCC_TARGET_PE
4711 /* people must be warned about using undeclared WINAPI functions
4712 (which usually start with an uppercase letter) */
4713 || (name[0] >= 'A' && name[0] <= 'Z')
4714 #endif
4716 tcc_warning("implicit declaration of function '%s'", name);
4717 s = external_global_sym(t, &func_old_type, 0);
4719 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4720 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4721 /* if referencing an inline function, we generate a
4722 symbol for it if not already done. This has the
4723 effect of generating code for it at the end of the
4724 compilation unit. Inline functions are always
4725 generated in the text section. */
4726 if (!s->c && !nocode_wanted)
4727 put_extern_sym(s, text_section, 0, 0);
4728 r = VT_SYM | VT_CONST;
4729 } else {
4730 r = s->r;
4731 /* A symbol that has a register is a local register variable,
4732 which starts out as a VT_LOCAL value. */
4733 if ((r & VT_VALMASK) < VT_CONST)
4734 r = (r & ~VT_VALMASK) | VT_LOCAL;
4736 vset(&s->type, r, s->c);
4737 /* Point to s as backpointer (even without r&VT_SYM).
4738 Will be used by at least the x86 inline asm parser for
4739 regvars. */
4740 vtop->sym = s;
4741 if (vtop->r & VT_SYM) {
4742 vtop->c.i = 0;
4744 break;
4747 /* post operations */
4748 while (1) {
4749 if (tok == TOK_INC || tok == TOK_DEC) {
4750 inc(1, tok);
4751 next();
4752 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4753 int qualifiers;
4754 /* field */
4755 if (tok == TOK_ARROW)
4756 indir();
4757 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4758 test_lvalue();
4759 gaddrof();
4760 /* expect pointer on structure */
4761 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4762 expect("struct or union");
4763 if (tok == TOK_CDOUBLE)
4764 expect("field name");
4765 next();
4766 if (tok == TOK_CINT || tok == TOK_CUINT)
4767 expect("field name");
4768 s = find_field(&vtop->type, tok);
4769 if (!s)
4770 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4771 /* add field offset to pointer */
4772 vtop->type = char_pointer_type; /* change type to 'char *' */
4773 vpushi(s->c);
4774 gen_op('+');
4775 /* change type to field type, and set to lvalue */
4776 vtop->type = s->type;
4777 vtop->type.t |= qualifiers;
4778 /* an array is never an lvalue */
4779 if (!(vtop->type.t & VT_ARRAY)) {
4780 vtop->r |= lvalue_type(vtop->type.t);
4781 #ifdef CONFIG_TCC_BCHECK
4782 /* if bound checking, the referenced pointer must be checked */
4783 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4784 vtop->r |= VT_MUSTBOUND;
4785 #endif
4787 next();
4788 } else if (tok == '[') {
4789 next();
4790 gexpr();
4791 gen_op('+');
4792 indir();
4793 skip(']');
4794 } else if (tok == '(') {
4795 SValue ret;
4796 Sym *sa;
4797 int nb_args, ret_nregs, ret_align, regsize, variadic;
4799 /* function call */
4800 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4801 /* pointer test (no array accepted) */
4802 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4803 vtop->type = *pointed_type(&vtop->type);
4804 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4805 goto error_func;
4806 } else {
4807 error_func:
4808 expect("function pointer");
4810 } else {
4811 vtop->r &= ~VT_LVAL; /* no lvalue */
4813 /* get return type */
4814 s = vtop->type.ref;
4815 next();
4816 sa = s->next; /* first parameter */
4817 nb_args = 0;
4818 ret.r2 = VT_CONST;
4819 /* compute first implicit argument if a structure is returned */
4820 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4821 variadic = (s->c == FUNC_ELLIPSIS);
4822 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4823 &ret_align, &regsize);
4824 if (!ret_nregs) {
4825 /* get some space for the returned structure */
4826 size = type_size(&s->type, &align);
4827 #ifdef TCC_TARGET_ARM64
4828 /* On arm64, a small struct is returned in registers.
4829 It is much easier to write it to memory if we know
4830 that we are allowed to write some extra bytes, so
4831 round the allocated space up to a power of 2: */
4832 if (size < 16)
4833 while (size & (size - 1))
4834 size = (size | (size - 1)) + 1;
4835 #endif
4836 loc = (loc - size) & -align;
4837 ret.type = s->type;
4838 ret.r = VT_LOCAL | VT_LVAL;
4839 /* pass it as 'int' to avoid structure arg passing
4840 problems */
4841 vseti(VT_LOCAL, loc);
4842 ret.c = vtop->c;
4843 nb_args++;
4845 } else {
4846 ret_nregs = 1;
4847 ret.type = s->type;
4850 if (ret_nregs) {
4851 /* return in register */
4852 if (is_float(ret.type.t)) {
4853 ret.r = reg_fret(ret.type.t);
4854 #ifdef TCC_TARGET_X86_64
4855 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4856 ret.r2 = REG_QRET;
4857 #endif
4858 } else {
4859 #ifndef TCC_TARGET_ARM64
4860 #ifdef TCC_TARGET_X86_64
4861 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4862 #else
4863 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4864 #endif
4865 ret.r2 = REG_LRET;
4866 #endif
4867 ret.r = REG_IRET;
4869 ret.c.i = 0;
4871 if (tok != ')') {
4872 for(;;) {
4873 expr_eq();
4874 gfunc_param_typed(s, sa);
4875 nb_args++;
4876 if (sa)
4877 sa = sa->next;
4878 if (tok == ')')
4879 break;
4880 skip(',');
4883 if (sa)
4884 tcc_error("too few arguments to function");
4885 skip(')');
4886 gfunc_call(nb_args);
4888 /* return value */
4889 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4890 vsetc(&ret.type, r, &ret.c);
4891 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4894 /* handle packed struct return */
4895 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4896 int addr, offset;
4898 size = type_size(&s->type, &align);
4899 /* We often write whole registers, so make sure there's enough
4900 space. Assume the register size is a power of 2. */
4901 if (regsize > align)
4902 align = regsize;
4903 loc = (loc - size) & -align;
4904 addr = loc;
4905 offset = 0;
4906 for (;;) {
4907 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4908 vswap();
4909 vstore();
4910 vtop--;
4911 if (--ret_nregs == 0)
4912 break;
4913 offset += regsize;
4915 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4917 } else {
4918 break;
4923 ST_FUNC void expr_prod(void)
4925 int t;
4927 unary();
4928 while (tok == '*' || tok == '/' || tok == '%') {
4929 t = tok;
4930 next();
4931 unary();
4932 gen_op(t);
4936 ST_FUNC void expr_sum(void)
4938 int t;
4940 expr_prod();
4941 while (tok == '+' || tok == '-') {
4942 t = tok;
4943 next();
4944 expr_prod();
4945 gen_op(t);
4949 static void expr_shift(void)
4951 int t;
4953 expr_sum();
4954 while (tok == TOK_SHL || tok == TOK_SAR) {
4955 t = tok;
4956 next();
4957 expr_sum();
4958 gen_op(t);
4962 static void expr_cmp(void)
4964 int t;
4966 expr_shift();
4967 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4968 tok == TOK_ULT || tok == TOK_UGE) {
4969 t = tok;
4970 next();
4971 expr_shift();
4972 gen_op(t);
4976 static void expr_cmpeq(void)
4978 int t;
4980 expr_cmp();
4981 while (tok == TOK_EQ || tok == TOK_NE) {
4982 t = tok;
4983 next();
4984 expr_cmp();
4985 gen_op(t);
4989 static void expr_and(void)
4991 expr_cmpeq();
4992 while (tok == '&') {
4993 next();
4994 expr_cmpeq();
4995 gen_op('&');
4999 static void expr_xor(void)
5001 expr_and();
5002 while (tok == '^') {
5003 next();
5004 expr_and();
5005 gen_op('^');
5009 static void expr_or(void)
5011 expr_xor();
5012 while (tok == '|') {
5013 next();
5014 expr_xor();
5015 gen_op('|');
5019 /* XXX: fix this mess */
5020 static void expr_land_const(void)
5022 expr_or();
5023 while (tok == TOK_LAND) {
5024 next();
5025 expr_or();
5026 gen_op(TOK_LAND);
5029 static void expr_lor_const(void)
5031 expr_land_const();
5032 while (tok == TOK_LOR) {
5033 next();
5034 expr_land_const();
5035 gen_op(TOK_LOR);
5039 static void expr_land(void)
5041 expr_or();
5042 if (tok == TOK_LAND) {
5043 int t = 0;
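/* if the left operand is a compile-time constant, '&&' is folded:
   a true constant is simply dropped, while a false one makes the
   remaining operands be parsed with code generation suppressed */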
5044 for(;;) {
5045 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5046 CType ctb;
5047 ctb.t = VT_BOOL;
5048 gen_cast(&ctb);
5049 if (vtop->c.i) {
5050 vpop();
5051 } else {
5052 nocode_wanted++;
5053 while (tok == TOK_LAND) {
5054 next();
5055 expr_or();
5056 vpop();
5058 nocode_wanted--;
5059 if (t)
5060 gsym(t);
5061 gen_cast(&int_type);
5062 break;
5064 } else {
5065 if (!t)
5066 save_regs(1);
5067 t = gvtst(1, t);
5069 if (tok != TOK_LAND) {
5070 if (t)
5071 vseti(VT_JMPI, t);
5072 else
5073 vpushi(1);
5074 break;
5076 next();
5077 expr_or();
5082 static void expr_lor(void)
5084 expr_land();
5085 if (tok == TOK_LOR) {
5086 int t = 0;
5087 for(;;) {
5088 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5089 CType ctb;
5090 ctb.t = VT_BOOL;
5091 gen_cast(&ctb);
5092 if (!vtop->c.i) {
5093 vpop();
5094 } else {
5095 nocode_wanted++;
5096 while (tok == TOK_LOR) {
5097 next();
5098 expr_land();
5099 vpop();
5101 nocode_wanted--;
5102 if (t)
5103 gsym(t);
5104 gen_cast(&int_type);
5105 break;
5107 } else {
5108 if (!t)
5109 save_regs(1);
5110 t = gvtst(0, t);
5112 if (tok != TOK_LOR) {
5113 if (t)
5114 vseti(VT_JMP, t);
5115 else
5116 vpushi(0);
5117 break;
5119 next();
5120 expr_land();
5125 /* Assuming vtop is a value used in a conditional context
5126 (i.e. compared with zero) return 0 if it's false, 1 if
5127 true and -1 if it can't be statically determined. */
5128 static int condition_3way(void)
5130 int c = -1;
5131 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5132 (!(vtop->r & VT_SYM) ||
5133 !(vtop->sym->type.t & VT_WEAK))) {
5134 CType boolean;
5135 boolean.t = VT_BOOL;
5136 vdup();
5137 gen_cast(&boolean);
5138 c = vtop->c.i;
5139 vpop();
5141 return c;
5144 static void expr_cond(void)
5146 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5147 SValue sv;
5148 CType type, type1, type2;
5150 expr_lor();
5151 if (tok == '?') {
5152 next();
5153 c = condition_3way();
5154 g = (tok == ':' && gnu_ext);
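/* GNU extension 'x ?: y': the condition value is duplicated so it can
   also serve as the result when it is non-zero */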
5155 if (c < 0) {
5156 /* needed to avoid having different registers saved in
5157 each branch */
5158 if (is_float(vtop->type.t)) {
5159 rc = RC_FLOAT;
5160 #ifdef TCC_TARGET_X86_64
5161 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5162 rc = RC_ST0;
5164 #endif
5165 } else
5166 rc = RC_INT;
5167 gv(rc);
5168 save_regs(1);
5169 if (g)
5170 gv_dup();
5171 tt = gvtst(1, 0);
5173 } else {
5174 if (!g)
5175 vpop();
5176 tt = 0;
5179 if (1) {
5180 if (c == 0)
5181 nocode_wanted++;
5182 if (!g)
5183 gexpr();
5185 type1 = vtop->type;
5186 sv = *vtop; /* save value to handle it later */
5187 vtop--; /* no vpop so that FP stack is not flushed */
5188 skip(':');
5190 u = 0;
5191 if (c < 0)
5192 u = gjmp(0);
5193 gsym(tt);
5195 if (c == 0)
5196 nocode_wanted--;
5197 if (c == 1)
5198 nocode_wanted++;
5199 expr_cond();
5200 if (c == 1)
5201 nocode_wanted--;
5203 type2 = vtop->type;
5204 t1 = type1.t;
5205 bt1 = t1 & VT_BTYPE;
5206 t2 = type2.t;
5207 bt2 = t2 & VT_BTYPE;
5208 /* cast operands to correct type according to ISOC rules */
5209 if (is_float(bt1) || is_float(bt2)) {
5210 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5211 type.t = VT_LDOUBLE;
5213 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5214 type.t = VT_DOUBLE;
5215 } else {
5216 type.t = VT_FLOAT;
5218 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5219 /* cast to biggest op */
5220 type.t = VT_LLONG;
5221 /* convert to unsigned if it does not fit in a long long */
5222 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5223 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5224 type.t |= VT_UNSIGNED;
5225 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5226 /* If one is a null ptr constant the result type
5227 is the other. */
5228 if (is_null_pointer (vtop))
5229 type = type1;
5230 else if (is_null_pointer (&sv))
5231 type = type2;
5232 /* XXX: test pointer compatibility, C99 has more elaborate
5233 rules here. */
5234 else
5235 type = type1;
5236 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5237 /* XXX: test function pointer compatibility */
5238 type = bt1 == VT_FUNC ? type1 : type2;
5239 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5240 /* XXX: test structure compatibility */
5241 type = bt1 == VT_STRUCT ? type1 : type2;
5242 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5243 /* NOTE: as an extension, we accept void on only one side */
5244 type.t = VT_VOID;
5245 } else {
5246 /* integer operations */
5247 type.t = VT_INT;
5248 /* convert to unsigned if it does not fit in an integer */
5249 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5250 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5251 type.t |= VT_UNSIGNED;
5253 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5254 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5255 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5256 islv &= c < 0;
5258 /* now we convert second operand */
5259 if (c != 1) {
5260 gen_cast(&type);
5261 if (islv) {
5262 mk_pointer(&vtop->type);
5263 gaddrof();
5264 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5265 gaddrof();
5268 rc = RC_INT;
5269 if (is_float(type.t)) {
5270 rc = RC_FLOAT;
5271 #ifdef TCC_TARGET_X86_64
5272 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5273 rc = RC_ST0;
5275 #endif
5276 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5277 /* for long longs, we use fixed registers to avoid having
5278 to handle a complicated move */
5279 rc = RC_IRET;
5282 tt = r2 = 0;
5283 if (c < 0) {
5284 r2 = gv(rc);
5285 tt = gjmp(0);
5287 gsym(u);
5289 /* this is horrible, but we must also convert first
5290 operand */
5291 if (c != 0) {
5292 *vtop = sv;
5293 gen_cast(&type);
5294 if (islv) {
5295 mk_pointer(&vtop->type);
5296 gaddrof();
5297 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5298 gaddrof();
5301 if (c < 0) {
5302 r1 = gv(rc);
5303 move_reg(r2, r1, type.t);
5304 vtop->r = r2;
5305 gsym(tt);
5306 if (islv)
5307 indir();
5313 static void expr_eq(void)
5315 int t;
5317 expr_cond();
5318 if (tok == '=' ||
5319 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5320 tok == TOK_A_XOR || tok == TOK_A_OR ||
5321 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5322 test_lvalue();
5323 t = tok;
5324 next();
5325 if (t == '=') {
5326 expr_eq();
5327 } else {
5328 vdup();
5329 expr_eq();
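/* compound assignment: the TOK_A_xxx tokens encode the plain operator
   in their low 7 bits, so masking with 0x7f recovers the operator to
   apply before storing */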
5330 gen_op(t & 0x7f);
5332 vstore();
5336 ST_FUNC void gexpr(void)
5338 while (1) {
5339 expr_eq();
5340 if (tok != ',')
5341 break;
5342 vpop();
5343 next();
5347 /* parse an expression and return its type without any side effect. */
5348 static void expr_type(CType *type)
5351 nocode_wanted++;
5352 gexpr();
5353 *type = vtop->type;
5354 vpop();
5355 nocode_wanted--;
5358 /* parse a unary expression and return its type without any side
5359 effect. */
5360 static void unary_type(CType *type)
5362 nocode_wanted++;
5363 unary();
5364 *type = vtop->type;
5365 vpop();
5366 nocode_wanted--;
5369 /* parse a constant expression and return value in vtop. */
5370 static void expr_const1(void)
5372 const_wanted++;
5373 expr_cond();
5374 const_wanted--;
5377 /* parse an integer constant and return its value. */
5378 static inline int64_t expr_const64(void)
5380 int64_t c;
5381 expr_const1();
5382 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5383 expect("constant expression");
5384 c = vtop->c.i;
5385 vpop();
5386 return c;
5389 /* parse an integer constant and return its value.
5390 Complain if it doesn't fit 32bit (signed or unsigned). */
5391 ST_FUNC int expr_const(void)
5393 int c;
5394 int64_t wc = expr_const64();
5395 c = wc;
5396 if (c != wc && (unsigned)c != wc)
5397 tcc_error("constant exceeds 32 bit");
5398 return c;
5401 /* return the label token if current token is a label, otherwise
5402 return zero */
5403 static int is_label(void)
5405 int last_tok;
5407 /* fast test first */
5408 if (tok < TOK_UIDENT)
5409 return 0;
5410 /* no need to save tokc because tok is an identifier */
5411 last_tok = tok;
5412 next();
5413 if (tok == ':') {
5414 next();
5415 return last_tok;
5416 } else {
5417 unget_tok(last_tok);
5418 return 0;
5422 static void label_or_decl(int l)
5424 int last_tok;
5426 /* fast test first */
5427 if (tok >= TOK_UIDENT)
5429 /* no need to save tokc because tok is an identifier */
5430 last_tok = tok;
5431 next();
5432 if (tok == ':') {
5433 unget_tok(last_tok);
5434 return;
5436 unget_tok(last_tok);
5438 decl(l);
5441 static int case_cmp(const void *pa, const void *pb)
5443 int64_t a = (*(struct case_t**) pa)->v1;
5444 int64_t b = (*(struct case_t**) pb)->v1;
5445 return a < b ? -1 : a > b;
5448 static void gcase(struct case_t **base, int len, int *bsym)
5450 struct case_t *p;
5451 int e;
5452 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5453 gv(RC_INT);
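/* the case ranges arrive sorted by v1 (see case_cmp): use a binary
   search while more than 4 ranges remain, then finish with the
   linear scan below */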
5454 while (len > 4) {
5455 /* binary search */
5456 p = base[len/2];
5457 vdup();
5458 if (ll)
5459 vpushll(p->v2);
5460 else
5461 vpushi(p->v2);
5462 gen_op(TOK_LE);
5463 e = gtst(1, 0);
5464 vdup();
5465 if (ll)
5466 vpushll(p->v1);
5467 else
5468 vpushi(p->v1);
5469 gen_op(TOK_GE);
5470 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5471 /* x < v1 */
5472 gcase(base, len/2, bsym);
5473 if (cur_switch->def_sym)
5474 gjmp_addr(cur_switch->def_sym);
5475 else
5476 *bsym = gjmp(*bsym);
5477 /* x > v2 */
5478 gsym(e);
5479 e = len/2 + 1;
5480 base += e; len -= e;
5482 /* linear scan */
5483 while (len--) {
5484 p = *base++;
5485 vdup();
5486 if (ll)
5487 vpushll(p->v2);
5488 else
5489 vpushi(p->v2);
5490 if (p->v1 == p->v2) {
5491 gen_op(TOK_EQ);
5492 gtst_addr(0, p->sym);
5493 } else {
5494 gen_op(TOK_LE);
5495 e = gtst(1, 0);
5496 vdup();
5497 if (ll)
5498 vpushll(p->v1);
5499 else
5500 vpushi(p->v1);
5501 gen_op(TOK_GE);
5502 gtst_addr(0, p->sym);
5503 gsym(e);
5508 static void block(int *bsym, int *csym, int is_expr)
5510 int a, b, c, d, cond;
5511 Sym *s;
5513 /* generate line number info */
5514 if (tcc_state->do_debug &&
5515 (last_line_num != file->line_num || last_ind != ind)) {
5516 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5517 last_ind = ind;
5518 last_line_num = file->line_num;
5521 if (is_expr) {
5522 /* default return value is (void) */
5523 vpushi(0);
5524 vtop->type.t = VT_VOID;
5527 if (tok == TOK_IF) {
5528 /* if test */
5529 int saved_nocode_wanted = nocode_wanted;
5530 next();
5531 skip('(');
5532 gexpr();
5533 skip(')');
5534 cond = condition_3way();
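/* if the controlling expression is a compile-time constant, the dead
   branch is still parsed but with code generation suppressed; the
   0x20000000 bit in nocode_wanted marks statically unreachable code
   and is cleared again at labels (see block_after_label) */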
5535 if (cond == 1)
5536 a = 0, vpop();
5537 else
5538 a = gvtst(1, 0);
5539 if (cond == 0)
5540 nocode_wanted |= 0x20000000;
5541 block(bsym, csym, 0);
5542 if (cond != 1)
5543 nocode_wanted = saved_nocode_wanted;
5544 c = tok;
5545 if (c == TOK_ELSE) {
5546 next();
5547 d = gjmp(0);
5548 gsym(a);
5549 if (cond == 1)
5550 nocode_wanted |= 0x20000000;
5551 block(bsym, csym, 0);
5552 gsym(d); /* patch else jmp */
5553 if (cond != 0)
5554 nocode_wanted = saved_nocode_wanted;
5555 } else
5556 gsym(a);
5557 } else if (tok == TOK_WHILE) {
5558 int saved_nocode_wanted;
5559 nocode_wanted &= ~0x20000000;
5560 next();
5561 d = ind;
5562 vla_sp_restore();
5563 skip('(');
5564 gexpr();
5565 skip(')');
5566 a = gvtst(1, 0);
5567 b = 0;
5568 ++local_scope;
5569 saved_nocode_wanted = nocode_wanted;
5570 block(&a, &b, 0);
5571 nocode_wanted = saved_nocode_wanted;
5572 --local_scope;
5573 gjmp_addr(d);
5574 gsym(a);
5575 gsym_addr(b, d);
5576 } else if (tok == '{') {
5577 Sym *llabel;
5578 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5580 next();
5581 /* record local declaration stack position */
5582 s = local_stack;
5583 llabel = local_label_stack;
5584 ++local_scope;
5586 /* handle local labels declarations */
5587 if (tok == TOK_LABEL) {
5588 next();
5589 for(;;) {
5590 if (tok < TOK_UIDENT)
5591 expect("label identifier");
5592 label_push(&local_label_stack, tok, LABEL_DECLARED);
5593 next();
5594 if (tok == ',') {
5595 next();
5596 } else {
5597 skip(';');
5598 break;
5602 while (tok != '}') {
5603 label_or_decl(VT_LOCAL);
5604 if (tok != '}') {
5605 if (is_expr)
5606 vpop();
5607 block(bsym, csym, is_expr);
5610 /* pop locally defined labels */
5611 label_pop(&local_label_stack, llabel);
5612 /* pop locally defined symbols */
5613 --local_scope;
5614 /* In the is_expr case (a statement expression is finished here),
5615 vtop might refer to symbols on the local_stack. Either via the
5616 type or via vtop->sym. We can't pop those nor any that in turn
5617 might be referred to. To make it easier we don't roll back
5618 any symbols in that case; some upper level call to block() will
5619 do that. We do have to remove such symbols from the lookup
5620 tables, though. sym_pop will do that. */
5621 sym_pop(&local_stack, s, is_expr);
5623 /* Pop VLA frames and restore stack pointer if required */
5624 if (vlas_in_scope > saved_vlas_in_scope) {
5625 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5626 vla_sp_restore();
5628 vlas_in_scope = saved_vlas_in_scope;
5630 next();
5631 } else if (tok == TOK_RETURN) {
5632 next();
5633 if (tok != ';') {
5634 gexpr();
5635 gen_assign_cast(&func_vt);
5636 #ifdef TCC_TARGET_ARM64
5637 // Perhaps it would be better to use this for all backends:
5638 greturn();
5639 #else
5640 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5641 CType type, ret_type;
5642 int ret_align, ret_nregs, regsize;
5643 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5644 &ret_align, &regsize);
5645 if (0 == ret_nregs) {
5646 /* if returning structure, must copy it to implicit
5647 first pointer arg location */
5648 type = func_vt;
5649 mk_pointer(&type);
5650 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5651 indir();
5652 vswap();
5653 /* copy structure value to pointer */
5654 vstore();
5655 } else {
5656 /* returning structure packed into registers */
5657 int r, size, addr, align;
5658 size = type_size(&func_vt,&align);
5659 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5660 (vtop->c.i & (ret_align-1)))
5661 && (align & (ret_align-1))) {
5662 loc = (loc - size) & -ret_align;
5663 addr = loc;
5664 type = func_vt;
5665 vset(&type, VT_LOCAL | VT_LVAL, addr);
5666 vswap();
5667 vstore();
5668 vpop();
5669 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5671 vtop->type = ret_type;
5672 if (is_float(ret_type.t))
5673 r = rc_fret(ret_type.t);
5674 else
5675 r = RC_IRET;
5677 if (ret_nregs == 1)
5678 gv(r);
5679 else {
5680 for (;;) {
5681 vdup();
5682 gv(r);
5683 vpop();
5684 if (--ret_nregs == 0)
5685 break;
5686 /* We assume that when a structure is returned in multiple
5687 registers, their register classes are consecutive values
5688 of the sequence s(n) = 2^n */
5689 r <<= 1;
5690 vtop->c.i += regsize;
5694 } else if (is_float(func_vt.t)) {
5695 gv(rc_fret(func_vt.t));
5696 } else {
5697 gv(RC_IRET);
5699 #endif
5700 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5702 skip(';');
5703 /* jump unless last stmt in top-level block */
5704 if (tok != '}' || local_scope != 1)
5705 rsym = gjmp(rsym);
5706 nocode_wanted |= 0x20000000;
5707 } else if (tok == TOK_BREAK) {
5708 /* compute jump */
5709 if (!bsym)
5710 tcc_error("cannot break");
5711 *bsym = gjmp(*bsym);
5712 next();
5713 skip(';');
5714 nocode_wanted |= 0x20000000;
5715 } else if (tok == TOK_CONTINUE) {
5716 /* compute jump */
5717 if (!csym)
5718 tcc_error("cannot continue");
5719 vla_sp_restore_root();
5720 *csym = gjmp(*csym);
5721 next();
5722 skip(';');
5723 } else if (tok == TOK_FOR) {
5724 int e;
5725 int saved_nocode_wanted;
5726 nocode_wanted &= ~0x20000000;
5727 next();
5728 skip('(');
5729 s = local_stack;
5730 ++local_scope;
5731 if (tok != ';') {
5732 /* c99 for-loop init decl? */
5733 if (!decl0(VT_LOCAL, 1)) {
5734 /* no, regular for-loop init expr */
5735 gexpr();
5736 vpop();
5739 skip(';');
5740 d = ind;
5741 c = ind;
5742 vla_sp_restore();
5743 a = 0;
5744 b = 0;
5745 if (tok != ';') {
5746 gexpr();
5747 a = gvtst(1, 0);
5749 skip(';');
5750 if (tok != ')') {
5751 e = gjmp(0);
5752 c = ind;
5753 vla_sp_restore();
5754 gexpr();
5755 vpop();
5756 gjmp_addr(d);
5757 gsym(e);
5759 skip(')');
5760 saved_nocode_wanted = nocode_wanted;
5761 block(&a, &b, 0);
5762 nocode_wanted = saved_nocode_wanted;
5763 gjmp_addr(c);
5764 gsym(a);
5765 gsym_addr(b, c);
5766 --local_scope;
5767 sym_pop(&local_stack, s, 0);
5769 } else
5770 if (tok == TOK_DO) {
5771 int saved_nocode_wanted;
5772 nocode_wanted &= ~0x20000000;
5773 next();
5774 a = 0;
5775 b = 0;
5776 d = ind;
5777 vla_sp_restore();
5778 saved_nocode_wanted = nocode_wanted;
5779 block(&a, &b, 0);
5780 skip(TOK_WHILE);
5781 skip('(');
5782 gsym(b);
5783 gexpr();
5784 c = gvtst(0, 0);
5785 gsym_addr(c, d);
5786 nocode_wanted = saved_nocode_wanted;
5787 skip(')');
5788 gsym(a);
5789 skip(';');
5790 } else
5791 if (tok == TOK_SWITCH) {
5792 struct switch_t *saved, sw;
5793 int saved_nocode_wanted = nocode_wanted;
5794 SValue switchval;
5795 next();
5796 skip('(');
5797 gexpr();
5798 skip(')');
5799 switchval = *vtop--;
5800 a = 0;
5801 b = gjmp(0); /* jump to first case */
5802 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5803 saved = cur_switch;
5804 cur_switch = &sw;
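/* 'case' and 'default' labels inside the block register themselves in
   cur_switch; the collected ranges are then sorted and dispatched by
   gcase() once the body has been parsed */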
5805 block(&a, csym, 0);
5806 nocode_wanted = saved_nocode_wanted;
5807 a = gjmp(a); /* add implicit break */
5808 /* case lookup */
5809 gsym(b);
5810 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5811 for (b = 1; b < sw.n; b++)
5812 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5813 tcc_error("duplicate case value");
5814 /* Our switch table sorting is signed, so the compared
5815 value needs to be as well when it's 64bit. */
5816 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5817 switchval.type.t &= ~VT_UNSIGNED;
5818 vpushv(&switchval);
5819 gcase(sw.p, sw.n, &a);
5820 vpop();
5821 if (sw.def_sym)
5822 gjmp_addr(sw.def_sym);
5823 dynarray_reset(&sw.p, &sw.n);
5824 cur_switch = saved;
5825 /* break label */
5826 gsym(a);
5827 } else
5828 if (tok == TOK_CASE) {
5829 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5830 if (!cur_switch)
5831 expect("switch");
5832 nocode_wanted &= ~0x20000000;
5833 next();
5834 cr->v1 = cr->v2 = expr_const64();
5835 if (gnu_ext && tok == TOK_DOTS) {
5836 next();
5837 cr->v2 = expr_const64();
5838 if (cr->v2 < cr->v1)
5839 tcc_warning("empty case range");
5841 cr->sym = ind;
5842 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5843 skip(':');
5844 is_expr = 0;
5845 goto block_after_label;
5846 } else
5847 if (tok == TOK_DEFAULT) {
5848 next();
5849 skip(':');
5850 if (!cur_switch)
5851 expect("switch");
5852 if (cur_switch->def_sym)
5853 tcc_error("too many 'default'");
5854 cur_switch->def_sym = ind;
5855 is_expr = 0;
5856 goto block_after_label;
5857 } else
5858 if (tok == TOK_GOTO) {
5859 next();
5860 if (tok == '*' && gnu_ext) {
5861 /* computed goto */
5862 next();
5863 gexpr();
5864 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5865 expect("pointer");
5866 ggoto();
5867 } else if (tok >= TOK_UIDENT) {
5868 s = label_find(tok);
5869 /* put forward definition if needed */
5870 if (!s) {
5871 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5872 } else {
5873 if (s->r == LABEL_DECLARED)
5874 s->r = LABEL_FORWARD;
5876 vla_sp_restore_root();
5877 if (s->r & LABEL_FORWARD)
5878 s->jnext = gjmp(s->jnext);
5879 else
5880 gjmp_addr(s->jnext);
5881 next();
5882 } else {
5883 expect("label identifier");
5885 skip(';');
5886 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5887 asm_instr();
5888 } else {
5889 b = is_label();
5890 if (b) {
5891 /* label case */
5892 s = label_find(b);
5893 if (s) {
5894 if (s->r == LABEL_DEFINED)
5895 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5896 gsym(s->jnext);
5897 s->r = LABEL_DEFINED;
5898 } else {
5899 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5901 s->jnext = ind;
5902 vla_sp_restore();
5903 /* we accept this, but it is a mistake */
5904 block_after_label:
5905 nocode_wanted &= ~0x20000000;
5906 if (tok == '}') {
5907 tcc_warning("deprecated use of label at end of compound statement");
5908 } else {
5909 if (is_expr)
5910 vpop();
5911 block(bsym, csym, is_expr);
5913 } else {
5914 /* expression case */
5915 if (tok != ';') {
5916 if (is_expr) {
5917 vpop();
5918 gexpr();
5919 } else {
5920 gexpr();
5921 vpop();
5924 skip(';');
5929 #define EXPR_CONST 1
5930 #define EXPR_ANY 2
5932 static void parse_init_elem(int expr_type)
5934 int saved_global_expr;
5935 switch(expr_type) {
5936 case EXPR_CONST:
5937 /* compound literals must be allocated globally in this case */
5938 saved_global_expr = global_expr;
5939 global_expr = 1;
5940 expr_const1();
5941 global_expr = saved_global_expr;
5942 /* NOTE: symbols are accepted */
5943 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5944 tcc_error("initializer element is not constant");
5945 break;
5946 case EXPR_ANY:
5947 expr_eq();
5948 break;
5952 /* 'type' is the array or struct type. 'c' is the array or struct
5953 address. 'cur_field' points to the current value; for arrays
5954 the 'c' member contains the current start index and 'r' contains
5955 the end index (in case of a range init).
5956 'size_only' is true if only size info is needed (only used
5957 for arrays) */
5958 static void decl_designator(CType *type, Section *sec, unsigned long c,
5959 Sym **cur_field, int size_only)
5961 Sym *s, *f;
5962 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5963 CType type1;
5965 notfirst = 0;
5966 elem_size = 0;
5967 nb_elems = 1;
5968 if (gnu_ext && (l = is_label()) != 0)
5969 goto struct_field;
5970 while (tok == '[' || tok == '.') {
5971 if (tok == '[') {
5972 if (!(type->t & VT_ARRAY))
5973 expect("array type");
5974 s = type->ref;
5975 next();
5976 index = expr_const();
5977 if (index < 0 || (s->c >= 0 && index >= s->c))
5978 tcc_error("invalid index");
5979 if (tok == TOK_DOTS && gnu_ext) {
5980 next();
5981 index_last = expr_const();
5982 if (index_last < 0 ||
5983 (s->c >= 0 && index_last >= s->c) ||
5984 index_last < index)
5985 tcc_error("invalid index");
5986 } else {
5987 index_last = index;
5989 skip(']');
5990 if (!notfirst) {
5991 (*cur_field)->c = index;
5992 (*cur_field)->r = index_last;
5994 type = pointed_type(type);
5995 elem_size = type_size(type, &align);
5996 c += index * elem_size;
5997 /* NOTE: we only support ranges for last designator */
5998 nb_elems = index_last - index + 1;
5999 if (nb_elems != 1) {
6000 notfirst = 1;
6001 break;
6003 } else {
6004 next();
6005 l = tok;
6006 next();
6007 struct_field:
6008 if ((type->t & VT_BTYPE) != VT_STRUCT)
6009 expect("struct/union type");
6010 f = find_field(type, l);
6011 if (!f)
6012 expect("field");
6013 if (!notfirst)
6014 *cur_field = f;
6015 /* XXX: fix this mess by using explicit storage field */
6016 type1 = f->type;
6017 type1.t |= (type->t & ~VT_TYPE);
6018 type = &type1;
6019 c += f->c;
6021 notfirst = 1;
6023 if (notfirst) {
6024 if (tok == '=') {
6025 next();
6026 } else {
6027 if (!gnu_ext)
6028 expect("=");
6030 } else {
6031 if (type->t & VT_ARRAY) {
6032 index = (*cur_field)->c;
6033 if (type->ref->c >= 0 && index >= type->ref->c)
6034 tcc_error("index too large");
6035 type = pointed_type(type);
6036 c += index * type_size(type, &align);
6037 } else {
6038 f = *cur_field;
6039 if (!f)
6040 tcc_error("too many field init");
6041 /* XXX: fix this mess by using explicit storage field */
6042 type1 = f->type;
6043 type1.t |= (type->t & ~VT_TYPE);
6044 type = &type1;
6045 c += f->c;
6048 decl_initializer(type, sec, c, 0, size_only);
6050 /* XXX: make it more general */
6051 if (!size_only && nb_elems > 1) {
6052 unsigned long c_end;
6053 uint8_t *src, *dst;
6054 int i;
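/* the first element of an '[a ... b]' range designator was initialized
   above; replicate it into the remaining elements, with vstore for
   local data and memcpy within the section for static data */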
6056 if (!sec) {
6057 vset(type, VT_LOCAL|VT_LVAL, c);
6058 for (i = 1; i < nb_elems; i++) {
6059 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6060 vswap();
6061 vstore();
6063 vpop();
6064 } else {
6065 c_end = c + nb_elems * elem_size;
6066 if (c_end > sec->data_allocated)
6067 section_realloc(sec, c_end);
6068 src = sec->data + c;
6069 dst = src;
6070 for(i = 1; i < nb_elems; i++) {
6071 dst += elem_size;
6072 memcpy(dst, src, elem_size);
6078 /* store a value or an expression directly in global data or in local array */
6079 static void init_putv(CType *type, Section *sec, unsigned long c)
6081 int bt, bit_pos, bit_size;
6082 void *ptr;
6083 unsigned long long bit_mask;
6084 CType dtype;
6086 dtype = *type;
6087 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6089 if (sec) {
6090 int size, align;
6091 /* XXX: not portable */
6092 /* XXX: generate error if incorrect relocation */
6093 gen_assign_cast(&dtype);
6094 bt = type->t & VT_BTYPE;
6095 size = type_size(type, &align);
6096 if (c + size > sec->data_allocated) {
6097 section_realloc(sec, c + size);
6099 ptr = sec->data + c;
6100 /* XXX: make code faster ? */
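/* for bitfield members the bit offset and width are packed into the
   type word (see VT_STRUCT_SHIFT); plain members write the whole
   object, hence the full-width mask */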
6101 if (!(type->t & VT_BITFIELD)) {
6102 bit_pos = 0;
6103 bit_size = PTR_SIZE * 8;
6104 bit_mask = -1LL;
6105 } else {
6106 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6107 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6108 bit_mask = (1LL << bit_size) - 1;
6110 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6111 vtop->sym->v >= SYM_FIRST_ANOM &&
6112 /* XXX This rejects compound literals like
6113 '(void *){ptr}'. The problem is that '&sym' is
6114 represented the same way, which would be ruled out
6115 by the SYM_FIRST_ANOM check above, but also '"string"'
6116 in 'char *p = "string"' is represented the same
6117 with the type being VT_PTR and the symbol being an
6118 anonymous one. That is, there's no difference in vtop
6119 between '(void *){x}' and '&(void *){x}'. Ignore
6120 pointer typed entities here. Hopefully no real code
6121 will ever use compound literals with scalar type. */
6122 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6123 /* These come from compound literals, memcpy stuff over. */
6124 Section *ssec;
6125 ElfW(Sym) *esym;
6126 ElfW_Rel *rel;
6127 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6128 ssec = tcc_state->sections[esym->st_shndx];
6129 memmove (ptr, ssec->data + esym->st_value, size);
6130 if (ssec->reloc) {
6131 /* We need to copy over all memory contents, and that
6132 includes relocations. Use the fact that relocs are
6133 created in order, so look from the end of relocs
6134 until we hit one before the copied region. */
6135 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6136 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6137 while (num_relocs--) {
6138 rel--;
6139 if (rel->r_offset >= esym->st_value + size)
6140 continue;
6141 if (rel->r_offset < esym->st_value)
6142 break;
6143 /* Note: if the same fields are initialized multiple
6144 times (possible with designators) then we possibly
6145 add multiple relocations for the same offset here.
6146 That would lead to wrong code; the last reloc needs
6147 to win. We clean this up later after the whole
6148 initializer is parsed. */
6149 put_elf_reloca(symtab_section, sec,
6150 c + rel->r_offset - esym->st_value,
6151 ELFW(R_TYPE)(rel->r_info),
6152 ELFW(R_SYM)(rel->r_info),
6153 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6154 rel->r_addend
6155 #else
6157 #endif
6161 } else {
6162 if ((vtop->r & VT_SYM) &&
6163 (bt == VT_BYTE ||
6164 bt == VT_SHORT ||
6165 bt == VT_DOUBLE ||
6166 bt == VT_LDOUBLE ||
6167 #if PTR_SIZE == 8
6168 (bt == VT_LLONG && bit_size != 64) ||
6169 bt == VT_INT
6170 #else
6171 bt == VT_LLONG ||
6172 (bt == VT_INT && bit_size != 32)
6173 #endif
6175 tcc_error("initializer element is not computable at load time");
6176 switch(bt) {
6177 /* XXX: when cross-compiling we assume that each type has the
6178 same representation on host and target, which is likely to
6179 be wrong in the case of long double */
6180 case VT_BOOL:
6181 vtop->c.i = (vtop->c.i != 0);
6182 case VT_BYTE:
6183 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6184 break;
6185 case VT_SHORT:
6186 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6187 break;
6188 case VT_DOUBLE:
6189 *(double *)ptr = vtop->c.d;
6190 break;
6191 case VT_LDOUBLE:
6192 if (sizeof(long double) == LDOUBLE_SIZE)
6193 *(long double *)ptr = vtop->c.ld;
6194 else if (sizeof(double) == LDOUBLE_SIZE)
6195 *(double *)ptr = vtop->c.ld;
6196 else
6197 tcc_error("can't cross compile long double constants");
6198 break;
6199 #if PTR_SIZE != 8
6200 case VT_LLONG:
6201 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6202 break;
6203 #else
6204 case VT_LLONG:
6205 #endif
6206 case VT_PTR:
6208 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6209 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6210 if (vtop->r & VT_SYM)
6211 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6212 else
6213 *(addr_t *)ptr |= val;
6214 #else
6215 if (vtop->r & VT_SYM)
6216 greloc(sec, vtop->sym, c, R_DATA_PTR);
6217 *(addr_t *)ptr |= val;
6218 #endif
6219 break;
6221 default:
6223 int val = (vtop->c.i & bit_mask) << bit_pos;
6224 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6225 if (vtop->r & VT_SYM)
6226 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6227 else
6228 *(int *)ptr |= val;
6229 #else
6230 if (vtop->r & VT_SYM)
6231 greloc(sec, vtop->sym, c, R_DATA_PTR);
6232 *(int *)ptr |= val;
6233 #endif
6234 break;
6238 vtop--;
6239 } else {
6240 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6241 vswap();
6242 vstore();
6243 vpop();
6247 /* put zeros for variable based init */
6248 static void init_putz(Section *sec, unsigned long c, int size)
6250 if (sec) {
6251 /* nothing to do because globals are already set to zero */
6252 } else {
6253 vpush_global_sym(&func_old_type, TOK_memset);
6254 vseti(VT_LOCAL, c);
6255 #ifdef TCC_TARGET_ARM
6256 vpushs(size);
6257 vpushi(0);
6258 #else
6259 vpushi(0);
6260 vpushs(size);
6261 #endif
6262 gfunc_call(3);
6266 /* 't' contains the type and storage info. 'c' is the offset of the
6267 object in section 'sec'. If 'sec' is NULL, it means stack based
6268 allocation. 'first' is true if array '{' must be read (multi
6269 dimension implicit array init handling). 'size_only' is true if
6270 size only evaluation is wanted (only for arrays). */
6271 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6272 int first, int size_only)
6274 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6275 int size1, align1;
6276 int have_elem;
6277 Sym *s, *f;
6278 Sym indexsym;
6279 CType *t1;
6281 /* If we are currently at a '}' or ',' we have read an initializer
6282 element in one of our callers, and not yet consumed it. */
6283 have_elem = tok == '}' || tok == ',';
6284 if (!have_elem && tok != '{' &&
6285 /* In case of strings we have special handling for arrays, so
6286 don't consume them as initializer value (which would commit them
6287 to some anonymous symbol). */
6288 tok != TOK_LSTR && tok != TOK_STR &&
6289 !size_only) {
6290 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6291 have_elem = 1;
6294 if (have_elem &&
6295 !(type->t & VT_ARRAY) &&
6296 /* Use i_c_parameter_t to strip toplevel qualifiers.
6297 The source type might have VT_CONSTANT set, which is
6298 of course assignable to non-const elements. */
6299 is_compatible_parameter_types(type, &vtop->type)) {
6300 init_putv(type, sec, c);
6301 } else if (type->t & VT_ARRAY) {
6302 s = type->ref;
6303 n = s->c;
6304 array_length = 0;
6305 t1 = pointed_type(type);
6306 size1 = type_size(t1, &align1);
6308 no_oblock = 1;
6309 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6310 tok == '{') {
6311 if (tok != '{')
6312 tcc_error("character array initializer must be a literal,"
6313 " optionally enclosed in braces");
6314 skip('{');
6315 no_oblock = 0;
6318 /* only parse strings here if the type is correct (otherwise
6319 handle them as ((w)char *) expressions) */
6320 if ((tok == TOK_LSTR &&
6321 #ifdef TCC_TARGET_PE
6322 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6323 #else
6324 (t1->t & VT_BTYPE) == VT_INT
6325 #endif
6326 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6327 while (tok == TOK_STR || tok == TOK_LSTR) {
6328 int cstr_len, ch;
6330 /* compute maximum number of chars wanted */
6331 if (tok == TOK_STR)
6332 cstr_len = tokc.str.size;
6333 else
6334 cstr_len = tokc.str.size / sizeof(nwchar_t);
6335 cstr_len--;
6336 nb = cstr_len;
6337 if (n >= 0 && nb > (n - array_length))
6338 nb = n - array_length;
6339 if (!size_only) {
6340 if (cstr_len > nb)
6341 tcc_warning("initializer-string for array is too long");
6342 /* in order to go faster for the common case (char
6343 string in a global variable), we handle it
6344 specially */
6345 if (sec && tok == TOK_STR && size1 == 1) {
6346 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6347 } else {
6348 for(i=0;i<nb;i++) {
6349 if (tok == TOK_STR)
6350 ch = ((unsigned char *)tokc.str.data)[i];
6351 else
6352 ch = ((nwchar_t *)tokc.str.data)[i];
6353 vpushi(ch);
6354 init_putv(t1, sec, c + (array_length + i) * size1);
6358 array_length += nb;
6359 next();
6361 /* only add trailing zero if enough storage (no
6362 warning in this case since it is standard) */
6363 if (n < 0 || array_length < n) {
6364 if (!size_only) {
6365 vpushi(0);
6366 init_putv(t1, sec, c + (array_length * size1));
6368 array_length++;
6370 } else {
6371 indexsym.c = 0;
6372 indexsym.r = 0;
6373 f = &indexsym;
6375 do_init_list:
6376 while (tok != '}' || have_elem) {
6377 decl_designator(type, sec, c, &f, size_only);
6378 have_elem = 0;
6379 index = f->c;
6380 /* must put zero in holes (note that doing it that way
6381 ensures that it even works with designators) */
6382 if (!size_only && array_length < index) {
6383 init_putz(sec, c + array_length * size1,
6384 (index - array_length) * size1);
6386 if (type->t & VT_ARRAY) {
6387 index = indexsym.c = ++indexsym.r;
6388 } else {
6389 index = index + type_size(&f->type, &align1);
6390 if (s->type.t == TOK_UNION)
6391 f = NULL;
6392 else
6393 f = f->next;
6395 if (index > array_length)
6396 array_length = index;
6398 if (type->t & VT_ARRAY) {
6399 /* special test for multi dimensional arrays (may not
6400 be strictly correct if designators are used at the
6401 same time) */
6402 if (no_oblock && index >= n)
6403 break;
6404 } else {
6405 if (no_oblock && f == NULL)
6406 break;
6408 if (tok == '}')
6409 break;
6410 skip(',');
6413 /* put zeros at the end */
6414 if (!size_only && array_length < n) {
6415 init_putz(sec, c + array_length * size1,
6416 (n - array_length) * size1);
6418 if (!no_oblock)
6419 skip('}');
6420 /* patch type size if needed, which happens only for array types */
6421 if (n < 0)
6422 s->c = array_length;
6423 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6424 size1 = 1;
6425 no_oblock = 1;
6426 if (first || tok == '{') {
6427 skip('{');
6428 no_oblock = 0;
6430 s = type->ref;
6431 f = s->next;
6432 array_length = 0;
6433 n = s->c;
6434 goto do_init_list;
6435 } else if (tok == '{') {
6436 next();
6437 decl_initializer(type, sec, c, first, size_only);
6438 skip('}');
6439 } else if (size_only) {
6440 /* If we supported only ISO C we wouldn't have to accept calling
6441 this on anything other than an array with size_only==1 (and even
6442 then only on the outermost level, so no recursion would be needed),
6443 because initializing a flexible array member isn't supported.
6444 But GNU C supports it, so we need to recurse even into
6445 subfields of structs and arrays when size_only is set. */
6446 /* just skip expression */
6447 parlevel = parlevel1 = 0;
6448 while ((parlevel > 0 || parlevel1 > 0 ||
6449 (tok != '}' && tok != ',')) && tok != -1) {
6450 if (tok == '(')
6451 parlevel++;
6452 else if (tok == ')') {
6453 if (parlevel == 0 && parlevel1 == 0)
6454 break;
6455 parlevel--;
6457 else if (tok == '{')
6458 parlevel1++;
6459 else if (tok == '}') {
6460 if (parlevel == 0 && parlevel1 == 0)
6461 break;
6462 parlevel1--;
6464 next();
6466 } else {
6467 if (!have_elem) {
6468 /* This should happen only when we haven't parsed
6469 the init element above for fear of committing a
6470 string constant to memory too early. */
6471 if (tok != TOK_STR && tok != TOK_LSTR)
6472 expect("string constant");
6473 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6475 init_putv(type, sec, c);
6479 /* parse an initializer for type 't' if 'has_init' is non zero, and
6480 allocate space in local or global data space ('r' is either
6481 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6482 variable 'v' of scope 'scope' is declared before initializers
6483 are parsed. If 'v' is zero, then a reference to the new object
6484 is put in the value stack. If 'has_init' is 2, a special parsing
6485 is done to handle string constants. */
6486 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6487 int has_init, int v, int scope)
6489 int size, align, addr, data_offset;
6490 int level;
6491 ParseState saved_parse_state = {0};
6492 TokenString *init_str = NULL;
6493 Section *sec;
6494 Sym *flexible_array;
6496 flexible_array = NULL;
6497 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6498 Sym *field = type->ref->next;
6499 if (field) {
6500 while (field->next)
6501 field = field->next;
6502 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6503 flexible_array = field;
6507 size = type_size(type, &align);
6508 /* If the size is unknown, we must evaluate it before
6509 evaluating the initializers because
6510 initializers can generate global data too
6511 (e.g. string pointers or ISOC99 compound
6512 literals). It also simplifies local
6513 initializer handling */
6514 if (size < 0 || (flexible_array && has_init)) {
6515 if (!has_init)
6516 tcc_error("unknown type size");
6517 /* get all init string */
6518 init_str = tok_str_alloc();
6519 if (has_init == 2) {
6520 /* only get strings */
6521 while (tok == TOK_STR || tok == TOK_LSTR) {
6522 tok_str_add_tok(init_str);
6523 next();
6525 } else {
6526 level = 0;
6527 while (level > 0 || (tok != ',' && tok != ';')) {
6528 if (tok < 0)
6529 tcc_error("unexpected end of file in initializer");
6530 tok_str_add_tok(init_str);
6531 if (tok == '{')
6532 level++;
6533 else if (tok == '}') {
6534 level--;
6535 if (level <= 0) {
6536 next();
6537 break;
6540 next();
6543 tok_str_add(init_str, -1);
6544 tok_str_add(init_str, 0);
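/* the initializer tokens were recorded above so that they can be parsed
   twice: a first pass with size_only computes the object size, then the
   saved stream is replayed to actually emit the data */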
6546 /* compute size */
6547 save_parse_state(&saved_parse_state);
6549 begin_macro(init_str, 1);
6550 next();
6551 decl_initializer(type, NULL, 0, 1, 1);
6552 /* prepare second initializer parsing */
6553 macro_ptr = init_str->str;
6554 next();
6556 /* if still unknown size, error */
6557 size = type_size(type, &align);
6558 if (size < 0)
6559 tcc_error("unknown type size");
6561 /* If there's a flex member and it was used in the initializer
6562 adjust size. */
6563 if (flexible_array &&
6564 flexible_array->type.ref->c > 0)
6565 size += flexible_array->type.ref->c
6566 * pointed_size(&flexible_array->type);
6567 /* take into account specified alignment if bigger */
6568 if (ad->a.aligned) {
6569 int speca = 1 << (ad->a.aligned - 1);
6570 if (speca > align)
6571 align = speca;
6572 } else if (ad->a.packed) {
6573 align = 1;
6575 if ((r & VT_VALMASK) == VT_LOCAL) {
6576 sec = NULL;
6577 #ifdef CONFIG_TCC_BCHECK
6578 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6579 loc--;
6581 #endif
6582 loc = (loc - size) & -align;
6583 addr = loc;
6584 #ifdef CONFIG_TCC_BCHECK
6585 /* handles bounds */
6586 /* XXX: currently, since we do only one pass, we cannot track
6587 '&' operators, so we add only arrays */
6588 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6589 addr_t *bounds_ptr;
6590 /* add padding between regions */
6591 loc--;
6592 /* then add local bound info */
6593 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6594 bounds_ptr[0] = addr;
6595 bounds_ptr[1] = size;
6597 #endif
6598 if (v) {
6599 /* local variable */
6600 #ifdef CONFIG_TCC_ASM
6601 if (ad->asm_label) {
6602 int reg = asm_parse_regvar(ad->asm_label);
6603 if (reg >= 0)
6604 r = (r & ~VT_VALMASK) | reg;
6606 #endif
6607 sym_push(v, type, r, addr);
6608 } else {
6609 /* push local reference */
6610 vset(type, r, addr);
6612 } else {
6613 Sym *sym;
6615 sym = NULL;
6616 if (v && scope == VT_CONST) {
6617 /* see if the symbol was already defined */
6618 sym = sym_find(v);
6619 if (sym) {
6620 if (!is_compatible_types(&sym->type, type))
6621 tcc_error("incompatible types for redefinition of '%s'",
6622 get_tok_str(v, NULL));
6623 if (sym->type.t & VT_EXTERN) {
6624 /* if the variable is extern, it was not allocated */
6625 sym->type.t &= ~VT_EXTERN;
6626 /* set array size if it was omitted in extern
6627 declaration */
6628 if ((sym->type.t & VT_ARRAY) &&
6629 sym->type.ref->c < 0 &&
6630 type->ref->c >= 0)
6631 sym->type.ref->c = type->ref->c;
6632 } else {
6633 /* we accept several definitions of the same
6634 global variable. This is tricky, because we
6635 must play with the SHN_COMMON type of the symbol */
6636 /* XXX: should check if the variable was already
6637 initialized. It is incorrect to initialize it
6638 twice */
6639 /* no init data, we won't add more to the symbol */
6640 if (!has_init)
6641 goto no_alloc;
6646 /* allocate symbol in corresponding section */
6647 sec = ad->section;
6648 if (!sec) {
6649 if (has_init)
6650 sec = data_section;
6651 else if (tcc_state->nocommon)
6652 sec = bss_section;
6654 if (sec) {
6655 data_offset = sec->data_offset;
6656 data_offset = (data_offset + align - 1) & -align;
6657 addr = data_offset;
6658 /* very important to increment global pointer at this time
6659 because initializers themselves can create new initializers */
6660 data_offset += size;
6661 #ifdef CONFIG_TCC_BCHECK
6662 /* add padding if bound check */
6663 if (tcc_state->do_bounds_check)
6664 data_offset++;
6665 #endif
6666 sec->data_offset = data_offset;
6667 /* allocate section space to put the data */
6668 if (sec->sh_type != SHT_NOBITS &&
6669 data_offset > sec->data_allocated)
6670 section_realloc(sec, data_offset);
6671 /* align section if needed */
6672 if (align > sec->sh_addralign)
6673 sec->sh_addralign = align;
6674 } else {
6675 addr = 0; /* avoid warning */
6676 }
6678 if (v) {
6679 if (scope != VT_CONST || !sym) {
6680 sym = sym_push(v, type, r | VT_SYM, 0);
6681 sym->asm_label = ad->asm_label;
6682 }
6683 /* update symbol definition */
6684 if (sec) {
6685 put_extern_sym(sym, sec, addr, size);
6686 } else {
6687 ElfW(Sym) *esym;
6688 /* put a common area */
6689 put_extern_sym(sym, NULL, align, size);
6690 /* XXX: find a nicer way */
6691 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6692 esym->st_shndx = SHN_COMMON;
6693 }
6694 } else {
6695 /* push global reference */
6696 sym = get_sym_ref(type, sec, addr, size);
6697 vpushsym(type, sym);
6698 }
6699 /* patch symbol weakness */
6700 if (type->t & VT_WEAK)
6701 weaken_symbol(sym);
6702 apply_visibility(sym, type);
6703 #ifdef CONFIG_TCC_BCHECK
6704 /* handle bounds now because the symbol must be defined
6705 before the relocation can reference it */
6706 if (tcc_state->do_bounds_check) {
6707 addr_t *bounds_ptr;
6709 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6710 /* then add global bound info */
6711 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6712 bounds_ptr[0] = 0; /* relocated */
6713 bounds_ptr[1] = size;
6714 }
6715 #endif
6716 }
6717 if (type->t & VT_VLA) {
6718 int a;
6720 /* save current stack pointer */
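/* on the first VLA in scope, remember the stack pointer as it was before any VLA so it can be restored when the scope ends */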
6721 if (vlas_in_scope == 0) {
6722 if (vla_sp_root_loc == -1)
6723 vla_sp_root_loc = (loc -= PTR_SIZE);
6724 gen_vla_sp_save(vla_sp_root_loc);
6725 }
6727 vla_runtime_type_size(type, &a);
6728 gen_vla_alloc(type, a);
6729 gen_vla_sp_save(addr);
6730 vla_sp_loc = addr;
6731 vlas_in_scope++;
6732 } else if (has_init) {
6733 size_t oldreloc_offset = 0;
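/* record the section's current relocation offset so that squeeze_multi_relocs() only compacts the relocations added by this initializer */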
6734 if (sec && sec->reloc)
6735 oldreloc_offset = sec->reloc->data_offset;
6736 decl_initializer(type, sec, addr, 1, 0);
6737 if (sec && sec->reloc)
6738 squeeze_multi_relocs(sec, oldreloc_offset);
6739 /* patch flexible array member size back to -1, */
6740 /* for possible subsequent similar declarations */
6741 if (flexible_array)
6742 flexible_array->type.ref->c = -1;
6743 }
6744 no_alloc: ;
6745 /* restore parse state if needed */
6746 if (init_str) {
6747 end_macro();
6748 restore_parse_state(&saved_parse_state);
6749 }
6750 }
6752 static void put_func_debug(Sym *sym)
6753 {
6754 char buf[512];
6756 /* stabs info */
6757 /* XXX: we put here a dummy type */
6758 snprintf(buf, sizeof(buf), "%s:%c1",
6759 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6760 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6761 cur_text_section, sym->c);
6762 /* //gr gdb wants a line at the function */
6763 put_stabn(N_SLINE, 0, file->line_num, 0);
6764 last_ind = 0;
6765 last_line_num = 0;
6766 }
6768 /* parse an old style function declaration list */
6769 /* XXX: check for duplicate parameter declarations */
6770 static void func_decl_list(Sym *func_sym)
6771 {
6772 AttributeDef ad;
6773 int v;
6774 Sym *s;
6775 CType btype, type;
6777 /* parse each declaration */
6778 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6779 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6780 if (!parse_btype(&btype, &ad))
6781 expect("declaration list");
6782 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6783 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6784 tok == ';') {
6785 /* we accept no variable after */
6786 } else {
6787 for(;;) {
6788 type = btype;
6789 type_decl(&type, &ad, &v, TYPE_DIRECT);
6790 /* find parameter in function parameter list */
6791 s = func_sym->next;
6792 while (s != NULL) {
6793 if ((s->v & ~SYM_FIELD) == v)
6794 goto found;
6795 s = s->next;
6796 }
6797 tcc_error("declaration for parameter '%s' but no such parameter",
6798 get_tok_str(v, NULL));
6799 found:
6800 /* check that no storage specifier except 'register' was given */
6801 if (type.t & VT_STORAGE)
6802 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6803 convert_parameter_type(&type);
6804 /* we can add the type (NOTE: it could be local to the function) */
6805 s->type = type;
6806 /* accept other parameters */
6807 if (tok == ',')
6808 next();
6809 else
6810 break;
6811 }
6812 }
6813 skip(';');
6814 }
6815 }
6817 /* parse a function defined by symbol 'sym' and generate its code in
6818 'cur_text_section' */
6819 static void gen_function(Sym *sym)
6820 {
6821 nocode_wanted = 0;
6822 ind = cur_text_section->data_offset;
6823 /* NOTE: we patch the symbol size later */
6824 put_extern_sym(sym, cur_text_section, ind, 0);
6825 funcname = get_tok_str(sym->v, NULL);
6826 func_ind = ind;
6827 /* Initialize VLA state */
6828 vla_sp_loc = -1;
6829 vla_sp_root_loc = -1;
6830 /* put debug symbol */
6831 if (tcc_state->do_debug)
6832 put_func_debug(sym);
6834 /* push a dummy symbol to enable local sym storage */
6835 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6836 local_scope = 1; /* for function parameters */
6837 gfunc_prolog(&sym->type);
6838 local_scope = 0;
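/* rsym collects the forward jumps emitted by 'return' statements; gsym() resolves them to the epilogue below */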
6840 rsym = 0;
6841 block(NULL, NULL, 0);
6842 nocode_wanted = 0;
6843 gsym(rsym);
6844 gfunc_epilog();
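/* write the end of the generated code back into the text section */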
6845 cur_text_section->data_offset = ind;
6846 label_pop(&global_label_stack, NULL);
6847 /* reset local stack */
6848 local_scope = 0;
6849 sym_pop(&local_stack, NULL, 0);
6850 /* end of function */
6851 /* patch symbol size */
6852 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6853 ind - func_ind;
6854 /* patch symbol weakness (this definition overrules any prototype) */
6855 if (sym->type.t & VT_WEAK)
6856 weaken_symbol(sym);
6857 apply_visibility(sym, &sym->type);
6858 if (tcc_state->do_debug) {
6859 put_stabn(N_FUN, 0, 0, ind - func_ind);
6860 }
6861 /* It's better to crash than to generate wrong code */
6862 cur_text_section = NULL;
6863 funcname = ""; /* for safety */
6864 func_vt.t = VT_VOID; /* for safety */
6865 func_var = 0; /* for safety */
6866 ind = 0; /* for safety */
6867 nocode_wanted = 1;
6868 check_vstack();
6869 }
6871 static void gen_inline_functions(TCCState *s)
6872 {
6873 Sym *sym;
6874 int inline_generated, i, ln;
6875 struct InlineFunc *fn;
6877 ln = file->line_num;
6878 /* iterate while inline functions are referenced */
6879 for(;;) {
6880 inline_generated = 0;
6881 for (i = 0; i < s->nb_inline_fns; ++i) {
6882 fn = s->inline_fns[i];
6883 sym = fn->sym;
6884 if (sym && sym->c) {
6885 /* the function was used: generate its code and
6886 convert it to a normal function */
6887 fn->sym = NULL;
6888 if (file)
6889 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6890 sym->r = VT_SYM | VT_CONST;
6891 sym->type.t &= ~VT_INLINE;
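/* replay the saved body tokens as a macro stream and compile the function now */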
6893 begin_macro(fn->func_str, 1);
6894 next();
6895 cur_text_section = text_section;
6896 gen_function(sym);
6897 end_macro();
6899 inline_generated = 1;
6900 }
6901 }
6902 if (!inline_generated)
6903 break;
6904 }
6905 file->line_num = ln;
6906 }
6908 ST_FUNC void free_inline_functions(TCCState *s)
6909 {
6910 int i;
6911 /* free tokens of unused inline functions */
6912 for (i = 0; i < s->nb_inline_fns; ++i) {
6913 struct InlineFunc *fn = s->inline_fns[i];
6914 if (fn->sym)
6915 tok_str_free(fn->func_str);
6916 }
6917 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6918 }
6920 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6921 static int decl0(int l, int is_for_loop_init)
6922 {
6923 int v, has_init, r;
6924 CType type, btype;
6925 Sym *sym;
6926 AttributeDef ad;
6928 while (1) {
6929 if (!parse_btype(&btype, &ad)) {
6930 if (is_for_loop_init)
6931 return 0;
6932 /* skip redundant ';' */
6933 /* XXX: find more elegant solution */
6934 if (tok == ';') {
6935 next();
6936 continue;
6937 }
6938 if (l == VT_CONST &&
6939 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6940 /* global asm block */
6941 asm_global_instr();
6942 continue;
6943 }
6944 /* special test for old K&R protos without explicit int
6945 type. Only accepted when defining global data */
6946 if (l == VT_LOCAL || tok < TOK_UIDENT)
6947 break;
6948 btype.t = VT_INT;
6949 }
6950 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6951 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6952 tok == ';') {
6953 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6954 int v = btype.ref->v;
6955 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6956 tcc_warning("unnamed struct/union that defines no instances");
6957 }
6958 next();
6959 continue;
6960 }
6961 while (1) { /* iterate thru each declaration */
6962 type = btype;
6963 /* If the base type itself was an array type of unspecified
6964 size (like in 'typedef int arr[]; arr x = {1};') then
6965 we will overwrite the unknown size with the real one for
6966 this decl. We need to unshare the ref symbol holding
6967 that size. */
6968 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6969 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6970 }
6971 type_decl(&type, &ad, &v, TYPE_DIRECT);
6972 #if 0
6973 {
6974 char buf[500];
6975 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
6976 printf("type = '%s'\n", buf);
6977 }
6978 #endif
6979 if ((type.t & VT_BTYPE) == VT_FUNC) {
6980 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6981 tcc_error("function without file scope cannot be static");
6982 }
6983 /* if old style function prototype, we accept a
6984 declaration list */
6985 sym = type.ref;
6986 if (sym->c == FUNC_OLD)
6987 func_decl_list(sym);
6988 }
6990 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6991 ad.asm_label = asm_label_instr();
6992 /* parse one last attribute list, after asm label */
6993 parse_attribute(&ad);
6994 if (tok == '{')
6995 expect(";");
6996 }
6998 if (ad.a.weak)
6999 type.t |= VT_WEAK;
7000 #ifdef TCC_TARGET_PE
7001 if (ad.a.func_import)
7002 type.t |= VT_IMPORT;
7003 if (ad.a.func_export)
7004 type.t |= VT_EXPORT;
7005 #endif
7006 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7008 if (tok == '{') {
7009 if (l == VT_LOCAL)
7010 tcc_error("cannot use local functions");
7011 if ((type.t & VT_BTYPE) != VT_FUNC)
7012 expect("function definition");
7014 /* reject abstract declarators in function definition */
7015 sym = type.ref;
7016 while ((sym = sym->next) != NULL)
7017 if (!(sym->v & ~SYM_FIELD))
7018 expect("identifier");
7020 /* XXX: cannot do better now: convert extern inline to static inline */
7021 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7022 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7024 sym = sym_find(v);
7025 if (sym) {
7026 Sym *ref;
7027 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7028 goto func_error1;
7030 ref = sym->type.ref;
7031 if (0 == ref->a.func_proto)
7032 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7034 /* use func_call from prototype if not defined */
7035 if (ref->a.func_call != FUNC_CDECL
7036 && type.ref->a.func_call == FUNC_CDECL)
7037 type.ref->a.func_call = ref->a.func_call;
7039 /* use export from prototype */
7040 if (ref->a.func_export)
7041 type.ref->a.func_export = 1;
7043 /* use static from prototype */
7044 if (sym->type.t & VT_STATIC)
7045 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7047 /* If the definition has no visibility use the
7048 one from prototype. */
7049 if (! (type.t & VT_VIS_MASK))
7050 type.t |= sym->type.t & VT_VIS_MASK;
7052 if (!is_compatible_types(&sym->type, &type)) {
7053 func_error1:
7054 tcc_error("incompatible types for redefinition of '%s'",
7055 get_tok_str(v, NULL));
7056 }
7057 type.ref->a.func_proto = 0;
7058 /* if symbol is already defined, then put complete type */
7059 sym->type = type;
7060 } else {
7061 /* put function symbol */
7062 sym = global_identifier_push(v, type.t, 0);
7063 sym->type.ref = type.ref;
7064 }
7066 /* static inline functions are just recorded as a kind
7067 of macro. Their code will be emitted at the end of
7068 the compilation unit only if they are used */
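/* (the token string saved below is replayed later by gen_inline_functions()) */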
7069 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7070 (VT_INLINE | VT_STATIC)) {
7071 int block_level;
7072 struct InlineFunc *fn;
7073 const char *filename;
7075 filename = file ? file->filename : "";
7076 fn = tcc_malloc(sizeof *fn + strlen(filename));
7077 strcpy(fn->filename, filename);
7078 fn->sym = sym;
7079 fn->func_str = tok_str_alloc();
7081 block_level = 0;
7082 for(;;) {
7083 int t;
7084 if (tok == TOK_EOF)
7085 tcc_error("unexpected end of file");
7086 tok_str_add_tok(fn->func_str);
7087 t = tok;
7088 next();
7089 if (t == '{') {
7090 block_level++;
7091 } else if (t == '}') {
7092 block_level--;
7093 if (block_level == 0)
7094 break;
7095 }
7096 }
7097 tok_str_add(fn->func_str, -1);
7098 tok_str_add(fn->func_str, 0);
7099 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7101 } else {
7102 /* compute text section */
7103 cur_text_section = ad.section;
7104 if (!cur_text_section)
7105 cur_text_section = text_section;
7106 sym->r = VT_SYM | VT_CONST;
7107 gen_function(sym);
7108 }
7109 break;
7110 } else {
7111 if (btype.t & VT_TYPEDEF) {
7112 /* save typedefed type */
7113 /* XXX: test storage specifiers ? */
7114 sym = sym_find(v);
7115 if (sym && sym->scope == local_scope) {
7116 if (!is_compatible_types(&sym->type, &type)
7117 || !(sym->type.t & VT_TYPEDEF))
7118 tcc_error("incompatible redefinition of '%s'",
7119 get_tok_str(v, NULL));
7120 sym->type = type;
7121 } else {
7122 sym = sym_push(v, &type, 0, 0);
7123 }
7124 sym->a = ad.a;
7125 sym->type.t |= VT_TYPEDEF;
7126 } else {
7127 r = 0;
7128 if ((type.t & VT_BTYPE) == VT_FUNC) {
7129 /* external function definition */
7130 /* specific case for func_call attribute */
7131 ad.a.func_proto = 1;
7132 type.ref->a = ad.a;
7133 } else if (!(type.t & VT_ARRAY)) {
7134 /* not lvalue if array */
7135 r |= lvalue_type(type.t);
7136 }
7137 has_init = (tok == '=');
7138 if (has_init && (type.t & VT_VLA))
7139 tcc_error("variable length array cannot be initialized");
7140 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7141 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7142 !has_init && l == VT_CONST && type.ref->c < 0)) {
7143 /* external variable or function */
7144 /* NOTE: as in GCC, uninitialized global static
7145 arrays of unknown size are considered
7146 extern */
7147 sym = external_sym(v, &type, r);
7148 sym->asm_label = ad.asm_label;
7150 if (ad.alias_target) {
7151 Section tsec;
7152 ElfW(Sym) *esym;
7153 Sym *alias_target;
7155 alias_target = sym_find(ad.alias_target);
7156 if (!alias_target || !alias_target->c)
7157 tcc_error("unsupported forward __alias__ attribute");
7158 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7159 tsec.sh_num = esym->st_shndx;
7160 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7161 }
7162 } else {
7163 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7164 if (type.t & VT_STATIC)
7165 r |= VT_CONST;
7166 else
7167 r |= l;
7168 if (has_init)
7169 next();
7170 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7171 }
7172 }
7173 if (tok != ',') {
7174 if (is_for_loop_init)
7175 return 1;
7176 skip(';');
7177 break;
7178 }
7179 next();
7180 }
7181 ad.a.aligned = 0;
7182 }
7184 return 0;
7185 }
7187 ST_FUNC void decl(int l)
7188 {
7189 decl0(l, 0);
7190 }
7192 /* ------------------------------------------------------------------------- */