[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int v1, v2, sym;
66 } **p; int n; /* list of case ranges */
67 int def_sym; /* default symbol */
68 } *cur_switch; /* current switch */
70 /* ------------------------------------------------------------------------- */
71 static void gen_cast(CType *type);
72 static inline CType *pointed_type(CType *type);
73 static int is_compatible_types(CType *type1, CType *type2);
74 static int parse_btype(CType *type, AttributeDef *ad);
75 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
76 static void parse_expr_type(CType *type);
77 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
78 static void block(int *bsym, int *csym, int is_expr);
79 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
80 static int decl0(int l, int is_for_loop_init);
81 static void expr_eq(void);
82 static void expr_lor_const(void);
83 static void unary_type(CType *type);
84 static void vla_runtime_type_size(CType *type, int *a);
85 static void vla_sp_restore(void);
86 static void vla_sp_restore_root(void);
87 static int is_compatible_parameter_types(CType *type1, CType *type2);
88 static void expr_type(CType *type);
89 ST_FUNC void vpush64(int ty, unsigned long long v);
90 ST_FUNC void vpush(CType *type);
91 ST_FUNC int gvtst(int inv, int t);
92 ST_FUNC int is_btype_size(int bt);
93 static void gen_inline_functions(TCCState *s);
95 ST_INLN int is_float(int t)
97 int bt;
98 bt = t & VT_BTYPE;
99 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
102 /* we use our own 'finite' function to avoid potential problems with
103 non standard math libs */
104 /* XXX: endianness dependent */
105 ST_FUNC int ieee_finite(double d)
107 int p[4];
108 memcpy(p, &d, sizeof(double));
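/* note: p[1] holds the high word of the double on the little-endian
   layouts this targets. OR-ing with 0x800fffff sets every bit except
   the 11-bit exponent field; adding 1 then wraps the word to 0 only
   when the exponent is all ones (Inf/NaN), so the unsigned >> 31
   yields 1 for finite values and 0 otherwise. */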
109 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
112 ST_FUNC void test_lvalue(void)
114 if (!(vtop->r & VT_LVAL))
115 expect("lvalue");
118 ST_FUNC void check_vstack(void)
120 if (pvtop != vtop)
121 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
124 /* ------------------------------------------------------------------------- */
125 /* vstack debugging aid */
127 #if 0
128 void pv (const char *lbl, int a, int b)
130 int i;
131 for (i = a; i < a + b; ++i) {
132 SValue *p = &vtop[-i];
133 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
134 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
137 #endif
139 /* ------------------------------------------------------------------------- */
140 ST_FUNC void tccgen_start(TCCState *s1)
142 cur_text_section = NULL;
143 funcname = "";
144 anon_sym = SYM_FIRST_ANOM;
145 section_sym = 0;
146 nocode_wanted = 1;
148 /* define some often used types */
149 int_type.t = VT_INT;
150 char_pointer_type.t = VT_BYTE;
151 mk_pointer(&char_pointer_type);
152 #if PTR_SIZE == 4
153 size_type.t = VT_INT;
154 #else
155 size_type.t = VT_LLONG;
156 #endif
157 func_old_type.t = VT_FUNC;
158 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
160 if (s1->do_debug) {
161 char buf[512];
163 /* file info: full path + filename */
164 section_sym = put_elf_sym(symtab_section, 0, 0,
165 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
166 text_section->sh_num, NULL);
167 getcwd(buf, sizeof(buf));
168 #ifdef _WIN32
169 normalize_slashes(buf);
170 #endif
171 pstrcat(buf, sizeof(buf), "/");
172 put_stabs_r(buf, N_SO, 0, 0,
173 text_section->data_offset, text_section, section_sym);
174 put_stabs_r(file->filename, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
177 /* an ELF symbol of type STT_FILE must be emitted so that STB_LOCAL
178 symbols can be safely used */
179 put_elf_sym(symtab_section, 0, 0,
180 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
181 SHN_ABS, file->filename);
183 #ifdef TCC_TARGET_ARM
184 arm_init(s1);
185 #endif
188 ST_FUNC void tccgen_end(TCCState *s1)
190 gen_inline_functions(s1);
191 check_vstack();
192 /* end of translation unit info */
193 if (s1->do_debug) {
194 put_stabs_r(NULL, N_SO, 0, 0,
195 text_section->data_offset, text_section, section_sym);
199 /* ------------------------------------------------------------------------- */
200 /* update sym->c so that it points to an external symbol in section
201 'section' with value 'value' */
203 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
204 addr_t value, unsigned long size,
205 int can_add_underscore)
207 int sym_type, sym_bind, sh_num, info, other;
208 ElfW(Sym) *esym;
209 const char *name;
210 char buf1[256];
212 #ifdef CONFIG_TCC_BCHECK
213 char buf[32];
214 #endif
216 if (section == NULL)
217 sh_num = SHN_UNDEF;
218 else if (section == SECTION_ABS)
219 sh_num = SHN_ABS;
220 else
221 sh_num = section->sh_num;
223 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
224 sym_type = STT_FUNC;
225 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
226 sym_type = STT_NOTYPE;
227 } else {
228 sym_type = STT_OBJECT;
231 if (sym->type.t & VT_STATIC)
232 sym_bind = STB_LOCAL;
233 else {
234 if (sym->type.t & VT_WEAK)
235 sym_bind = STB_WEAK;
236 else
237 sym_bind = STB_GLOBAL;
240 if (!sym->c) {
241 name = get_tok_str(sym->v, NULL);
242 #ifdef CONFIG_TCC_BCHECK
243 if (tcc_state->do_bounds_check) {
244 /* XXX: avoid doing that for statics ? */
245 /* if bound checking is activated, we change some function
246 names by adding the "__bound" prefix */
247 switch(sym->v) {
248 #ifdef TCC_TARGET_PE
249 /* XXX: we rely only on malloc hooks */
250 case TOK_malloc:
251 case TOK_free:
252 case TOK_realloc:
253 case TOK_memalign:
254 case TOK_calloc:
255 #endif
256 case TOK_memcpy:
257 case TOK_memmove:
258 case TOK_memset:
259 case TOK_strlen:
260 case TOK_strcpy:
261 case TOK_alloca:
262 strcpy(buf, "__bound_");
263 strcat(buf, name);
264 name = buf;
265 break;
268 #endif
269 other = 0;
271 #ifdef TCC_TARGET_PE
272 if (sym->type.t & VT_EXPORT)
273 other |= ST_PE_EXPORT;
274 if (sym_type == STT_FUNC && sym->type.ref) {
275 Sym *ref = sym->type.ref;
276 if (ref->a.func_export)
277 other |= ST_PE_EXPORT;
278 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
279 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
280 name = buf1;
281 other |= ST_PE_STDCALL;
282 can_add_underscore = 0;
284 } else {
285 if (find_elf_sym(tcc_state->dynsymtab_section, name))
286 other |= ST_PE_IMPORT;
287 if (sym->type.t & VT_IMPORT)
288 other |= ST_PE_IMPORT;
290 #else
291 if (! (sym->type.t & VT_STATIC))
292 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
293 #endif
294 if (tcc_state->leading_underscore && can_add_underscore) {
295 buf1[0] = '_';
296 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
297 name = buf1;
299 if (sym->asm_label) {
300 name = get_tok_str(sym->asm_label, NULL);
302 info = ELFW(ST_INFO)(sym_bind, sym_type);
303 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
304 } else {
305 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
306 esym->st_value = value;
307 esym->st_size = size;
308 esym->st_shndx = sh_num;
312 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
313 addr_t value, unsigned long size)
315 put_extern_sym2(sym, section, value, size, 1);
318 /* add a new relocation entry to symbol 'sym' in section 's' */
319 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
320 addr_t addend)
322 int c = 0;
323 if (sym) {
324 if (0 == sym->c)
325 put_extern_sym(sym, NULL, 0, 0);
326 c = sym->c;
328 /* now we can add ELF relocation info */
329 put_elf_reloca(symtab_section, s, offset, type, c, addend);
332 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
334 greloca(s, sym, offset, type, 0);
337 /* ------------------------------------------------------------------------- */
338 /* symbol allocator */
339 static Sym *__sym_malloc(void)
341 Sym *sym_pool, *sym, *last_sym;
342 int i;
344 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
345 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
347 last_sym = sym_free_first;
348 sym = sym_pool;
349 for(i = 0; i < SYM_POOL_NB; i++) {
350 sym->next = last_sym;
351 last_sym = sym;
352 sym++;
354 sym_free_first = last_sym;
355 return last_sym;
358 static inline Sym *sym_malloc(void)
360 Sym *sym;
361 #ifndef SYM_DEBUG
362 sym = sym_free_first;
363 if (!sym)
364 sym = __sym_malloc();
365 sym_free_first = sym->next;
366 return sym;
367 #else
368 sym = tcc_malloc(sizeof(Sym));
369 return sym;
370 #endif
373 ST_INLN void sym_free(Sym *sym)
375 #ifndef SYM_DEBUG
376 sym->next = sym_free_first;
377 sym_free_first = sym;
378 #else
379 tcc_free(sym);
380 #endif
383 /* push, without hashing */
384 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
386 Sym *s;
388 s = sym_malloc();
389 s->asm_label = 0;
390 s->v = v;
391 s->type.t = t;
392 s->type.ref = NULL;
393 #ifdef _WIN64
394 s->d = NULL;
395 #endif
396 s->c = c;
397 s->next = NULL;
398 /* add in stack */
399 s->prev = *ps;
400 *ps = s;
401 return s;
404 /* find a symbol and return its associated structure. 's' is the top
405 of the symbol stack */
406 ST_FUNC Sym *sym_find2(Sym *s, int v)
408 while (s) {
409 if (s->v == v)
410 return s;
411 else if (s->v == -1)
412 return NULL;
413 s = s->prev;
415 return NULL;
418 /* structure lookup */
419 ST_INLN Sym *struct_find(int v)
421 v -= TOK_IDENT;
422 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
423 return NULL;
424 return table_ident[v]->sym_struct;
427 /* find an identifier */
428 ST_INLN Sym *sym_find(int v)
430 v -= TOK_IDENT;
431 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
432 return NULL;
433 return table_ident[v]->sym_identifier;
436 /* push a given symbol on the symbol stack */
437 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
439 Sym *s, **ps;
440 TokenSym *ts;
442 if (local_stack)
443 ps = &local_stack;
444 else
445 ps = &global_stack;
446 s = sym_push2(ps, v, type->t, c);
447 s->type.ref = type->ref;
448 s->r = r;
449 /* don't record fields or anonymous symbols */
450 /* XXX: simplify */
451 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
452 /* record symbol in token array */
453 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
454 if (v & SYM_STRUCT)
455 ps = &ts->sym_struct;
456 else
457 ps = &ts->sym_identifier;
458 s->prev_tok = *ps;
459 *ps = s;
460 s->scope = local_scope;
461 if (s->prev_tok && s->prev_tok->scope == s->scope)
462 tcc_error("redeclaration of '%s'",
463 get_tok_str(v & ~SYM_STRUCT, NULL));
465 return s;
468 /* push a global identifier */
469 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
471 Sym *s, **ps;
472 s = sym_push2(&global_stack, v, t, c);
473 /* don't record anonymous symbol */
474 if (v < SYM_FIRST_ANOM) {
475 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
476 /* modify the top most local identifier, so that
477 sym_identifier will point to 's' when popped */
478 while (*ps != NULL)
479 ps = &(*ps)->prev_tok;
480 s->prev_tok = NULL;
481 *ps = s;
483 return s;
486 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
487 pop them yet from the list, but do remove them from the token array. */
488 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
490 Sym *s, *ss, **ps;
491 TokenSym *ts;
492 int v;
494 s = *ptop;
495 while(s != b) {
496 ss = s->prev;
497 v = s->v;
498 /* remove symbol in token array */
499 /* XXX: simplify */
500 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
501 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
502 if (v & SYM_STRUCT)
503 ps = &ts->sym_struct;
504 else
505 ps = &ts->sym_identifier;
506 *ps = s->prev_tok;
508 if (!keep)
509 sym_free(s);
510 s = ss;
512 if (!keep)
513 *ptop = b;
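/* force a symbol to weak binding; if it was already written to the ELF
   symbol table, patch the existing entry's st_info as well */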
516 static void weaken_symbol(Sym *sym)
518 sym->type.t |= VT_WEAK;
519 if (sym->c > 0) {
520 int esym_type;
521 ElfW(Sym) *esym;
523 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
524 esym_type = ELFW(ST_TYPE)(esym->st_info);
525 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
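/* merge the visibility requested by 'type' into 'sym': a default
   visibility is overridden by an explicit one, and when both are
   explicit the numerically smaller (more restrictive) value wins;
   an already emitted ELF symbol gets its st_other patched too */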
529 static void apply_visibility(Sym *sym, CType *type)
531 int vis = sym->type.t & VT_VIS_MASK;
532 int vis2 = type->t & VT_VIS_MASK;
533 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
534 vis = vis2;
535 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
537 else
538 vis = (vis < vis2) ? vis : vis2;
539 sym->type.t &= ~VT_VIS_MASK;
540 sym->type.t |= vis;
542 if (sym->c > 0) {
543 ElfW(Sym) *esym;
545 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
546 vis >>= VT_VIS_SHIFT;
547 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
551 /* ------------------------------------------------------------------------- */
553 ST_FUNC void swap(int *p, int *q)
555 int t;
556 t = *p;
557 *p = *q;
558 *q = t;
561 static void vsetc(CType *type, int r, CValue *vc)
563 int v;
565 if (vtop >= vstack + (VSTACK_SIZE - 1))
566 tcc_error("memory full (vstack)");
567 /* we cannot leave CPU flags live if other instructions are generated. Also
568 avoid leaving VT_JMP anywhere except on the top of the stack
569 because it would complicate the code generator. */
570 if (vtop >= vstack) {
571 v = vtop->r & VT_VALMASK;
572 if (v == VT_CMP || (v & ~1) == VT_JMP)
573 gv(RC_INT);
575 vtop++;
576 vtop->type = *type;
577 vtop->r = r;
578 vtop->r2 = VT_CONST;
579 vtop->c = *vc;
582 /* push a constant of type "type" with a dummy (uninitialized) value */
583 ST_FUNC void vpush(CType *type)
585 CValue cval;
586 vsetc(type, VT_CONST, &cval);
589 /* push integer constant */
590 ST_FUNC void vpushi(int v)
592 CValue cval;
593 cval.i = v;
594 vsetc(&int_type, VT_CONST, &cval);
597 /* push a pointer sized constant */
598 static void vpushs(addr_t v)
600 CValue cval;
601 cval.i = v;
602 vsetc(&size_type, VT_CONST, &cval);
605 /* push arbitrary 64bit constant */
606 ST_FUNC void vpush64(int ty, unsigned long long v)
608 CValue cval;
609 CType ctype;
610 ctype.t = ty;
611 ctype.ref = NULL;
612 cval.i = v;
613 vsetc(&ctype, VT_CONST, &cval);
616 /* push long long constant */
617 static inline void vpushll(long long v)
619 vpush64(VT_LLONG, v);
622 /* push a symbol value of TYPE */
623 static inline void vpushsym(CType *type, Sym *sym)
625 CValue cval;
626 cval.i = 0;
627 vsetc(type, VT_CONST | VT_SYM, &cval);
628 vtop->sym = sym;
631 /* Return a static symbol pointing to a section */
632 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
634 int v;
635 Sym *sym;
637 v = anon_sym++;
638 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
639 sym->type.ref = type->ref;
640 sym->r = VT_CONST | VT_SYM;
641 put_extern_sym(sym, sec, offset, size);
642 return sym;
645 /* push a reference to a section offset by adding a dummy symbol */
646 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
648 vpushsym(type, get_sym_ref(type, sec, offset, size));
651 /* define a new external reference to a symbol 'v' of type 'u' */
652 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
654 Sym *s;
656 s = sym_find(v);
657 if (!s) {
658 /* push forward reference */
659 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
660 s->type.ref = type->ref;
661 s->r = r | VT_CONST | VT_SYM;
663 return s;
666 /* define a new external reference to a symbol 'v' */
667 static Sym *external_sym(int v, CType *type, int r)
669 Sym *s;
671 s = sym_find(v);
672 if (!s) {
673 /* push forward reference */
674 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
675 s->type.t |= VT_EXTERN;
676 } else if (s->type.ref == func_old_type.ref) {
677 s->type.ref = type->ref;
678 s->r = r | VT_CONST | VT_SYM;
679 s->type.t |= VT_EXTERN;
680 } else if (!is_compatible_types(&s->type, type)) {
681 tcc_error("incompatible types for redefinition of '%s'",
682 get_tok_str(v, NULL));
684 /* Merge some storage attributes. */
685 if (type->t & VT_WEAK)
686 weaken_symbol(s);
688 if (type->t & VT_VIS_MASK)
689 apply_visibility(s, type);
691 return s;
694 /* push a reference to global symbol v */
695 ST_FUNC void vpush_global_sym(CType *type, int v)
697 vpushsym(type, external_global_sym(v, type, 0));
700 ST_FUNC void vset(CType *type, int r, int v)
702 CValue cval;
704 cval.i = v;
705 vsetc(type, r, &cval);
708 static void vseti(int r, int v)
710 CType type;
711 type.t = VT_INT;
712 type.ref = 0;
713 vset(&type, r, v);
716 ST_FUNC void vswap(void)
718 SValue tmp;
719 /* we cannot leave CPU flags live if other instructions are generated. Also
720 avoid leaving VT_JMP anywhere except on the top of the stack
721 because it would complicate the code generator. */
722 if (vtop >= vstack) {
723 int v = vtop->r & VT_VALMASK;
724 if (v == VT_CMP || (v & ~1) == VT_JMP)
725 gv(RC_INT);
727 tmp = vtop[0];
728 vtop[0] = vtop[-1];
729 vtop[-1] = tmp;
731 /* XXX: +2% overall speed possible with optimized memswap
733 * memswap(&vtop[0], &vtop[1], sizeof *vtop); */
737 ST_FUNC void vpushv(SValue *v)
739 if (vtop >= vstack + (VSTACK_SIZE - 1))
740 tcc_error("memory full (vstack)");
741 vtop++;
742 *vtop = *v;
745 static void vdup(void)
747 vpushv(vtop);
750 /* save registers up to (vtop - n) stack entry */
751 ST_FUNC void save_regs(int n)
753 SValue *p, *p1;
754 for(p = vstack, p1 = vtop - n; p <= p1; p++)
755 save_reg(p->r);
758 /* save r to the memory stack, and mark it as being free */
759 ST_FUNC void save_reg(int r)
761 save_reg_upstack(r, 0);
764 /* save r to the memory stack, and mark it as being free,
765 if seen up to (vtop - n) stack entry */
766 ST_FUNC void save_reg_upstack(int r, int n)
768 int l, saved, size, align;
769 SValue *p, *p1, sv;
770 CType *type;
772 if ((r &= VT_VALMASK) >= VT_CONST)
773 return;
775 /* modify all stack values */
776 saved = 0;
777 l = 0;
778 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
779 if ((p->r & VT_VALMASK) == r ||
780 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
781 /* must save value on stack if not already done */
782 if (!saved) {
783 /* NOTE: must reload 'r' because r might be equal to r2 */
784 r = p->r & VT_VALMASK;
785 /* store register in the stack */
786 type = &p->type;
787 if ((p->r & VT_LVAL) ||
788 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
789 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
790 type = &char_pointer_type;
791 #else
792 type = &int_type;
793 #endif
794 size = type_size(type, &align);
795 loc = (loc - size) & -align;
796 sv.type.t = type->t;
797 sv.r = VT_LOCAL | VT_LVAL;
798 sv.c.i = loc;
799 store(r, &sv);
800 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
801 /* x86 specific: need to pop fp register ST0 if saved */
802 if (r == TREG_ST0) {
803 o(0xd8dd); /* fstp %st(0) */
805 #endif
806 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
807 /* special long long case */
808 if ((type->t & VT_BTYPE) == VT_LLONG) {
809 sv.c.i += 4;
810 store(p->r2, &sv);
812 #endif
813 l = loc;
814 saved = 1;
816 /* mark that stack entry as being saved on the stack */
817 if (p->r & VT_LVAL) {
818 /* also clear the bounded flag because the
819 relocation address of the function was stored in
820 p->c.i */
821 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
822 } else {
823 p->r = lvalue_type(p->type.t) | VT_LOCAL;
825 p->r2 = VT_CONST;
826 p->c.i = l;
831 #ifdef TCC_TARGET_ARM
832 /* find a register of class 'rc2' with at most one reference on stack.
833 * If none, call get_reg(rc) */
834 ST_FUNC int get_reg_ex(int rc, int rc2)
836 int r;
837 SValue *p;
839 for(r=0;r<NB_REGS;r++) {
840 if (reg_classes[r] & rc2) {
841 int n;
842 n=0;
843 for(p = vstack; p <= vtop; p++) {
844 if ((p->r & VT_VALMASK) == r ||
845 (p->r2 & VT_VALMASK) == r)
846 n++;
848 if (n <= 1)
849 return r;
852 return get_reg(rc);
854 #endif
856 /* find a free register of class 'rc'. If none, save one register */
857 ST_FUNC int get_reg(int rc)
859 int r;
860 SValue *p;
862 /* find a free register */
863 for(r=0;r<NB_REGS;r++) {
864 if (reg_classes[r] & rc) {
865 for(p=vstack;p<=vtop;p++) {
866 if ((p->r & VT_VALMASK) == r ||
867 (p->r2 & VT_VALMASK) == r)
868 goto notfound;
870 return r;
872 notfound: ;
875 /* no register left : free the first one on the stack (VERY
876 IMPORTANT to start from the bottom to ensure that we don't
877 spill registers used in gen_opi()) */
878 for(p=vstack;p<=vtop;p++) {
879 /* look at second register (if long long) */
880 r = p->r2 & VT_VALMASK;
881 if (r < VT_CONST && (reg_classes[r] & rc))
882 goto save_found;
883 r = p->r & VT_VALMASK;
884 if (r < VT_CONST && (reg_classes[r] & rc)) {
885 save_found:
886 save_reg(r);
887 return r;
890 /* Should never come here */
891 return -1;
894 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
895 if needed */
896 static void move_reg(int r, int s, int t)
898 SValue sv;
900 if (r != s) {
901 save_reg(r);
902 sv.type.t = t;
903 sv.type.ref = NULL;
904 sv.r = s;
905 sv.c.i = 0;
906 load(r, &sv);
910 /* get address of vtop (vtop MUST BE an lvalue) */
911 ST_FUNC void gaddrof(void)
913 if (vtop->r & VT_REF && !nocode_wanted)
914 gv(RC_INT);
915 vtop->r &= ~VT_LVAL;
916 /* tricky: if saved lvalue, then we can go back to lvalue */
917 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
918 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
923 #ifdef CONFIG_TCC_BCHECK
924 /* generate lvalue bound code */
925 static void gbound(void)
927 int lval_type;
928 CType type1;
930 vtop->r &= ~VT_MUSTBOUND;
931 /* if lvalue, then use checking code before dereferencing */
932 if (vtop->r & VT_LVAL) {
933 /* if not VT_BOUNDED value, then make one */
934 if (!(vtop->r & VT_BOUNDED)) {
935 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
936 /* must save type because we must set it to int to get pointer */
937 type1 = vtop->type;
938 vtop->type.t = VT_PTR;
939 gaddrof();
940 vpushi(0);
941 gen_bounded_ptr_add();
942 vtop->r |= lval_type;
943 vtop->type = type1;
945 /* then check for dereferencing */
946 gen_bounded_ptr_deref();
949 #endif
951 /* store vtop in a register belonging to class 'rc'. lvalues are
952 converted to values. Cannot be used if the value cannot be converted
953 to a register value (such as structures). */
954 ST_FUNC int gv(int rc)
956 int r, bit_pos, bit_size, size, align, i;
957 int rc2;
959 /* NOTE: get_reg can modify vstack[] */
960 if (vtop->type.t & VT_BITFIELD) {
961 CType type;
962 int bits = 32;
963 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
964 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
965 /* remove bit field info to avoid loops */
966 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
967 /* cast to int to propagate signedness in following ops */
968 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
969 type.t = VT_LLONG;
970 bits = 64;
971 } else
972 type.t = VT_INT;
973 if((vtop->type.t & VT_UNSIGNED) ||
974 (vtop->type.t & VT_BTYPE) == VT_BOOL)
975 type.t |= VT_UNSIGNED;
976 gen_cast(&type);
977 /* generate shifts */
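/* e.g. a signed field with bit_pos == 3 and bit_size == 5 in a 32-bit
   word is extracted as (v << 24) >> 27: the left shift brings the
   field's top bit into the sign bit, the arithmetic right shift then
   sign-extends it back down to bit 0 */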
978 vpushi(bits - (bit_pos + bit_size));
979 gen_op(TOK_SHL);
980 vpushi(bits - bit_size);
981 /* NOTE: transformed to SHR if unsigned */
982 gen_op(TOK_SAR);
983 r = gv(rc);
984 } else {
985 if (is_float(vtop->type.t) &&
986 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
987 Sym *sym;
988 int *ptr;
989 unsigned long offset;
990 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
991 CValue check;
992 #endif
994 /* XXX: unify with initializers handling ? */
995 /* CPUs usually cannot use float constants, so we store them
996 generically in data segment */
997 size = type_size(&vtop->type, &align);
998 offset = (data_section->data_offset + align - 1) & -align;
999 data_section->data_offset = offset;
1000 /* XXX: not portable yet */
1001 #if defined(__i386__) || defined(__x86_64__)
1002 /* Zero pad x87 tenbyte long doubles */
1003 if (size == LDOUBLE_SIZE) {
1004 vtop->c.tab[2] &= 0xffff;
1005 #if LDOUBLE_SIZE == 16
1006 vtop->c.tab[3] = 0;
1007 #endif
1009 #endif
1010 ptr = section_ptr_add(data_section, size);
1011 size = size >> 2;
1012 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1013 check.d = 1;
1014 if(check.tab[0])
1015 for(i=0;i<size;i++)
1016 ptr[i] = vtop->c.tab[size-1-i];
1017 else
1018 #endif
1019 for(i=0;i<size;i++)
1020 ptr[i] = vtop->c.tab[i];
1021 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1022 vtop->r |= VT_LVAL | VT_SYM;
1023 vtop->sym = sym;
1024 vtop->c.i = 0;
1026 #ifdef CONFIG_TCC_BCHECK
1027 if (vtop->r & VT_MUSTBOUND)
1028 gbound();
1029 #endif
1031 r = vtop->r & VT_VALMASK;
1032 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1033 #ifndef TCC_TARGET_ARM64
1034 if (rc == RC_IRET)
1035 rc2 = RC_LRET;
1036 #ifdef TCC_TARGET_X86_64
1037 else if (rc == RC_FRET)
1038 rc2 = RC_QRET;
1039 #endif
1040 #endif
1042 /* need to reload if:
1043 - constant
1044 - lvalue (need to dereference pointer)
1045 - already a register, but not in the right class */
1046 if (r >= VT_CONST
1047 || (vtop->r & VT_LVAL)
1048 || !(reg_classes[r] & rc)
1049 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1050 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1051 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1052 #else
1053 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1054 #endif
1057 r = get_reg(rc);
1058 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1059 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1060 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1061 #else
1062 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1063 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1064 unsigned long long ll;
1065 #endif
1066 int r2, original_type;
1067 original_type = vtop->type.t;
1068 /* two register type load : expand to two words
1069 temporarily */
1070 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1071 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1072 /* load constant */
1073 ll = vtop->c.i;
1074 vtop->c.i = ll; /* first word */
1075 load(r, vtop);
1076 vtop->r = r; /* save register value */
1077 vpushi(ll >> 32); /* second word */
1078 } else
1079 #endif
1080 if (vtop->r & VT_LVAL) {
1081 /* We do not want to modify the long long
1082 pointer here, so the safest (and least
1083 efficient) approach is to save all the other registers
1084 on the stack. XXX: totally inefficient. */
1085 #if 0
1086 save_regs(1);
1087 #else
1088 /* lvalue_save: save only if used further down the stack */
1089 save_reg_upstack(vtop->r, 1);
1090 #endif
1091 /* load from memory */
1092 vtop->type.t = load_type;
1093 load(r, vtop);
1094 vdup();
1095 vtop[-1].r = r; /* save register value */
1096 /* increment pointer to get second word */
1097 vtop->type.t = addr_type;
1098 gaddrof();
1099 vpushi(load_size);
1100 gen_op('+');
1101 vtop->r |= VT_LVAL;
1102 vtop->type.t = load_type;
1103 } else {
1104 /* move registers */
1105 load(r, vtop);
1106 vdup();
1107 vtop[-1].r = r; /* save register value */
1108 vtop->r = vtop[-1].r2;
1110 /* Allocate second register. Here we rely on the fact that
1111 get_reg() tries first to free r2 of an SValue. */
1112 r2 = get_reg(rc2);
1113 load(r2, vtop);
1114 vpop();
1115 /* write second register */
1116 vtop->r2 = r2;
1117 vtop->type.t = original_type;
1118 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1119 int t1, t;
1120 /* lvalue of scalar type : need to use lvalue type
1121 because of possible cast */
1122 t = vtop->type.t;
1123 t1 = t;
1124 /* compute memory access type */
1125 if (vtop->r & VT_REF)
1126 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1127 t = VT_PTR;
1128 #else
1129 t = VT_INT;
1130 #endif
1131 else if (vtop->r & VT_LVAL_BYTE)
1132 t = VT_BYTE;
1133 else if (vtop->r & VT_LVAL_SHORT)
1134 t = VT_SHORT;
1135 if (vtop->r & VT_LVAL_UNSIGNED)
1136 t |= VT_UNSIGNED;
1137 vtop->type.t = t;
1138 load(r, vtop);
1139 /* restore wanted type */
1140 vtop->type.t = t1;
1141 } else {
1142 /* one register type load */
1143 load(r, vtop);
1146 vtop->r = r;
1147 #ifdef TCC_TARGET_C67
1148 /* uses register pairs for doubles */
1149 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1150 vtop->r2 = r+1;
1151 #endif
1153 return r;
1156 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1157 ST_FUNC void gv2(int rc1, int rc2)
1159 int v;
1161 /* generate more generic register first. But VT_JMP or VT_CMP
1162 values must be generated first in all cases to avoid possible
1163 reload errors */
1164 v = vtop[0].r & VT_VALMASK;
1165 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1166 vswap();
1167 gv(rc1);
1168 vswap();
1169 gv(rc2);
1170 /* test if reload is needed for first register */
1171 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1172 vswap();
1173 gv(rc1);
1174 vswap();
1176 } else {
1177 gv(rc2);
1178 vswap();
1179 gv(rc1);
1180 vswap();
1181 /* test if reload is needed for first register */
1182 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1183 gv(rc2);
1188 #ifndef TCC_TARGET_ARM64
1189 /* wrapper around RC_FRET to return a register by type */
1190 static int rc_fret(int t)
1192 #ifdef TCC_TARGET_X86_64
1193 if (t == VT_LDOUBLE) {
1194 return RC_ST0;
1196 #endif
1197 return RC_FRET;
1199 #endif
1201 /* wrapper around REG_FRET to return a register by type */
1202 static int reg_fret(int t)
1204 #ifdef TCC_TARGET_X86_64
1205 if (t == VT_LDOUBLE) {
1206 return TREG_ST0;
1208 #endif
1209 return REG_FRET;
1212 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1213 /* expand 64bit on stack in two ints */
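/* three cases: a constant is split by shifting the 64-bit immediate,
   an lvalue with a static or stack address gets a second lvalue at
   address + 4 so the high word ends up on top (little-endian layout
   of the 32-bit targets), and anything else is loaded into a register
   pair and split via r/r2 */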
1214 static void lexpand(void)
1216 int u, v;
1217 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1218 v = vtop->r & (VT_VALMASK | VT_LVAL);
1219 if (v == VT_CONST) {
1220 vdup();
1221 vtop[0].c.i >>= 32;
1222 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1223 vdup();
1224 vtop[0].c.i += 4;
1225 } else {
1226 gv(RC_INT);
1227 vdup();
1228 vtop[0].r = vtop[-1].r2;
1229 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1231 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1233 #endif
1235 #ifdef TCC_TARGET_ARM
1236 /* expand long long on stack */
1237 ST_FUNC void lexpand_nr(void)
1239 int u,v;
1241 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1242 vdup();
1243 vtop->r2 = VT_CONST;
1244 vtop->type.t = VT_INT | u;
1245 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1246 if (v == VT_CONST) {
1247 vtop[-1].c.i = vtop->c.i;
1248 vtop->c.i = vtop->c.i >> 32;
1249 vtop->r = VT_CONST;
1250 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1251 vtop->c.i += 4;
1252 vtop->r = vtop[-1].r;
1253 } else if (v > VT_CONST) {
1254 vtop--;
1255 lexpand();
1256 } else
1257 vtop->r = vtop[-1].r2;
1258 vtop[-1].r2 = VT_CONST;
1259 vtop[-1].type.t = VT_INT | u;
1261 #endif
1263 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1264 /* build a long long from two ints */
1265 static void lbuild(int t)
1267 gv2(RC_INT, RC_INT);
1268 vtop[-1].r2 = vtop[0].r;
1269 vtop[-1].type.t = t;
1270 vpop();
1272 #endif
1274 /* rotate n first stack elements to the bottom
1275 I1 ... In -> I2 ... In I1 [top is right] */
1277 ST_FUNC void vrotb(int n)
1279 int i;
1280 SValue tmp;
1282 tmp = vtop[-n + 1];
1283 for(i=-n+1;i!=0;i++)
1284 vtop[i] = vtop[i+1];
1285 vtop[0] = tmp;
1288 /* rotate the n elements before entry e towards the top
1289 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
1291 ST_FUNC void vrote(SValue *e, int n)
1293 int i;
1294 SValue tmp;
1296 tmp = *e;
1297 for(i = 0;i < n - 1; i++)
1298 e[-i] = e[-i - 1];
1299 e[-n + 1] = tmp;
1302 /* rotate n first stack elements to the top
1303 I1 ... In -> In I1 ... I(n-1) [top is right] */
1305 ST_FUNC void vrott(int n)
1307 vrote(vtop, n);
1310 /* pop stack value */
1311 ST_FUNC void vpop(void)
1313 int v;
1314 v = vtop->r & VT_VALMASK;
1315 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1316 /* for x86, we need to pop the FP stack */
1317 if (v == TREG_ST0 && !nocode_wanted) {
1318 o(0xd8dd); /* fstp %st(0) */
1319 } else
1320 #endif
1321 if (v == VT_JMP || v == VT_JMPI) {
1322 /* need to put correct jump if && or || without test */
1323 gsym(vtop->c.i);
1325 vtop--;
1328 /* convert stack entry to register and duplicate its value in another
1329 register */
1330 static void gv_dup(void)
1332 int rc, t, r, r1;
1333 SValue sv;
1335 t = vtop->type.t;
1336 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1337 if ((t & VT_BTYPE) == VT_LLONG) {
1338 lexpand();
1339 gv_dup();
1340 vswap();
1341 vrotb(3);
1342 gv_dup();
1343 vrotb(4);
1344 /* stack: H L L1 H1 */
1345 lbuild(t);
1346 vrotb(3);
1347 vrotb(3);
1348 vswap();
1349 lbuild(t);
1350 vswap();
1351 } else
1352 #endif
1354 /* duplicate value */
1355 rc = RC_INT;
1356 sv.type.t = VT_INT;
1357 if (is_float(t)) {
1358 rc = RC_FLOAT;
1359 #ifdef TCC_TARGET_X86_64
1360 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1361 rc = RC_ST0;
1363 #endif
1364 sv.type.t = t;
1366 r = gv(rc);
1367 r1 = get_reg(rc);
1368 sv.r = r;
1369 sv.c.i = 0;
1370 load(r1, &sv); /* move r to r1 */
1371 vdup();
1372 /* duplicates value */
1373 if (r != r1)
1374 vtop->r = r1;
1378 /* Generate value test
1380 * Generate a test for any value (jump, comparison and integers) */
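/* 'inv' inverts the sense of the test; 't' is the head of an existing
   jump chain the new branch is linked into, and the returned chain is
   resolved later with gsym() */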
1381 ST_FUNC int gvtst(int inv, int t)
1383 int v = vtop->r & VT_VALMASK;
1384 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1385 vpushi(0);
1386 gen_op(TOK_NE);
1388 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1389 /* constant jmp optimization */
1390 if ((vtop->c.i != 0) != inv)
1391 t = gjmp(t);
1392 vtop--;
1393 return t;
1395 return gtst(inv, t);
1398 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1399 /* generate CPU independent (unsigned) long long operations */
1400 static void gen_opl(int op)
1402 int t, a, b, op1, c, i;
1403 int func;
1404 unsigned short reg_iret = REG_IRET;
1405 unsigned short reg_lret = REG_LRET;
1406 SValue tmp;
1408 switch(op) {
1409 case '/':
1410 case TOK_PDIV:
1411 func = TOK___divdi3;
1412 goto gen_func;
1413 case TOK_UDIV:
1414 func = TOK___udivdi3;
1415 goto gen_func;
1416 case '%':
1417 func = TOK___moddi3;
1418 goto gen_mod_func;
1419 case TOK_UMOD:
1420 func = TOK___umoddi3;
1421 gen_mod_func:
1422 #ifdef TCC_ARM_EABI
1423 reg_iret = TREG_R2;
1424 reg_lret = TREG_R3;
1425 #endif
1426 gen_func:
1427 /* call generic long long function */
1428 vpush_global_sym(&func_old_type, func);
1429 vrott(3);
1430 gfunc_call(2);
1431 vpushi(0);
1432 vtop->r = reg_iret;
1433 vtop->r2 = reg_lret;
1434 break;
1435 case '^':
1436 case '&':
1437 case '|':
1438 case '*':
1439 case '+':
1440 case '-':
1441 //pv("gen_opl A",0,2);
1442 t = vtop->type.t;
1443 vswap();
1444 lexpand();
1445 vrotb(3);
1446 lexpand();
1447 /* stack: L1 H1 L2 H2 */
1448 tmp = vtop[0];
1449 vtop[0] = vtop[-3];
1450 vtop[-3] = tmp;
1451 tmp = vtop[-2];
1452 vtop[-2] = vtop[-3];
1453 vtop[-3] = tmp;
1454 vswap();
1455 /* stack: H1 H2 L1 L2 */
1456 //pv("gen_opl B",0,4);
1457 if (op == '*') {
1458 vpushv(vtop - 1);
1459 vpushv(vtop - 1);
1460 gen_op(TOK_UMULL);
1461 lexpand();
1462 /* stack: H1 H2 L1 L2 ML MH */
1463 for(i=0;i<4;i++)
1464 vrotb(6);
1465 /* stack: ML MH H1 H2 L1 L2 */
1466 tmp = vtop[0];
1467 vtop[0] = vtop[-2];
1468 vtop[-2] = tmp;
1469 /* stack: ML MH H1 L2 H2 L1 */
1470 gen_op('*');
1471 vrotb(3);
1472 vrotb(3);
1473 gen_op('*');
1474 /* stack: ML MH M1 M2 */
1475 gen_op('+');
1476 gen_op('+');
1477 } else if (op == '+' || op == '-') {
1478 /* XXX: add non carry method too (for MIPS or alpha) */
1479 if (op == '+')
1480 op1 = TOK_ADDC1;
1481 else
1482 op1 = TOK_SUBC1;
1483 gen_op(op1);
1484 /* stack: H1 H2 (L1 op L2) */
1485 vrotb(3);
1486 vrotb(3);
1487 gen_op(op1 + 1); /* TOK_xxxC2 */
1488 } else {
1489 gen_op(op);
1490 /* stack: H1 H2 (L1 op L2) */
1491 vrotb(3);
1492 vrotb(3);
1493 /* stack: (L1 op L2) H1 H2 */
1494 gen_op(op);
1495 /* stack: (L1 op L2) (H1 op H2) */
1497 /* stack: L H */
1498 lbuild(t);
1499 break;
1500 case TOK_SAR:
1501 case TOK_SHR:
1502 case TOK_SHL:
1503 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1504 t = vtop[-1].type.t;
1505 vswap();
1506 lexpand();
1507 vrotb(3);
1508 /* stack: L H shift */
1509 c = (int)vtop->c.i;
1510 /* constant: simpler */
1511 /* NOTE: all comments are for SHL. the other cases are
1512 done by swapping words */
1513 vpop();
1514 if (op != TOK_SHL)
1515 vswap();
1516 if (c >= 32) {
1517 /* stack: L H */
1518 vpop();
1519 if (c > 32) {
1520 vpushi(c - 32);
1521 gen_op(op);
1523 if (op != TOK_SAR) {
1524 vpushi(0);
1525 } else {
1526 gv_dup();
1527 vpushi(31);
1528 gen_op(TOK_SAR);
1530 vswap();
1531 } else {
1532 vswap();
1533 gv_dup();
1534 /* stack: H L L */
1535 vpushi(c);
1536 gen_op(op);
1537 vswap();
1538 vpushi(32 - c);
1539 if (op == TOK_SHL)
1540 gen_op(TOK_SHR);
1541 else
1542 gen_op(TOK_SHL);
1543 vrotb(3);
1544 /* stack: L L H */
1545 vpushi(c);
1546 if (op == TOK_SHL)
1547 gen_op(TOK_SHL);
1548 else
1549 gen_op(TOK_SHR);
1550 gen_op('|');
1552 if (op != TOK_SHL)
1553 vswap();
1554 lbuild(t);
1555 } else {
1556 /* XXX: should provide a faster fallback on x86 ? */
1557 switch(op) {
1558 case TOK_SAR:
1559 func = TOK___ashrdi3;
1560 goto gen_func;
1561 case TOK_SHR:
1562 func = TOK___lshrdi3;
1563 goto gen_func;
1564 case TOK_SHL:
1565 func = TOK___ashldi3;
1566 goto gen_func;
1569 break;
1570 default:
1571 /* compare operations */
1572 t = vtop->type.t;
1573 vswap();
1574 lexpand();
1575 vrotb(3);
1576 lexpand();
1577 /* stack: L1 H1 L2 H2 */
1578 tmp = vtop[-1];
1579 vtop[-1] = vtop[-2];
1580 vtop[-2] = tmp;
1581 /* stack: L1 L2 H1 H2 */
1582 /* compare high */
1583 op1 = op;
1584 /* when values are equal, we need to compare the low words. Since
1585 the jump is inverted, we invert the test too. */
1586 if (op1 == TOK_LT)
1587 op1 = TOK_LE;
1588 else if (op1 == TOK_GT)
1589 op1 = TOK_GE;
1590 else if (op1 == TOK_ULT)
1591 op1 = TOK_ULE;
1592 else if (op1 == TOK_UGT)
1593 op1 = TOK_UGE;
1594 a = 0;
1595 b = 0;
1596 gen_op(op1);
1597 if (op1 != TOK_NE) {
1598 a = gvtst(1, 0);
1600 if (op != TOK_EQ) {
1601 /* generate non equal test */
1602 /* XXX: NOT PORTABLE yet */
1603 if (a == 0) {
1604 b = gvtst(0, 0);
1605 } else {
1606 #if defined(TCC_TARGET_I386)
1607 b = psym(0x850f, 0);
1608 #elif defined(TCC_TARGET_ARM)
1609 b = ind;
1610 o(0x1A000000 | encbranch(ind, 0, 1));
1611 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1612 tcc_error("not implemented");
1613 #else
1614 #error not supported
1615 #endif
1618 /* compare low. Always unsigned */
1619 op1 = op;
1620 if (op1 == TOK_LT)
1621 op1 = TOK_ULT;
1622 else if (op1 == TOK_LE)
1623 op1 = TOK_ULE;
1624 else if (op1 == TOK_GT)
1625 op1 = TOK_UGT;
1626 else if (op1 == TOK_GE)
1627 op1 = TOK_UGE;
1628 gen_op(op1);
1629 a = gvtst(1, a);
1630 gsym(b);
1631 vseti(VT_JMPI, a);
1632 break;
1635 #endif
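/* signed 64-bit division on operands kept in uint64_t: divide the
   magnitudes and negate the result when the operand signs differ
   ((a ^ b) >> 63) */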
1637 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1639 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1640 return (a ^ b) >> 63 ? -x : x;
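/* signed '<' on uint64_t operands: flipping the sign bit biases both
   values by 2^63, which makes the unsigned comparison order match the
   signed one */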
1643 static int gen_opic_lt(uint64_t a, uint64_t b)
1645 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1648 /* handle integer constant optimizations and various machine
1649 independent opt */
1650 static void gen_opic(int op)
1652 SValue *v1 = vtop - 1;
1653 SValue *v2 = vtop;
1654 int t1 = v1->type.t & VT_BTYPE;
1655 int t2 = v2->type.t & VT_BTYPE;
1656 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1657 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1658 uint64_t l1 = c1 ? v1->c.i : 0;
1659 uint64_t l2 = c2 ? v2->c.i : 0;
1660 int shm = (t1 == VT_LLONG) ? 63 : 31;
1662 if (t1 != VT_LLONG)
1663 l1 = ((uint32_t)l1 |
1664 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1665 if (t2 != VT_LLONG)
1666 l2 = ((uint32_t)l2 |
1667 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1669 if (c1 && c2) {
1670 switch(op) {
1671 case '+': l1 += l2; break;
1672 case '-': l1 -= l2; break;
1673 case '&': l1 &= l2; break;
1674 case '^': l1 ^= l2; break;
1675 case '|': l1 |= l2; break;
1676 case '*': l1 *= l2; break;
1678 case TOK_PDIV:
1679 case '/':
1680 case '%':
1681 case TOK_UDIV:
1682 case TOK_UMOD:
1683 /* if division by zero, generate explicit division */
1684 if (l2 == 0) {
1685 if (const_wanted)
1686 tcc_error("division by zero in constant");
1687 goto general_case;
1689 switch(op) {
1690 default: l1 = gen_opic_sdiv(l1, l2); break;
1691 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1692 case TOK_UDIV: l1 = l1 / l2; break;
1693 case TOK_UMOD: l1 = l1 % l2; break;
1695 break;
1696 case TOK_SHL: l1 <<= (l2 & shm); break;
1697 case TOK_SHR: l1 >>= (l2 & shm); break;
1698 case TOK_SAR:
1699 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1700 break;
1701 /* tests */
1702 case TOK_ULT: l1 = l1 < l2; break;
1703 case TOK_UGE: l1 = l1 >= l2; break;
1704 case TOK_EQ: l1 = l1 == l2; break;
1705 case TOK_NE: l1 = l1 != l2; break;
1706 case TOK_ULE: l1 = l1 <= l2; break;
1707 case TOK_UGT: l1 = l1 > l2; break;
1708 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1709 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1710 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1711 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1712 /* logical */
1713 case TOK_LAND: l1 = l1 && l2; break;
1714 case TOK_LOR: l1 = l1 || l2; break;
1715 default:
1716 goto general_case;
1718 v1->c.i = l1;
1719 vtop--;
1720 } else {
1721 /* if commutative ops, put c2 as constant */
1722 if (c1 && (op == '+' || op == '&' || op == '^' ||
1723 op == '|' || op == '*')) {
1724 vswap();
1725 c2 = c1; //c = c1, c1 = c2, c2 = c;
1726 l2 = l1; //l = l1, l1 = l2, l2 = l;
1728 if (!const_wanted &&
1729 c1 && ((l1 == 0 &&
1730 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1731 (l1 == -1 && op == TOK_SAR))) {
1732 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1733 vtop--;
1734 } else if (!const_wanted &&
1735 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1736 (l2 == -1 && op == '|') ||
1737 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1738 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1739 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1740 if (l2 == 1)
1741 vtop->c.i = 0;
1742 vswap();
1743 vtop--;
1744 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1745 op == TOK_PDIV) &&
1746 l2 == 1) ||
1747 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1748 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1749 l2 == 0) ||
1750 (op == '&' &&
1751 l2 == -1))) {
1752 /* filter out NOP operations like x*1, x-0, x&-1... */
1753 vtop--;
1754 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1755 /* try to use shifts instead of muls or divs */
1756 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1757 int n = -1;
1758 while (l2) {
1759 l2 >>= 1;
1760 n++;
1762 vtop->c.i = n;
1763 if (op == '*')
1764 op = TOK_SHL;
1765 else if (op == TOK_PDIV)
1766 op = TOK_SAR;
1767 else
1768 op = TOK_SHR;
1770 goto general_case;
1771 } else if (c2 && (op == '+' || op == '-') &&
1772 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1773 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1774 /* symbol + constant case */
1775 if (op == '-')
1776 l2 = -l2;
1777 vtop--;
1778 vtop->c.i += l2;
1779 } else {
1780 general_case:
1781 if (!nocode_wanted) {
1782 /* call low level op generator */
1783 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1784 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1785 gen_opl(op);
1786 else
1787 gen_opi(op);
1788 } else {
1789 vtop--;
1790 /* Ensure vtop isn't marked VT_CONST in case something
1791 up our callchain is interested in const-ness of the
1792 expression. Also make it a non-LVAL if it was,
1793 so that further code can't accidentally generate
1794 a deref (happens only for buggy uses of e.g.
1795 gv() under nocode_wanted). */
1796 vtop->r &= ~(VT_VALMASK | VT_LVAL);
1802 /* generate a floating point operation with constant propagation */
1803 static void gen_opif(int op)
1805 int c1, c2;
1806 SValue *v1, *v2;
1807 long double f1, f2;
1809 v1 = vtop - 1;
1810 v2 = vtop;
1811 /* currently, we cannot do computations with forward symbols */
1812 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1813 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1814 if (c1 && c2) {
1815 if (v1->type.t == VT_FLOAT) {
1816 f1 = v1->c.f;
1817 f2 = v2->c.f;
1818 } else if (v1->type.t == VT_DOUBLE) {
1819 f1 = v1->c.d;
1820 f2 = v2->c.d;
1821 } else {
1822 f1 = v1->c.ld;
1823 f2 = v2->c.ld;
1826 /* NOTE: we only do constant propagation if finite number (not
1827 NaN or infinity) (ANSI spec) */
1828 if (!ieee_finite(f1) || !ieee_finite(f2))
1829 goto general_case;
1831 switch(op) {
1832 case '+': f1 += f2; break;
1833 case '-': f1 -= f2; break;
1834 case '*': f1 *= f2; break;
1835 case '/':
1836 if (f2 == 0.0) {
1837 if (const_wanted)
1838 tcc_error("division by zero in constant");
1839 goto general_case;
1841 f1 /= f2;
1842 break;
1843 /* XXX: also handles tests ? */
1844 default:
1845 goto general_case;
1847 /* XXX: overflow test ? */
1848 if (v1->type.t == VT_FLOAT) {
1849 v1->c.f = f1;
1850 } else if (v1->type.t == VT_DOUBLE) {
1851 v1->c.d = f1;
1852 } else {
1853 v1->c.ld = f1;
1855 vtop--;
1856 } else {
1857 general_case:
1858 if (!nocode_wanted) {
1859 gen_opf(op);
1860 } else {
1861 vtop--;
1866 static int pointed_size(CType *type)
1868 int align;
1869 return type_size(pointed_type(type), &align);
1872 static void vla_runtime_pointed_size(CType *type)
1874 int align;
1875 vla_runtime_type_size(pointed_type(type), &align);
1878 static inline int is_null_pointer(SValue *p)
1880 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1881 return 0;
1882 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1883 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1884 ((p->type.t & VT_BTYPE) == VT_PTR &&
1885 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1888 static inline int is_integer_btype(int bt)
1890 return (bt == VT_BYTE || bt == VT_SHORT ||
1891 bt == VT_INT || bt == VT_LLONG);
1894 /* check types for comparison or subtraction of pointers */
1895 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1897 CType *type1, *type2, tmp_type1, tmp_type2;
1898 int bt1, bt2;
1900 /* null pointers are accepted for all comparisons, as in gcc */
1901 if (is_null_pointer(p1) || is_null_pointer(p2))
1902 return;
1903 type1 = &p1->type;
1904 type2 = &p2->type;
1905 bt1 = type1->t & VT_BTYPE;
1906 bt2 = type2->t & VT_BTYPE;
1907 /* accept comparison between pointer and integer with a warning */
1908 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1909 if (op != TOK_LOR && op != TOK_LAND )
1910 tcc_warning("comparison between pointer and integer");
1911 return;
1914 /* both must be pointers or implicit function pointers */
1915 if (bt1 == VT_PTR) {
1916 type1 = pointed_type(type1);
1917 } else if (bt1 != VT_FUNC)
1918 goto invalid_operands;
1920 if (bt2 == VT_PTR) {
1921 type2 = pointed_type(type2);
1922 } else if (bt2 != VT_FUNC) {
1923 invalid_operands:
1924 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1926 if ((type1->t & VT_BTYPE) == VT_VOID ||
1927 (type2->t & VT_BTYPE) == VT_VOID)
1928 return;
1929 tmp_type1 = *type1;
1930 tmp_type2 = *type2;
1931 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1932 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1933 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1934 /* gcc-like error if '-' is used */
1935 if (op == '-')
1936 goto invalid_operands;
1937 else
1938 tcc_warning("comparison of distinct pointer types lacks a cast");
1942 /* generic gen_op: handles types problems */
1943 ST_FUNC void gen_op(int op)
1945 int u, t1, t2, bt1, bt2, t;
1946 CType type1;
1948 redo:
1949 t1 = vtop[-1].type.t;
1950 t2 = vtop[0].type.t;
1951 bt1 = t1 & VT_BTYPE;
1952 bt2 = t2 & VT_BTYPE;
1954 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1955 tcc_error("operation on a struct");
1956 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1957 if (bt2 == VT_FUNC) {
1958 mk_pointer(&vtop->type);
1959 gaddrof();
1961 if (bt1 == VT_FUNC) {
1962 vswap();
1963 mk_pointer(&vtop->type);
1964 gaddrof();
1965 vswap();
1967 goto redo;
1968 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1969 /* at least one operand is a pointer */
1970 /* relational op: both operands must be pointers */
1971 if (op >= TOK_ULT && op <= TOK_LOR) {
1972 check_comparison_pointer_types(vtop - 1, vtop, op);
1973 /* pointers are handled as unsigned */
1974 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1975 t = VT_LLONG | VT_UNSIGNED;
1976 #else
1977 t = VT_INT | VT_UNSIGNED;
1978 #endif
1979 goto std_op;
1981 /* if both pointers, then it must be the '-' op */
1982 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1983 if (op != '-')
1984 tcc_error("cannot use pointers here");
1985 check_comparison_pointer_types(vtop - 1, vtop, op);
1986 /* XXX: check that types are compatible */
1987 if (vtop[-1].type.t & VT_VLA) {
1988 vla_runtime_pointed_size(&vtop[-1].type);
1989 } else {
1990 vpushi(pointed_size(&vtop[-1].type));
1992 vrott(3);
1993 gen_opic(op);
1994 /* set to integer type */
1995 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1996 vtop->type.t = VT_LLONG;
1997 #else
1998 vtop->type.t = VT_INT;
1999 #endif
2000 vswap();
2001 gen_op(TOK_PDIV);
2002 } else {
2003 /* exactly one pointer : must be '+' or '-'. */
2004 if (op != '-' && op != '+')
2005 tcc_error("cannot use pointers here");
2006 /* Put pointer as first operand */
2007 if (bt2 == VT_PTR) {
2008 vswap();
2009 swap(&t1, &t2);
2011 #if PTR_SIZE == 4
2012 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2013 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2014 gen_cast(&int_type);
2015 #endif
2016 type1 = vtop[-1].type;
2017 type1.t &= ~VT_ARRAY;
2018 if (vtop[-1].type.t & VT_VLA)
2019 vla_runtime_pointed_size(&vtop[-1].type);
2020 else {
2021 u = pointed_size(&vtop[-1].type);
2022 if (u < 0)
2023 tcc_error("unknown array element size");
2024 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2025 vpushll(u);
2026 #else
2027 /* XXX: cast to int ? (long long case) */
2028 vpushi(u);
2029 #endif
2031 gen_op('*');
2032 #if 0
2033 /* #ifdef CONFIG_TCC_BCHECK
2034 The main reason for removing this code:
2035 #include <stdio.h>
2036 int main ()
2038 int v[10];
2039 int i = 10;
2040 int j = 9;
2041 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2042 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2044 When this code is enabled, the output looks like
2045 v+i-j = 0xfffffffe
2046 v+(i-j) = 0xbff84000 */
2048 /* if evaluating constant expression, no code should be
2049 generated, so no bound check */
2050 if (tcc_state->do_bounds_check && !const_wanted) {
2051 /* if bounded pointers, we generate a special code to
2052 test bounds */
2053 if (op == '-') {
2054 vpushi(0);
2055 vswap();
2056 gen_op('-');
2058 gen_bounded_ptr_add();
2059 } else
2060 #endif
2062 gen_opic(op);
2064 /* restore the type in case gen_opic() swapped the operands */
2065 vtop->type = type1;
2067 } else if (is_float(bt1) || is_float(bt2)) {
2068 /* compute bigger type and do implicit casts */
2069 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2070 t = VT_LDOUBLE;
2071 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2072 t = VT_DOUBLE;
2073 } else {
2074 t = VT_FLOAT;
2076 /* floats can only be used for a few operations */
2077 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2078 (op < TOK_ULT || op > TOK_GT))
2079 tcc_error("invalid operands for binary operation");
2080 goto std_op;
2081 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2082 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2083 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2084 t |= VT_UNSIGNED;
2085 goto std_op;
2086 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2087 /* cast to biggest op */
2088 t = VT_LLONG;
2089 /* convert to unsigned if it does not fit in a long long */
2090 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2091 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2092 t |= VT_UNSIGNED;
2093 goto std_op;
2094 } else {
2095 /* integer operations */
2096 t = VT_INT;
2097 /* convert to unsigned if it does not fit in an integer */
2098 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2099 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2100 t |= VT_UNSIGNED;
2101 std_op:
2102 /* XXX: currently, some unsigned operations are explicit, so
2103 we modify them here */
2104 if (t & VT_UNSIGNED) {
2105 if (op == TOK_SAR)
2106 op = TOK_SHR;
2107 else if (op == '/')
2108 op = TOK_UDIV;
2109 else if (op == '%')
2110 op = TOK_UMOD;
2111 else if (op == TOK_LT)
2112 op = TOK_ULT;
2113 else if (op == TOK_GT)
2114 op = TOK_UGT;
2115 else if (op == TOK_LE)
2116 op = TOK_ULE;
2117 else if (op == TOK_GE)
2118 op = TOK_UGE;
2120 vswap();
2121 type1.t = t;
2122 gen_cast(&type1);
2123 vswap();
2124 /* special case for shifts and long long: we keep the shift as
2125 an integer */
2126 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2127 type1.t = VT_INT;
2128 gen_cast(&type1);
2129 if (is_float(t))
2130 gen_opif(op);
2131 else
2132 gen_opic(op);
2133 if (op >= TOK_ULT && op <= TOK_GT) {
2134 /* relational op: the result is an int */
2135 vtop->type.t = VT_INT;
2136 } else {
2137 vtop->type.t = t;
2140 // Make sure that we have converted to an rvalue:
2141 if (vtop->r & VT_LVAL && !nocode_wanted)
2142 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2145 #ifndef TCC_TARGET_ARM
2146 /* generic itof for unsigned long long case */
2147 static void gen_cvt_itof1(int t)
2149 #ifdef TCC_TARGET_ARM64
2150 gen_cvt_itof(t);
2151 #else
2152 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2153 (VT_LLONG | VT_UNSIGNED)) {
2155 if (t == VT_FLOAT)
2156 vpush_global_sym(&func_old_type, TOK___floatundisf);
2157 #if LDOUBLE_SIZE != 8
2158 else if (t == VT_LDOUBLE)
2159 vpush_global_sym(&func_old_type, TOK___floatundixf);
2160 #endif
2161 else
2162 vpush_global_sym(&func_old_type, TOK___floatundidf);
2163 vrott(2);
2164 gfunc_call(1);
2165 vpushi(0);
2166 vtop->r = reg_fret(t);
2167 } else {
2168 gen_cvt_itof(t);
2170 #endif
2172 #endif
2174 /* generic ftoi for unsigned long long case */
2175 static void gen_cvt_ftoi1(int t)
2177 #ifdef TCC_TARGET_ARM64
2178 gen_cvt_ftoi(t);
2179 #else
2180 int st;
2182 if (t == (VT_LLONG | VT_UNSIGNED)) {
2183 /* not handled natively */
2184 st = vtop->type.t & VT_BTYPE;
2185 if (st == VT_FLOAT)
2186 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2187 #if LDOUBLE_SIZE != 8
2188 else if (st == VT_LDOUBLE)
2189 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2190 #endif
2191 else
2192 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2193 vrott(2);
2194 gfunc_call(1);
2195 vpushi(0);
2196 vtop->r = REG_IRET;
2197 vtop->r2 = REG_LRET;
2198 } else {
2199 gen_cvt_ftoi(t);
2201 #endif
2204 /* force char or short cast */
2205 static void force_charshort_cast(int t)
2207 int bits, dbt;
2208 dbt = t & VT_BTYPE;
2209 /* XXX: add optimization if lvalue : just change type and offset */
2210 if (dbt == VT_BYTE)
2211 bits = 8;
2212 else
2213 bits = 16;
2214 if (t & VT_UNSIGNED) {
2215 vpushi((1 << bits) - 1);
2216 gen_op('&');
2217 } else {
2218 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2219 bits = 64 - bits;
2220 else
2221 bits = 32 - bits;
2222 vpushi(bits);
2223 gen_op(TOK_SHL);
2224 /* result must be signed or the SAR is converted to an SHL
2225 This was not the case when "t" was a signed short
2226 and the last value on the stack was an unsigned int */
2227 vtop->type.t &= ~VT_UNSIGNED;
2228 vpushi(bits);
2229 gen_op(TOK_SAR);
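/* Illustrative example (added annotation, not part of the original source):
   with 32-bit values the signed path above amounts to

       (signed char)x  ==  (x << 24) >> 24     // arithmetic right shift

   while the unsigned path is a simple mask:

       (unsigned char)x  ==  x & 0xff  */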
2233 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2234 static void gen_cast(CType *type)
2236 int sbt, dbt, sf, df, c, p;
2238 /* special delayed cast for char/short */
2239 /* XXX: in some cases (multiple cascaded casts), it may still
2240 be incorrect */
2241 if (vtop->r & VT_MUSTCAST) {
2242 vtop->r &= ~VT_MUSTCAST;
2243 force_charshort_cast(vtop->type.t);
2246 /* bitfields first get cast to ints */
2247 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2248 gv(RC_INT);
2251 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2252 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2254 if (sbt != dbt) {
2255 sf = is_float(sbt);
2256 df = is_float(dbt);
2257 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2258 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2259 if (c) {
2260 /* constant case: we can do it now */
2261 /* XXX: in ISOC, cannot do it if error in convert */
2262 if (sbt == VT_FLOAT)
2263 vtop->c.ld = vtop->c.f;
2264 else if (sbt == VT_DOUBLE)
2265 vtop->c.ld = vtop->c.d;
2267 if (df) {
2268 if ((sbt & VT_BTYPE) == VT_LLONG) {
2269 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2270 vtop->c.ld = vtop->c.i;
2271 else
2272 vtop->c.ld = -(long double)-vtop->c.i;
2273 } else if(!sf) {
2274 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2275 vtop->c.ld = (uint32_t)vtop->c.i;
2276 else
2277 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2280 if (dbt == VT_FLOAT)
2281 vtop->c.f = (float)vtop->c.ld;
2282 else if (dbt == VT_DOUBLE)
2283 vtop->c.d = (double)vtop->c.ld;
2284 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2285 vtop->c.i = vtop->c.ld;
2286 } else if (sf && dbt == VT_BOOL) {
2287 vtop->c.i = (vtop->c.ld != 0);
2288 } else {
2289 if(sf)
2290 vtop->c.i = vtop->c.ld;
2291 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2292 ;
2293 else if (sbt & VT_UNSIGNED)
2294 vtop->c.i = (uint32_t)vtop->c.i;
2295 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2296 else if (sbt == VT_PTR)
2297 ;
2298 #endif
2299 else if (sbt != VT_LLONG)
2300 vtop->c.i = ((uint32_t)vtop->c.i |
2301 -(vtop->c.i & 0x80000000));
2303 if (dbt == (VT_LLONG|VT_UNSIGNED))
2304 ;
2305 else if (dbt == VT_BOOL)
2306 vtop->c.i = (vtop->c.i != 0);
2307 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2308 else if (dbt == VT_PTR)
2310 #endif
2311 else if (dbt != VT_LLONG) {
2312 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2313 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2314 0xffffffff);
2315 vtop->c.i &= m;
2316 if (!(dbt & VT_UNSIGNED))
2317 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2320 } else if (p && dbt == VT_BOOL) {
2321 vtop->r = VT_CONST;
2322 vtop->c.i = 1;
2323 } else if (!nocode_wanted) {
2324 /* non constant case: generate code */
2325 if (sf && df) {
2326 /* convert from fp to fp */
2327 gen_cvt_ftof(dbt);
2328 } else if (df) {
2329 /* convert int to fp */
2330 gen_cvt_itof1(dbt);
2331 } else if (sf) {
2332 /* convert fp to int */
2333 if (dbt == VT_BOOL) {
2334 vpushi(0);
2335 gen_op(TOK_NE);
2336 } else {
2337 /* we handle char/short/etc... with generic code */
2338 if (dbt != (VT_INT | VT_UNSIGNED) &&
2339 dbt != (VT_LLONG | VT_UNSIGNED) &&
2340 dbt != VT_LLONG)
2341 dbt = VT_INT;
2342 gen_cvt_ftoi1(dbt);
2343 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2344 /* additional cast for char/short... */
2345 vtop->type.t = dbt;
2346 gen_cast(type);
2349 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2350 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2351 if ((sbt & VT_BTYPE) != VT_LLONG) {
2352 /* scalar to long long */
2353 /* machine independent conversion */
2354 gv(RC_INT);
2355 /* generate high word */
2356 if (sbt == (VT_INT | VT_UNSIGNED)) {
2357 vpushi(0);
2358 gv(RC_INT);
2359 } else {
2360 if (sbt == VT_PTR) {
2361 /* cast from pointer to int before we apply
2362 shift operation, which pointers don't support */
2363 gen_cast(&int_type);
2365 gv_dup();
2366 vpushi(31);
2367 gen_op(TOK_SAR);
2369 /* patch second register */
2370 vtop[-1].r2 = vtop->r;
2371 vpop();
2373 #else
2374 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2375 (dbt & VT_BTYPE) == VT_PTR ||
2376 (dbt & VT_BTYPE) == VT_FUNC) {
2377 if ((sbt & VT_BTYPE) != VT_LLONG &&
2378 (sbt & VT_BTYPE) != VT_PTR &&
2379 (sbt & VT_BTYPE) != VT_FUNC) {
2380 /* need to convert from 32bit to 64bit */
2381 gv(RC_INT);
2382 if (sbt != (VT_INT | VT_UNSIGNED)) {
2383 #if defined(TCC_TARGET_ARM64)
2384 gen_cvt_sxtw();
2385 #elif defined(TCC_TARGET_X86_64)
2386 int r = gv(RC_INT);
2387 /* x86_64 specific: movslq */
2388 o(0x6348);
2389 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2390 #else
2391 #error
2392 #endif
2395 #endif
2396 } else if (dbt == VT_BOOL) {
2397 /* scalar to bool */
2398 vpushi(0);
2399 gen_op(TOK_NE);
2400 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2401 (dbt & VT_BTYPE) == VT_SHORT) {
2402 if (sbt == VT_PTR) {
2403 vtop->type.t = VT_INT;
2404 tcc_warning("nonportable conversion from pointer to char/short");
2406 force_charshort_cast(dbt);
2407 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2408 } else if ((dbt & VT_BTYPE) == VT_INT) {
2409 /* scalar to int */
2410 if ((sbt & VT_BTYPE) == VT_LLONG) {
2411 /* from long long: just take low order word */
2412 lexpand();
2413 vpop();
2415 /* if lvalue and single word type, nothing to do because
2416 the lvalue already contains the real type size (see
2417 VT_LVAL_xxx constants) */
2418 #endif
2421 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2422 /* if we are casting between pointer types,
2423 we must update the VT_LVAL_xxx size */
2424 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2425 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2427 vtop->type = *type;
2430 /* return type size as known at compile time. Put alignment at 'a' */
2431 ST_FUNC int type_size(CType *type, int *a)
2433 Sym *s;
2434 int bt;
2436 bt = type->t & VT_BTYPE;
2437 if (bt == VT_STRUCT) {
2438 /* struct/union */
2439 s = type->ref;
2440 *a = s->r;
2441 return s->c;
2442 } else if (bt == VT_PTR) {
2443 if (type->t & VT_ARRAY) {
2444 int ts;
2446 s = type->ref;
2447 ts = type_size(&s->type, a);
2449 if (ts < 0 && s->c < 0)
2450 ts = -ts;
2452 return ts * s->c;
2453 } else {
2454 *a = PTR_SIZE;
2455 return PTR_SIZE;
2457 } else if (bt == VT_LDOUBLE) {
2458 *a = LDOUBLE_ALIGN;
2459 return LDOUBLE_SIZE;
2460 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2461 #ifdef TCC_TARGET_I386
2462 #ifdef TCC_TARGET_PE
2463 *a = 8;
2464 #else
2465 *a = 4;
2466 #endif
2467 #elif defined(TCC_TARGET_ARM)
2468 #ifdef TCC_ARM_EABI
2469 *a = 8;
2470 #else
2471 *a = 4;
2472 #endif
2473 #else
2474 *a = 8;
2475 #endif
2476 return 8;
2477 } else if (bt == VT_INT || bt == VT_FLOAT) {
2478 *a = 4;
2479 return 4;
2480 } else if (bt == VT_SHORT) {
2481 *a = 2;
2482 return 2;
2483 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2484 *a = 8;
2485 return 16;
2486 } else if (bt == VT_ENUM) {
2487 *a = 4;
2488 /* Enums might be incomplete, so don't just return '4' here. */
2489 return type->ref->c;
2490 } else {
2491 /* char, void, function, _Bool */
2492 *a = 1;
2493 return 1;
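/* Illustrative example (added annotation, not part of the original source),
   assuming a typical 32-bit target: for "int a[10]" type_size() recurses
   through the array ref and returns 10 * 4 = 40 with *a = 4; for a struct
   it simply returns the size and alignment recorded in the struct symbol
   (s->c and s->r) by struct_decl(). */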
2497 /* push the type size as known at run time on top of the value stack. Put
2498 alignment at 'a' */
2499 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2501 if (type->t & VT_VLA) {
2502 type_size(&type->ref->type, a);
2503 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2504 } else {
2505 vpushi(type_size(type, a));
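/* Illustrative example (added annotation, not part of the original source):
   given "int n = 5; int a[n];", the size of a's type is not a compile-time
   constant, so instead of pushing a constant with vpushi() this pushes an
   lvalue referring to the stack slot (type->ref->c) where the VLA's size was
   stored when it was declared. */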
2509 static void vla_sp_restore(void) {
2510 if (vlas_in_scope) {
2511 gen_vla_sp_restore(vla_sp_loc);
2515 static void vla_sp_restore_root(void) {
2516 if (vlas_in_scope) {
2517 gen_vla_sp_restore(vla_sp_root_loc);
2521 /* return the pointed type of t */
2522 static inline CType *pointed_type(CType *type)
2524 return &type->ref->type;
2527 /* modify 'type' in place so that it becomes a pointer to the original type */
2528 ST_FUNC void mk_pointer(CType *type)
2530 Sym *s;
2531 s = sym_push(SYM_FIELD, type, 0, -1);
2532 type->t = VT_PTR | (type->t & ~VT_TYPE);
2533 type->ref = s;
2536 /* compare function types. OLD functions match any new functions */
2537 static int is_compatible_func(CType *type1, CType *type2)
2539 Sym *s1, *s2;
2541 s1 = type1->ref;
2542 s2 = type2->ref;
2543 if (!is_compatible_types(&s1->type, &s2->type))
2544 return 0;
2545 /* check func_call */
2546 if (s1->a.func_call != s2->a.func_call)
2547 return 0;
2548 /* XXX: not complete */
2549 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2550 return 1;
2551 if (s1->c != s2->c)
2552 return 0;
2553 while (s1 != NULL) {
2554 if (s2 == NULL)
2555 return 0;
2556 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2557 return 0;
2558 s1 = s1->next;
2559 s2 = s2->next;
2561 if (s2)
2562 return 0;
2563 return 1;
2566 /* return true if type1 and type2 are the same. If unqualified is
2567 true, qualifiers on the types are ignored.
2569 - enums are not checked as gcc __builtin_types_compatible_p ()
2570 */
2571 static int compare_types(CType *type1, CType *type2, int unqualified)
2573 int bt1, t1, t2;
2575 t1 = type1->t & VT_TYPE;
2576 t2 = type2->t & VT_TYPE;
2577 if (unqualified) {
2578 /* strip qualifiers before comparing */
2579 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2580 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2582 /* Default Vs explicit signedness only matters for char */
2583 if ((t1 & VT_BTYPE) != VT_BYTE) {
2584 t1 &= ~VT_DEFSIGN;
2585 t2 &= ~VT_DEFSIGN;
2587 /* An enum is compatible with (unsigned) int. Ideally we would
2588 store the enums signedness in type->ref.a.<some_bit> and
2589 only accept unsigned enums with unsigned int and vice versa.
2590 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2591 from pointer target types, so we can't add it here either. */
2592 if ((t1 & VT_BTYPE) == VT_ENUM) {
2593 t1 = VT_INT;
2594 if (type1->ref->a.unsigned_enum)
2595 t1 |= VT_UNSIGNED;
2597 if ((t2 & VT_BTYPE) == VT_ENUM) {
2598 t2 = VT_INT;
2599 if (type2->ref->a.unsigned_enum)
2600 t2 |= VT_UNSIGNED;
2602 /* XXX: bitfields ? */
2603 if (t1 != t2)
2604 return 0;
2605 /* test more complicated cases */
2606 bt1 = t1 & VT_BTYPE;
2607 if (bt1 == VT_PTR) {
2608 type1 = pointed_type(type1);
2609 type2 = pointed_type(type2);
2610 return is_compatible_types(type1, type2);
2611 } else if (bt1 == VT_STRUCT) {
2612 return (type1->ref == type2->ref);
2613 } else if (bt1 == VT_FUNC) {
2614 return is_compatible_func(type1, type2);
2615 } else {
2616 return 1;
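/* Illustrative examples (added annotation, not part of the original source):
   with unqualified == 1, "const int" and "int" compare equal.  Pointer types
   recurse into the pointed-to types via is_compatible_types(), so "int *"
   and "const int *" still differ because the pointed-to qualifiers are
   compared.  Two struct/union types compare equal only if they refer to the
   same Sym. */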
2620 /* return true if type1 and type2 are exactly the same (including
2621 qualifiers).
2622 */
2623 static int is_compatible_types(CType *type1, CType *type2)
2625 return compare_types(type1,type2,0);
2628 /* return true if type1 and type2 are the same (ignoring qualifiers).
2629 */
2630 static int is_compatible_parameter_types(CType *type1, CType *type2)
2632 return compare_types(type1,type2,1);
2635 /* print a type. If 'varstr' is not NULL, then the variable is also
2636 printed in the type */
2637 /* XXX: union */
2638 /* XXX: add array and function pointers */
2639 static void type_to_str(char *buf, int buf_size,
2640 CType *type, const char *varstr)
2642 int bt, v, t;
2643 Sym *s, *sa;
2644 char buf1[256];
2645 const char *tstr;
2647 t = type->t & VT_TYPE;
2648 bt = t & VT_BTYPE;
2649 buf[0] = '\0';
2650 if (t & VT_CONSTANT)
2651 pstrcat(buf, buf_size, "const ");
2652 if (t & VT_VOLATILE)
2653 pstrcat(buf, buf_size, "volatile ");
2654 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2655 pstrcat(buf, buf_size, "unsigned ");
2656 else if (t & VT_DEFSIGN)
2657 pstrcat(buf, buf_size, "signed ");
2658 switch(bt) {
2659 case VT_VOID:
2660 tstr = "void";
2661 goto add_tstr;
2662 case VT_BOOL:
2663 tstr = "_Bool";
2664 goto add_tstr;
2665 case VT_BYTE:
2666 tstr = "char";
2667 goto add_tstr;
2668 case VT_SHORT:
2669 tstr = "short";
2670 goto add_tstr;
2671 case VT_INT:
2672 tstr = "int";
2673 goto add_tstr;
2674 case VT_LONG:
2675 tstr = "long";
2676 goto add_tstr;
2677 case VT_LLONG:
2678 tstr = "long long";
2679 goto add_tstr;
2680 case VT_FLOAT:
2681 tstr = "float";
2682 goto add_tstr;
2683 case VT_DOUBLE:
2684 tstr = "double";
2685 goto add_tstr;
2686 case VT_LDOUBLE:
2687 tstr = "long double";
2688 add_tstr:
2689 pstrcat(buf, buf_size, tstr);
2690 break;
2691 case VT_ENUM:
2692 case VT_STRUCT:
2693 if (bt == VT_STRUCT)
2694 tstr = "struct ";
2695 else
2696 tstr = "enum ";
2697 pstrcat(buf, buf_size, tstr);
2698 v = type->ref->v & ~SYM_STRUCT;
2699 if (v >= SYM_FIRST_ANOM)
2700 pstrcat(buf, buf_size, "<anonymous>");
2701 else
2702 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2703 break;
2704 case VT_FUNC:
2705 s = type->ref;
2706 type_to_str(buf, buf_size, &s->type, varstr);
2707 pstrcat(buf, buf_size, "(");
2708 sa = s->next;
2709 while (sa != NULL) {
2710 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2711 pstrcat(buf, buf_size, buf1);
2712 sa = sa->next;
2713 if (sa)
2714 pstrcat(buf, buf_size, ", ");
2716 pstrcat(buf, buf_size, ")");
2717 goto no_var;
2718 case VT_PTR:
2719 s = type->ref;
2720 if (t & VT_ARRAY) {
2721 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2722 type_to_str(buf, buf_size, &s->type, buf1);
2723 goto no_var;
2725 pstrcpy(buf1, sizeof(buf1), "*");
2726 if (t & VT_CONSTANT)
2727 pstrcat(buf1, buf_size, "const ");
2728 if (t & VT_VOLATILE)
2729 pstrcat(buf1, buf_size, "volatile ");
2730 if (varstr)
2731 pstrcat(buf1, sizeof(buf1), varstr);
2732 type_to_str(buf, buf_size, &s->type, buf1);
2733 goto no_var;
2735 if (varstr) {
2736 pstrcat(buf, buf_size, " ");
2737 pstrcat(buf, buf_size, varstr);
2739 no_var: ;
2742 /* verify type compatibility to store vtop in 'dt' type, and generate
2743 casts if needed. */
2744 static void gen_assign_cast(CType *dt)
2746 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2747 char buf1[256], buf2[256];
2748 int dbt, sbt;
2750 st = &vtop->type; /* source type */
2751 dbt = dt->t & VT_BTYPE;
2752 sbt = st->t & VT_BTYPE;
2753 if (sbt == VT_VOID || dbt == VT_VOID) {
2754 if (sbt == VT_VOID && dbt == VT_VOID)
2755 ; /*
2756 It is Ok if both are void
2757 A test program:
2758 void func1() {}
2759 void func2() {
2760 return func1();
2761 }
2762 gcc accepts this program
2763 */
2764 else
2765 tcc_error("cannot cast from/to void");
2767 if (dt->t & VT_CONSTANT)
2768 tcc_warning("assignment of read-only location");
2769 switch(dbt) {
2770 case VT_PTR:
2771 /* special cases for pointers */
2772 /* '0' can also be a pointer */
2773 if (is_null_pointer(vtop))
2774 goto type_ok;
2775 /* accept implicit pointer to integer cast with warning */
2776 if (is_integer_btype(sbt)) {
2777 tcc_warning("assignment makes pointer from integer without a cast");
2778 goto type_ok;
2780 type1 = pointed_type(dt);
2781 /* a function is implicitly a function pointer */
2782 if (sbt == VT_FUNC) {
2783 if ((type1->t & VT_BTYPE) != VT_VOID &&
2784 !is_compatible_types(pointed_type(dt), st))
2785 tcc_warning("assignment from incompatible pointer type");
2786 goto type_ok;
2788 if (sbt != VT_PTR)
2789 goto error;
2790 type2 = pointed_type(st);
2791 if ((type1->t & VT_BTYPE) == VT_VOID ||
2792 (type2->t & VT_BTYPE) == VT_VOID) {
2793 /* void * can match anything */
2794 } else {
2795 /* exact type match, except for qualifiers */
2796 tmp_type1 = *type1;
2797 tmp_type2 = *type2;
2798 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2799 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2800 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2801 /* Like GCC, don't warn by default for mere changes
2802 in pointer target signedness. Do warn for different
2803 base types, though, in particular for unsigned enums
2804 and signed int targets. */
2805 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2806 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2807 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2808 ;
2809 else
2810 tcc_warning("assignment from incompatible pointer type");
2813 /* check const and volatile */
2814 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2815 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2816 tcc_warning("assignment discards qualifiers from pointer target type");
2817 break;
2818 case VT_BYTE:
2819 case VT_SHORT:
2820 case VT_INT:
2821 case VT_LLONG:
2822 if (sbt == VT_PTR || sbt == VT_FUNC) {
2823 tcc_warning("assignment makes integer from pointer without a cast");
2824 } else if (sbt == VT_STRUCT) {
2825 goto case_VT_STRUCT;
2827 /* XXX: more tests */
2828 break;
2829 case VT_STRUCT:
2830 case_VT_STRUCT:
2831 tmp_type1 = *dt;
2832 tmp_type2 = *st;
2833 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2834 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2835 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2836 error:
2837 type_to_str(buf1, sizeof(buf1), st, NULL);
2838 type_to_str(buf2, sizeof(buf2), dt, NULL);
2839 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2841 break;
2843 type_ok:
2844 gen_cast(dt);
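/* Illustrative examples (added annotation, not part of the original source):

       char *p = 42;       ->  "assignment makes pointer from integer without a cast"
       int   i = p;        ->  "assignment makes integer from pointer without a cast"
       const int c = 0;
       int  *r = &c;       ->  "assignment discards qualifiers from pointer target type"

   All three are accepted with a warning; genuinely incompatible assignments,
   e.g. between different struct types, reach the "cannot cast" error. */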
2847 /* store vtop in lvalue pushed on stack */
2848 ST_FUNC void vstore(void)
2850 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2852 ft = vtop[-1].type.t;
2853 sbt = vtop->type.t & VT_BTYPE;
2854 dbt = ft & VT_BTYPE;
2855 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2856 (sbt == VT_INT && dbt == VT_SHORT))
2857 && !(vtop->type.t & VT_BITFIELD)) {
2858 /* optimize char/short casts */
2859 delayed_cast = VT_MUSTCAST;
2860 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2861 ((1 << VT_STRUCT_SHIFT) - 1));
2862 /* XXX: factorize */
2863 if (ft & VT_CONSTANT)
2864 tcc_warning("assignment of read-only location");
2865 } else {
2866 delayed_cast = 0;
2867 if (!(ft & VT_BITFIELD))
2868 gen_assign_cast(&vtop[-1].type);
2871 if (sbt == VT_STRUCT) {
2872 /* if structure, only generate pointer */
2873 /* structure assignment : generate memcpy */
2874 /* XXX: optimize if small size */
2875 if (!nocode_wanted) {
2876 size = type_size(&vtop->type, &align);
2878 /* destination */
2879 vswap();
2880 vtop->type.t = VT_PTR;
2881 gaddrof();
2883 /* address of memcpy() */
2884 #ifdef TCC_ARM_EABI
2885 if(!(align & 7))
2886 vpush_global_sym(&func_old_type, TOK_memcpy8);
2887 else if(!(align & 3))
2888 vpush_global_sym(&func_old_type, TOK_memcpy4);
2889 else
2890 #endif
2891 /* Use memmove, rather than memcpy, as dest and src may be same: */
2892 vpush_global_sym(&func_old_type, TOK_memmove);
2894 vswap();
2895 /* source */
2896 vpushv(vtop - 2);
2897 vtop->type.t = VT_PTR;
2898 gaddrof();
2899 /* type size */
2900 vpushi(size);
2901 gfunc_call(3);
2902 } else {
2903 vswap();
2904 vpop();
2906 /* leave source on stack */
2907 } else if (ft & VT_BITFIELD) {
2908 /* bitfield store handling */
2910 /* save lvalue as expression result (example: s.b = s.a = n;) */
2911 vdup(), vtop[-1] = vtop[-2];
2913 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2914 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2915 /* remove bit field info to avoid loops */
2916 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2918 if((ft & VT_BTYPE) == VT_BOOL) {
2919 gen_cast(&vtop[-1].type);
2920 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2923 /* duplicate destination */
2924 vdup();
2925 vtop[-1] = vtop[-2];
2927 /* mask and shift source */
2928 if((ft & VT_BTYPE) != VT_BOOL) {
2929 if((ft & VT_BTYPE) == VT_LLONG) {
2930 vpushll((1ULL << bit_size) - 1ULL);
2931 } else {
2932 vpushi((1 << bit_size) - 1);
2934 gen_op('&');
2936 vpushi(bit_pos);
2937 gen_op(TOK_SHL);
2938 /* load destination, mask and or with source */
2939 vswap();
2940 if((ft & VT_BTYPE) == VT_LLONG) {
2941 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2942 } else {
2943 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2945 gen_op('&');
2946 gen_op('|');
2947 /* store result */
2948 vstore();
2949 /* ... and discard */
2950 vpop();
2952 } else {
2953 if (!nocode_wanted) {
2954 #ifdef CONFIG_TCC_BCHECK
2955 /* bound check case */
2956 if (vtop[-1].r & VT_MUSTBOUND) {
2957 vswap();
2958 gbound();
2959 vswap();
2961 #endif
2962 rc = RC_INT;
2963 if (is_float(ft)) {
2964 rc = RC_FLOAT;
2965 #ifdef TCC_TARGET_X86_64
2966 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2967 rc = RC_ST0;
2968 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2969 rc = RC_FRET;
2971 #endif
2973 r = gv(rc); /* generate value */
2974 /* if lvalue was saved on stack, must read it */
2975 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2976 SValue sv;
2977 t = get_reg(RC_INT);
2978 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2979 sv.type.t = VT_PTR;
2980 #else
2981 sv.type.t = VT_INT;
2982 #endif
2983 sv.r = VT_LOCAL | VT_LVAL;
2984 sv.c.i = vtop[-1].c.i;
2985 load(t, &sv);
2986 vtop[-1].r = t | VT_LVAL;
2988 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2990 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
2991 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
2992 #else
2993 if ((ft & VT_BTYPE) == VT_LLONG) {
2994 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
2995 #endif
2996 vtop[-1].type.t = load_type;
2997 store(r, vtop - 1);
2998 vswap();
2999 /* convert to int to increment easily */
3000 vtop->type.t = addr_type;
3001 gaddrof();
3002 vpushi(load_size);
3003 gen_op('+');
3004 vtop->r |= VT_LVAL;
3005 vswap();
3006 vtop[-1].type.t = load_type;
3007 /* XXX: it works because r2 is spilled last ! */
3008 store(vtop->r2, vtop - 1);
3009 } else {
3010 store(r, vtop - 1);
3013 vswap();
3014 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3015 vtop->r |= delayed_cast;
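/* Worked example (added annotation, not part of the original source) of the
   bit-field path above: storing v into a field with bit_size 3 at bit_pos 4
   generates, in effect,

       dest = (dest & ~(0x7 << 4)) | ((v & 0x7) << 4);

   i.e. the source is masked to bit_size bits, shifted into place and OR-ed
   into the destination after the old bits have been cleared. */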
3019 /* 'post' selects post- vs. pre- increment/decrement; c is the token ++ or -- */
3020 ST_FUNC void inc(int post, int c)
3022 test_lvalue();
3023 vdup(); /* save lvalue */
3024 if (post) {
3025 if (!nocode_wanted)
3026 gv_dup(); /* duplicate value */
3027 else
3028 vdup(); /* duplicate value */
3029 vrotb(3);
3030 vrotb(3);
3032 /* add constant */
3033 vpushi(c - TOK_MID);
3034 gen_op('+');
3035 vstore(); /* store value */
3036 if (post)
3037 vpop(); /* if post op, return saved value */
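/* Illustrative example (added annotation, not part of the original source):
   for "i++" (post) the original value is duplicated and rotated below the
   lvalue/new-value pair, so after the store and the final vpop() the old
   value is what remains as the expression result; for "++i" the stored new
   value stays on the stack.  c - TOK_MID evaluates to +1 for TOK_INC and -1
   for TOK_DEC. */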
3040 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3042 /* read the string */
3043 if (tok != TOK_STR)
3044 expect(msg);
3045 cstr_new(astr);
3046 while (tok == TOK_STR) {
3047 /* XXX: add \0 handling too ? */
3048 cstr_cat(astr, tokc.str.data, -1);
3049 next();
3051 cstr_ccat(astr, '\0');
3054 /* Parse GNUC __attribute__ extension. Currently, the following
3055 extensions are recognized:
3056 - aligned(n) : set data/function alignment.
3057 - packed : force data alignment to 1
3058 - section(x) : generate data/code in this section.
3059 - unused : currently ignored, but may be used someday.
3060 - regparm(n) : pass function parameters in registers (i386 only)
3061 */
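/* Example of the accepted syntax (added annotation, not part of the original
   source; the identifiers and section name are made up):

       int counter __attribute__((section(".mydata"), aligned(16)));
       void die(void) __attribute__((noreturn));

   Attributes that are not recognized are skipped, with a warning when
   warn_unsupported is enabled, rather than rejected. */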
3062 static void parse_attribute(AttributeDef *ad)
3064 int t, n;
3065 CString astr;
3067 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3068 next();
3069 skip('(');
3070 skip('(');
3071 while (tok != ')') {
3072 if (tok < TOK_IDENT)
3073 expect("attribute name");
3074 t = tok;
3075 next();
3076 switch(t) {
3077 case TOK_SECTION1:
3078 case TOK_SECTION2:
3079 skip('(');
3080 parse_mult_str(&astr, "section name");
3081 ad->section = find_section(tcc_state, (char *)astr.data);
3082 skip(')');
3083 cstr_free(&astr);
3084 break;
3085 case TOK_ALIAS1:
3086 case TOK_ALIAS2:
3087 skip('(');
3088 parse_mult_str(&astr, "alias(\"target\")");
3089 ad->alias_target = /* save string as token, for later */
3090 tok_alloc((char*)astr.data, astr.size-1)->tok;
3091 skip(')');
3092 cstr_free(&astr);
3093 break;
3094 case TOK_VISIBILITY1:
3095 case TOK_VISIBILITY2:
3096 skip('(');
3097 parse_mult_str(&astr,
3098 "visibility(\"default|hidden|internal|protected\")");
3099 if (!strcmp (astr.data, "default"))
3100 ad->a.visibility = STV_DEFAULT;
3101 else if (!strcmp (astr.data, "hidden"))
3102 ad->a.visibility = STV_HIDDEN;
3103 else if (!strcmp (astr.data, "internal"))
3104 ad->a.visibility = STV_INTERNAL;
3105 else if (!strcmp (astr.data, "protected"))
3106 ad->a.visibility = STV_PROTECTED;
3107 else
3108 expect("visibility(\"default|hidden|internal|protected\")");
3109 skip(')');
3110 cstr_free(&astr);
3111 break;
3112 case TOK_ALIGNED1:
3113 case TOK_ALIGNED2:
3114 if (tok == '(') {
3115 next();
3116 n = expr_const();
3117 if (n <= 0 || (n & (n - 1)) != 0)
3118 tcc_error("alignment must be a positive power of two");
3119 skip(')');
3120 } else {
3121 n = MAX_ALIGN;
3123 ad->a.aligned = n;
3124 break;
3125 case TOK_PACKED1:
3126 case TOK_PACKED2:
3127 ad->a.packed = 1;
3128 break;
3129 case TOK_WEAK1:
3130 case TOK_WEAK2:
3131 ad->a.weak = 1;
3132 break;
3133 case TOK_UNUSED1:
3134 case TOK_UNUSED2:
3135 /* currently, no need to handle it because tcc does not
3136 track unused objects */
3137 break;
3138 case TOK_NORETURN1:
3139 case TOK_NORETURN2:
3140 /* currently ignored: tcc does not make use of
3141 the noreturn information */
3142 break;
3143 case TOK_CDECL1:
3144 case TOK_CDECL2:
3145 case TOK_CDECL3:
3146 ad->a.func_call = FUNC_CDECL;
3147 break;
3148 case TOK_STDCALL1:
3149 case TOK_STDCALL2:
3150 case TOK_STDCALL3:
3151 ad->a.func_call = FUNC_STDCALL;
3152 break;
3153 #ifdef TCC_TARGET_I386
3154 case TOK_REGPARM1:
3155 case TOK_REGPARM2:
3156 skip('(');
3157 n = expr_const();
3158 if (n > 3)
3159 n = 3;
3160 else if (n < 0)
3161 n = 0;
3162 if (n > 0)
3163 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3164 skip(')');
3165 break;
3166 case TOK_FASTCALL1:
3167 case TOK_FASTCALL2:
3168 case TOK_FASTCALL3:
3169 ad->a.func_call = FUNC_FASTCALLW;
3170 break;
3171 #endif
3172 case TOK_MODE:
3173 skip('(');
3174 switch(tok) {
3175 case TOK_MODE_DI:
3176 ad->a.mode = VT_LLONG + 1;
3177 break;
3178 case TOK_MODE_QI:
3179 ad->a.mode = VT_BYTE + 1;
3180 break;
3181 case TOK_MODE_HI:
3182 ad->a.mode = VT_SHORT + 1;
3183 break;
3184 case TOK_MODE_SI:
3185 case TOK_MODE_word:
3186 ad->a.mode = VT_INT + 1;
3187 break;
3188 default:
3189 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3190 break;
3192 next();
3193 skip(')');
3194 break;
3195 case TOK_DLLEXPORT:
3196 ad->a.func_export = 1;
3197 break;
3198 case TOK_DLLIMPORT:
3199 ad->a.func_import = 1;
3200 break;
3201 default:
3202 if (tcc_state->warn_unsupported)
3203 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3204 /* skip parameters */
3205 if (tok == '(') {
3206 int parenthesis = 0;
3207 do {
3208 if (tok == '(')
3209 parenthesis++;
3210 else if (tok == ')')
3211 parenthesis--;
3212 next();
3213 } while (parenthesis && tok != -1);
3215 break;
3217 if (tok != ',')
3218 break;
3219 next();
3221 skip(')');
3222 skip(')');
3226 static Sym * find_field (CType *type, int v)
3228 Sym *s = type->ref;
3229 v |= SYM_FIELD;
3230 while ((s = s->next) != NULL) {
3231 if ((s->v & SYM_FIELD) && (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3232 Sym *ret = find_field (&s->type, v);
3233 if (ret)
3234 return ret;
3236 if (s->v == v)
3237 break;
3239 return s;
3242 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3243 static void struct_decl(CType *type, AttributeDef *ad, int u)
3245 int a, v, size, align, maxalign, c, offset, flexible, extra_bytes;
3246 int bit_size, bit_pos, bsize, bt, lbit_pos, prevbt;
3247 Sym *s, *ss, *ass, **ps;
3248 AttributeDef ad1;
3249 CType type1, btype;
3251 a = tok; /* save decl type */
3252 next();
3253 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3254 parse_attribute(ad);
3255 if (tok != '{') {
3256 v = tok;
3257 next();
3258 /* struct already defined ? return it */
3259 if (v < TOK_IDENT)
3260 expect("struct/union/enum name");
3261 s = struct_find(v);
3262 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3263 if (s->type.t != a)
3264 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3265 goto do_decl;
3267 } else {
3268 v = anon_sym++;
3270 /* Record the original enum/struct/union token. */
3271 type1.t = a;
3272 type1.ref = NULL;
3273 /* we put an undefined size for struct/union */
3274 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3275 s->r = 0; /* default alignment is zero as gcc */
3276 /* put struct/union/enum name in type */
3277 do_decl:
3278 type->t = u;
3279 type->ref = s;
3281 if (tok == '{') {
3282 next();
3283 if (s->c != -1)
3284 tcc_error("struct/union/enum already defined");
3285 /* cannot be empty */
3286 c = 0;
3287 /* empty enums are not allowed */
3288 if (a == TOK_ENUM) {
3289 int seen_neg = 0;
3290 for(;;) {
3291 v = tok;
3292 if (v < TOK_UIDENT)
3293 expect("identifier");
3294 ss = sym_find(v);
3295 if (ss && !local_stack)
3296 tcc_error("redefinition of enumerator '%s'",
3297 get_tok_str(v, NULL));
3298 next();
3299 if (tok == '=') {
3300 next();
3301 c = expr_const();
3303 if (c < 0)
3304 seen_neg = 1;
3305 /* enum symbols have static storage */
3306 ss = sym_push(v, &int_type, VT_CONST, c);
3307 ss->type.t |= VT_STATIC;
3308 if (tok != ',')
3309 break;
3310 next();
3311 c++;
3312 /* NOTE: we accept a trailing comma */
3313 if (tok == '}')
3314 break;
3316 if (!seen_neg)
3317 s->a.unsigned_enum = 1;
3318 s->c = type_size(&int_type, &align);
3319 skip('}');
3320 } else {
3321 maxalign = 1;
3322 ps = &s->next;
3323 prevbt = VT_INT;
3324 bit_pos = 0;
3325 offset = 0;
3326 flexible = 0;
3327 while (tok != '}') {
3328 if (!parse_btype(&btype, &ad1)) {
3329 skip(';');
3330 continue;
3332 while (1) {
3333 extra_bytes = 0;
3334 if (flexible)
3335 tcc_error("flexible array member '%s' not at the end of struct",
3336 get_tok_str(v, NULL));
3337 bit_size = -1;
3338 v = 0;
3339 type1 = btype;
3340 if (tok != ':') {
3341 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3342 if (v == 0) {
3343 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3344 expect("identifier");
3345 else {
3346 int v = btype.ref->v;
3347 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3348 if (tcc_state->ms_extensions == 0)
3349 expect("identifier");
3353 if (type_size(&type1, &align) < 0) {
3354 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3355 flexible = 1;
3356 else
3357 tcc_error("field '%s' has incomplete type",
3358 get_tok_str(v, NULL));
3360 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3361 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3362 tcc_error("invalid type for '%s'",
3363 get_tok_str(v, NULL));
3365 if (tok == ':') {
3366 next();
3367 bit_size = expr_const();
3368 /* XXX: handle v = 0 case for messages */
3369 if (bit_size < 0)
3370 tcc_error("negative width in bit-field '%s'",
3371 get_tok_str(v, NULL));
3372 if (v && bit_size == 0)
3373 tcc_error("zero width for bit-field '%s'",
3374 get_tok_str(v, NULL));
3376 size = type_size(&type1, &align);
3377 if (ad1.a.aligned) {
3378 if (align < ad1.a.aligned)
3379 align = ad1.a.aligned;
3380 } else if (ad1.a.packed || ad->a.packed) {
3381 align = 1;
3382 } else if (*tcc_state->pack_stack_ptr) {
3383 if (align > *tcc_state->pack_stack_ptr)
3384 align = *tcc_state->pack_stack_ptr;
3386 lbit_pos = 0;
3387 if (bit_size >= 0) {
3388 bt = type1.t & VT_BTYPE;
3389 if (bt != VT_INT &&
3390 bt != VT_BYTE &&
3391 bt != VT_SHORT &&
3392 bt != VT_BOOL &&
3393 bt != VT_ENUM &&
3394 bt != VT_LLONG)
3395 tcc_error("bitfields must have scalar type");
3396 bsize = size * 8;
3397 if (bit_size > bsize) {
3398 tcc_error("width of '%s' exceeds its type",
3399 get_tok_str(v, NULL));
3400 } else if (bit_size == bsize) {
3401 /* no need for bit fields */
3402 bit_pos = 0;
3403 } else if (bit_size == 0) {
3404 /* XXX: what to do if only padding in a
3405 structure ? */
3406 /* zero size: means to pad */
3407 bit_pos = 0;
3408 } else {
3409 /* if the type changes, this is a union, or the field would
3410 * overrun the alignment slot, start at a newly
3411 * aligned slot */
3412 if ((bit_pos + bit_size) > bsize ||
3413 bt != prevbt || a == TOK_UNION)
3414 bit_pos = 0;
3415 lbit_pos = bit_pos;
3416 /* XXX: handle LSB first */
3417 type1.t |= VT_BITFIELD |
3418 (bit_pos << VT_STRUCT_SHIFT) |
3419 (bit_size << (VT_STRUCT_SHIFT + 6));
3420 bit_pos += bit_size;
3421 /* without ms-bitfields, allocate the
3422 * minimum number of bytes necessary,
3423 * adding single bytes as needed */
3424 if (!tcc_state->ms_bitfields) {
3425 if (lbit_pos == 0)
3426 /* minimum bytes for new bitfield */
3427 size = (bit_size + 7) / 8;
3428 else {
3429 /* enough spare bits already allocated? */
3430 bit_size = (lbit_pos - 1) % 8 + 1 + bit_size;
3431 if (bit_size > 8) /* doesn't fit */
3432 extra_bytes = (bit_size - 1) / 8;
3436 prevbt = bt;
3437 } else {
3438 bit_pos = 0;
3440 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3441 /* add new memory data only if starting bit
3442 field or adding bytes to existing bit field */
3443 if (extra_bytes) c += extra_bytes;
3444 else if (lbit_pos == 0) {
3445 if (a == TOK_STRUCT) {
3446 c = (c + align - 1) & -align;
3447 offset = c;
3448 if (size > 0)
3449 c += size;
3450 } else {
3451 offset = 0;
3452 if (size > c)
3453 c = size;
3455 if (align > maxalign)
3456 maxalign = align;
3458 #if 0
3459 printf("add field %s offset=%d",
3460 get_tok_str(v, NULL), offset);
3461 if (type1.t & VT_BITFIELD) {
3462 printf(" pos=%d size=%d",
3463 (type1.t >> VT_STRUCT_SHIFT) & 0x3f,
3464 (type1.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3466 printf("\n");
3467 #endif
3469 if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
3470 /* An anonymous struct/union. Adjust member offsets
3471 to reflect the real offset of our containing struct.
3472 Also set the offset of this anon member inside
3473 the outer struct to be zero. Via this it
3474 works when accessing the field offset directly
3475 (from base object), as well as when recursing
3476 members in initializer handling. */
3477 int v2 = btype.ref->v;
3478 if (!(v2 & SYM_FIELD) &&
3479 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3480 Sym **pps;
3481 /* This happens only with MS extensions. The
3482 anon member has a named struct type, so it
3483 potentially is shared with other references.
3484 We need to unshare members so we can modify
3485 them. */
3486 ass = type1.ref;
3487 type1.ref = sym_push(anon_sym++ | SYM_FIELD,
3488 &type1.ref->type, 0,
3489 type1.ref->c);
3490 pps = &type1.ref->next;
3491 while ((ass = ass->next) != NULL) {
3492 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3493 pps = &((*pps)->next);
3495 *pps = NULL;
3497 ass = type1.ref;
3498 while ((ass = ass->next) != NULL)
3499 ass->c += offset;
3500 offset = 0;
3501 v = anon_sym++;
3503 if (v) {
3504 ss = sym_push(v | SYM_FIELD, &type1, 0, offset);
3505 *ps = ss;
3506 ps = &ss->next;
3508 if (tok == ';' || tok == TOK_EOF)
3509 break;
3510 skip(',');
3512 skip(';');
3514 skip('}');
3515 /* store size and alignment */
3516 s->c = (c + maxalign - 1) & -maxalign;
3517 s->r = maxalign;
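/* Illustrative example (added annotation, not part of the original source),
   assuming 32-bit int and the default (non-MS) bit-field layout:

       struct s { char c; int x : 5; int y : 10; };

   'c' lands at offset 0; 'x' and 'y' share an int-aligned storage unit at
   offset 4 with bit positions 0 and 5; the final size is rounded up to the
   maximum member alignment, giving sizeof(struct s) == 8. */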
3522 /* return 1 if basic type is a type size (short, long, long long) */
3523 ST_FUNC int is_btype_size(int bt)
3525 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3528 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3529 are added to the element type, copied because it could be a typedef. */
3530 static void parse_btype_qualify(CType *type, int qualifiers)
3532 while (type->t & VT_ARRAY) {
3533 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3534 type = &type->ref->type;
3536 type->t |= qualifiers;
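/* Illustrative example (added annotation, not part of the original source):

       typedef int A[3];
       const A x;

   The qualifier must land on the element type ("const int [3]"), so the
   array Syms are copied (re-pushed) before VT_CONSTANT is OR-ed in, leaving
   the typedef'ed type itself untouched. */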
3539 /* return 0 if no type declaration. otherwise, return the basic type
3540 and skip it.
3541 */
3542 static int parse_btype(CType *type, AttributeDef *ad)
3544 int t, u, bt_size, complete, type_found, typespec_found;
3545 Sym *s;
3546 CType type1;
3548 memset(ad, 0, sizeof(AttributeDef));
3549 complete = 0;
3550 type_found = 0;
3551 typespec_found = 0;
3552 t = 0;
3553 while(1) {
3554 switch(tok) {
3555 case TOK_EXTENSION:
3556 /* currently, we really ignore extension */
3557 next();
3558 continue;
3560 /* basic types */
3561 case TOK_CHAR:
3562 u = VT_BYTE;
3563 basic_type:
3564 next();
3565 basic_type1:
3566 if (complete)
3567 tcc_error("too many basic types");
3568 t |= u;
3569 bt_size = is_btype_size (u & VT_BTYPE);
3570 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3571 complete = 1;
3572 typespec_found = 1;
3573 break;
3574 case TOK_VOID:
3575 u = VT_VOID;
3576 goto basic_type;
3577 case TOK_SHORT:
3578 u = VT_SHORT;
3579 goto basic_type;
3580 case TOK_INT:
3581 u = VT_INT;
3582 goto basic_type;
3583 case TOK_LONG:
3584 next();
3585 if ((t & VT_BTYPE) == VT_DOUBLE) {
3586 #ifndef TCC_TARGET_PE
3587 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3588 #endif
3589 } else if ((t & VT_BTYPE) == VT_LONG) {
3590 t = (t & ~VT_BTYPE) | VT_LLONG;
3591 } else {
3592 u = VT_LONG;
3593 goto basic_type1;
3595 break;
3596 #ifdef TCC_TARGET_ARM64
3597 case TOK_UINT128:
3598 /* GCC's __uint128_t appears in some Linux header files. Make it a
3599 synonym for long double to get the size and alignment right. */
3600 u = VT_LDOUBLE;
3601 goto basic_type;
3602 #endif
3603 case TOK_BOOL:
3604 u = VT_BOOL;
3605 goto basic_type;
3606 case TOK_FLOAT:
3607 u = VT_FLOAT;
3608 goto basic_type;
3609 case TOK_DOUBLE:
3610 next();
3611 if ((t & VT_BTYPE) == VT_LONG) {
3612 #ifdef TCC_TARGET_PE
3613 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3614 #else
3615 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3616 #endif
3617 } else {
3618 u = VT_DOUBLE;
3619 goto basic_type1;
3621 break;
3622 case TOK_ENUM:
3623 struct_decl(&type1, ad, VT_ENUM);
3624 basic_type2:
3625 u = type1.t;
3626 type->ref = type1.ref;
3627 goto basic_type1;
3628 case TOK_STRUCT:
3629 case TOK_UNION:
3630 struct_decl(&type1, ad, VT_STRUCT);
3631 goto basic_type2;
3633 /* type modifiers */
3634 case TOK_CONST1:
3635 case TOK_CONST2:
3636 case TOK_CONST3:
3637 type->t = t;
3638 parse_btype_qualify(type, VT_CONSTANT);
3639 t = type->t;
3640 next();
3641 break;
3642 case TOK_VOLATILE1:
3643 case TOK_VOLATILE2:
3644 case TOK_VOLATILE3:
3645 type->t = t;
3646 parse_btype_qualify(type, VT_VOLATILE);
3647 t = type->t;
3648 next();
3649 break;
3650 case TOK_SIGNED1:
3651 case TOK_SIGNED2:
3652 case TOK_SIGNED3:
3653 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3654 tcc_error("signed and unsigned modifier");
3655 typespec_found = 1;
3656 t |= VT_DEFSIGN;
3657 next();
3658 break;
3659 case TOK_REGISTER:
3660 case TOK_AUTO:
3661 case TOK_RESTRICT1:
3662 case TOK_RESTRICT2:
3663 case TOK_RESTRICT3:
3664 next();
3665 break;
3666 case TOK_UNSIGNED:
3667 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3668 tcc_error("signed and unsigned modifier");
3669 t |= VT_DEFSIGN | VT_UNSIGNED;
3670 next();
3671 typespec_found = 1;
3672 break;
3674 /* storage */
3675 case TOK_EXTERN:
3676 t |= VT_EXTERN;
3677 next();
3678 break;
3679 case TOK_STATIC:
3680 t |= VT_STATIC;
3681 next();
3682 break;
3683 case TOK_TYPEDEF:
3684 t |= VT_TYPEDEF;
3685 next();
3686 break;
3687 case TOK_INLINE1:
3688 case TOK_INLINE2:
3689 case TOK_INLINE3:
3690 t |= VT_INLINE;
3691 next();
3692 break;
3694 /* GNUC attribute */
3695 case TOK_ATTRIBUTE1:
3696 case TOK_ATTRIBUTE2:
3697 parse_attribute(ad);
3698 if (ad->a.mode) {
3699 u = ad->a.mode -1;
3700 t = (t & ~VT_BTYPE) | u;
3702 break;
3703 /* GNUC typeof */
3704 case TOK_TYPEOF1:
3705 case TOK_TYPEOF2:
3706 case TOK_TYPEOF3:
3707 next();
3708 parse_expr_type(&type1);
3709 /* remove all storage modifiers except typedef */
3710 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3711 goto basic_type2;
3712 default:
3713 if (typespec_found)
3714 goto the_end;
3715 s = sym_find(tok);
3716 if (!s || !(s->type.t & VT_TYPEDEF))
3717 goto the_end;
3719 type->t = ((s->type.t & ~VT_TYPEDEF) |
3720 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3721 type->ref = s->type.ref;
3722 if (t & (VT_CONSTANT | VT_VOLATILE))
3723 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3724 t = type->t;
3726 if (s->r) {
3727 /* get attributes from typedef */
3728 if (0 == ad->a.aligned)
3729 ad->a.aligned = s->a.aligned;
3730 if (0 == ad->a.func_call)
3731 ad->a.func_call = s->a.func_call;
3732 ad->a.packed |= s->a.packed;
3734 next();
3735 typespec_found = 1;
3736 break;
3738 type_found = 1;
3740 the_end:
3741 if (tcc_state->char_is_unsigned) {
3742 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3743 t |= VT_UNSIGNED;
3746 /* plain 'long' is never kept as a type of its own */
3747 if ((t & VT_BTYPE) == VT_LONG)
3748 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3749 defined TCC_TARGET_PE
3750 t = (t & ~VT_BTYPE) | VT_INT;
3751 #else
3752 t = (t & ~VT_BTYPE) | VT_LLONG;
3753 #endif
3754 type->t = t;
3755 return type_found;
3758 /* convert a function parameter type (array to pointer and function to
3759 function pointer) */
3760 static inline void convert_parameter_type(CType *pt)
3762 /* remove const and volatile qualifiers (XXX: const could be used
3763 to indicate a const function parameter) */
3764 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3765 /* array must be transformed to pointer according to ANSI C */
3766 pt->t &= ~VT_ARRAY;
3767 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3768 mk_pointer(pt);
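/* Illustrative example (added annotation, not part of the original source):
   in a prototype such as

       void f(int a[10], int g(void));

   'a' is adjusted to "int *" (the array bound is dropped) and 'g' to
   "int (*)(void)", matching the usual C parameter adjustments. */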
3772 ST_FUNC void parse_asm_str(CString *astr)
3774 skip('(');
3775 parse_mult_str(astr, "string constant");
3778 /* Parse an asm label and return the token */
3779 static int asm_label_instr(void)
3781 int v;
3782 CString astr;
3784 next();
3785 parse_asm_str(&astr);
3786 skip(')');
3787 #ifdef ASM_DEBUG
3788 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3789 #endif
3790 v = tok_alloc(astr.data, astr.size - 1)->tok;
3791 cstr_free(&astr);
3792 return v;
3795 static void post_type(CType *type, AttributeDef *ad, int storage)
3797 int n, l, t1, arg_size, align;
3798 Sym **plast, *s, *first;
3799 AttributeDef ad1;
3800 CType pt;
3802 if (tok == '(') {
3803 /* function declaration */
3804 next();
3805 l = 0;
3806 first = NULL;
3807 plast = &first;
3808 arg_size = 0;
3809 if (tok != ')') {
3810 for(;;) {
3811 /* read param name and compute offset */
3812 if (l != FUNC_OLD) {
3813 if (!parse_btype(&pt, &ad1)) {
3814 if (l) {
3815 tcc_error("invalid type");
3816 } else {
3817 l = FUNC_OLD;
3818 goto old_proto;
3821 l = FUNC_NEW;
3822 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3823 break;
3824 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3825 if ((pt.t & VT_BTYPE) == VT_VOID)
3826 tcc_error("parameter declared as void");
3827 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3828 } else {
3829 old_proto:
3830 n = tok;
3831 if (n < TOK_UIDENT)
3832 expect("identifier");
3833 pt.t = VT_INT;
3834 next();
3836 convert_parameter_type(&pt);
3837 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3838 *plast = s;
3839 plast = &s->next;
3840 if (tok == ')')
3841 break;
3842 skip(',');
3843 if (l == FUNC_NEW && tok == TOK_DOTS) {
3844 l = FUNC_ELLIPSIS;
3845 next();
3846 break;
3850 /* if no parameters, then old type prototype */
3851 if (l == 0)
3852 l = FUNC_OLD;
3853 skip(')');
3854 /* NOTE: const is ignored in returned type as it has a special
3855 meaning in gcc / C++ */
3856 type->t &= ~VT_CONSTANT;
3857 /* some ancient pre-K&R C allows a function to return an array
3858 and the array brackets to be put after the arguments, such
3859 that "int c()[]" means something like "int[] c()" */
3860 if (tok == '[') {
3861 next();
3862 skip(']'); /* only handle simple "[]" */
3863 type->t |= VT_PTR;
3865 /* we push an anonymous symbol which will contain the function prototype */
3866 ad->a.func_args = arg_size;
3867 s = sym_push(SYM_FIELD, type, 0, l);
3868 s->a = ad->a;
3869 s->next = first;
3870 type->t = VT_FUNC;
3871 type->ref = s;
3872 } else if (tok == '[') {
3873 int saved_nocode_wanted = nocode_wanted;
3874 /* array definition */
3875 next();
3876 if (tok == TOK_RESTRICT1)
3877 next();
3878 n = -1;
3879 t1 = 0;
3880 if (tok != ']') {
3881 if (!local_stack || (storage & VT_STATIC))
3882 vpushi(expr_const());
3883 else {
3884 /* VLAs (which can only happen with local_stack && !VT_STATIC)
3885 length must always be evaluated, even under nocode_wanted,
3886 so that its size slot is initialized (e.g. under sizeof
3887 or typeof). */
3888 nocode_wanted = 0;
3889 gexpr();
3891 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
3892 n = vtop->c.i;
3893 if (n < 0)
3894 tcc_error("invalid array size");
3895 } else {
3896 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
3897 tcc_error("size of variable length array should be an integer");
3898 t1 = VT_VLA;
3901 skip(']');
3902 /* parse next post type */
3903 post_type(type, ad, storage);
3904 if (type->t == VT_FUNC)
3905 tcc_error("declaration of an array of functions");
3906 t1 |= type->t & VT_VLA;
3908 if (t1 & VT_VLA) {
3909 loc -= type_size(&int_type, &align);
3910 loc &= -align;
3911 n = loc;
3913 vla_runtime_type_size(type, &align);
3914 gen_op('*');
3915 vset(&int_type, VT_LOCAL|VT_LVAL, n);
3916 vswap();
3917 vstore();
3919 if (n != -1)
3920 vpop();
3921 nocode_wanted = saved_nocode_wanted;
3923 /* we push an anonymous symbol which will contain the array
3924 element type */
3925 s = sym_push(SYM_FIELD, type, 0, n);
3926 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
3927 type->ref = s;
3931 /* Parse a type declaration (except basic type), and return the type
3932 in 'type'. 'td' is a bitmask indicating which kind of type decl is
3933 expected. 'type' should contain the basic type. 'ad' is the
3934 attribute definition of the basic type. It can be modified by
3935 type_decl().
3936 */
3937 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
3939 Sym *s;
3940 CType type1, *type2;
3941 int qualifiers, storage;
3943 while (tok == '*') {
3944 qualifiers = 0;
3945 redo:
3946 next();
3947 switch(tok) {
3948 case TOK_CONST1:
3949 case TOK_CONST2:
3950 case TOK_CONST3:
3951 qualifiers |= VT_CONSTANT;
3952 goto redo;
3953 case TOK_VOLATILE1:
3954 case TOK_VOLATILE2:
3955 case TOK_VOLATILE3:
3956 qualifiers |= VT_VOLATILE;
3957 goto redo;
3958 case TOK_RESTRICT1:
3959 case TOK_RESTRICT2:
3960 case TOK_RESTRICT3:
3961 goto redo;
3962 /* XXX: clarify attribute handling */
3963 case TOK_ATTRIBUTE1:
3964 case TOK_ATTRIBUTE2:
3965 parse_attribute(ad);
3966 break;
3968 mk_pointer(type);
3969 type->t |= qualifiers;
3972 /* recursive type */
3973 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
3974 type1.t = 0; /* XXX: same as int */
3975 if (tok == '(') {
3976 next();
3977 /* XXX: it is not correct to modify 'ad' at this point, but
3978 the syntax is not clear */
3979 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3980 parse_attribute(ad);
3981 type_decl(&type1, ad, v, td);
3982 skip(')');
3983 } else {
3984 /* type identifier */
3985 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
3986 *v = tok;
3987 next();
3988 } else {
3989 if (!(td & TYPE_ABSTRACT))
3990 expect("identifier");
3991 *v = 0;
3994 storage = type->t & VT_STORAGE;
3995 type->t &= ~VT_STORAGE;
3996 post_type(type, ad, storage);
3997 type->t |= storage;
3998 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3999 parse_attribute(ad);
4001 if (!type1.t)
4002 return;
4003 /* append type at the end of type1 */
4004 type2 = &type1;
4005 for(;;) {
4006 s = type2->ref;
4007 type2 = &s->type;
4008 if (!type2->t) {
4009 *type2 = *type;
4010 break;
4013 *type = type1;
4016 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4017 ST_FUNC int lvalue_type(int t)
4019 int bt, r;
4020 r = VT_LVAL;
4021 bt = t & VT_BTYPE;
4022 if (bt == VT_BYTE || bt == VT_BOOL)
4023 r |= VT_LVAL_BYTE;
4024 else if (bt == VT_SHORT)
4025 r |= VT_LVAL_SHORT;
4026 else
4027 return r;
4028 if (t & VT_UNSIGNED)
4029 r |= VT_LVAL_UNSIGNED;
4030 return r;
4033 /* indirection with full error checking and bound check */
4034 ST_FUNC void indir(void)
4036 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4037 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4038 return;
4039 expect("pointer");
4041 if ((vtop->r & VT_LVAL) && !nocode_wanted)
4042 gv(RC_INT);
4043 vtop->type = *pointed_type(&vtop->type);
4044 /* Arrays and functions are never lvalues */
4045 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4046 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4047 vtop->r |= lvalue_type(vtop->type.t);
4048 /* if bound checking, the referenced pointer must be checked */
4049 #ifdef CONFIG_TCC_BCHECK
4050 if (tcc_state->do_bounds_check)
4051 vtop->r |= VT_MUSTBOUND;
4052 #endif
4056 /* pass a parameter to a function and do type checking and casting */
4057 static void gfunc_param_typed(Sym *func, Sym *arg)
4059 int func_type;
4060 CType type;
4062 func_type = func->c;
4063 if (func_type == FUNC_OLD ||
4064 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4065 /* default casting : only need to convert float to double */
4066 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4067 type.t = VT_DOUBLE;
4068 gen_cast(&type);
4069 } else if (vtop->type.t & VT_BITFIELD) {
4070 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4071 type.ref = vtop->type.ref;
4072 gen_cast(&type);
4074 } else if (arg == NULL) {
4075 tcc_error("too many arguments to function");
4076 } else {
4077 type = arg->type;
4078 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4079 gen_assign_cast(&type);
4083 /* parse an expression of the form '(type)' or '(expr)' and return its
4084 type */
4085 static void parse_expr_type(CType *type)
4087 int n;
4088 AttributeDef ad;
4090 skip('(');
4091 if (parse_btype(type, &ad)) {
4092 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4093 } else {
4094 expr_type(type);
4096 skip(')');
4099 static void parse_type(CType *type)
4101 AttributeDef ad;
4102 int n;
4104 if (!parse_btype(type, &ad)) {
4105 expect("type");
4107 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4110 static void vpush_tokc(int t)
4112 CType type;
4113 type.t = t;
4114 type.ref = 0;
4115 vsetc(&type, VT_CONST, &tokc);
4118 ST_FUNC void unary(void)
4120 int n, t, align, size, r, sizeof_caller;
4121 CType type;
4122 Sym *s;
4123 AttributeDef ad;
4125 sizeof_caller = in_sizeof;
4126 in_sizeof = 0;
4127 /* XXX: GCC 2.95.3 does not generate a table although it should be
4128 better here */
4129 tok_next:
4130 switch(tok) {
4131 case TOK_EXTENSION:
4132 next();
4133 goto tok_next;
4134 case TOK_CINT:
4135 case TOK_CCHAR:
4136 case TOK_LCHAR:
4137 vpushi(tokc.i);
4138 next();
4139 break;
4140 case TOK_CUINT:
4141 vpush_tokc(VT_INT | VT_UNSIGNED);
4142 next();
4143 break;
4144 case TOK_CLLONG:
4145 vpush_tokc(VT_LLONG);
4146 next();
4147 break;
4148 case TOK_CULLONG:
4149 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4150 next();
4151 break;
4152 case TOK_CFLOAT:
4153 vpush_tokc(VT_FLOAT);
4154 next();
4155 break;
4156 case TOK_CDOUBLE:
4157 vpush_tokc(VT_DOUBLE);
4158 next();
4159 break;
4160 case TOK_CLDOUBLE:
4161 vpush_tokc(VT_LDOUBLE);
4162 next();
4163 break;
4164 case TOK___FUNCTION__:
4165 if (!gnu_ext)
4166 goto tok_identifier;
4167 /* fall thru */
4168 case TOK___FUNC__:
4170 void *ptr;
4171 int len;
4172 /* special function name identifier */
4173 len = strlen(funcname) + 1;
4174 /* generate char[len] type */
4175 type.t = VT_BYTE;
4176 mk_pointer(&type);
4177 type.t |= VT_ARRAY;
4178 type.ref->c = len;
4179 vpush_ref(&type, data_section, data_section->data_offset, len);
4180 ptr = section_ptr_add(data_section, len);
4181 memcpy(ptr, funcname, len);
4182 next();
4184 break;
4185 case TOK_LSTR:
4186 #ifdef TCC_TARGET_PE
4187 t = VT_SHORT | VT_UNSIGNED;
4188 #else
4189 t = VT_INT;
4190 #endif
4191 goto str_init;
4192 case TOK_STR:
4193 /* string parsing */
4194 t = VT_BYTE;
4195 str_init:
4196 if (tcc_state->warn_write_strings)
4197 t |= VT_CONSTANT;
4198 type.t = t;
4199 mk_pointer(&type);
4200 type.t |= VT_ARRAY;
4201 memset(&ad, 0, sizeof(AttributeDef));
4202 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4203 break;
4204 case '(':
4205 next();
4206 /* cast ? */
4207 if (parse_btype(&type, &ad)) {
4208 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4209 skip(')');
4210 /* check ISOC99 compound literal */
4211 if (tok == '{') {
4212 /* data is allocated locally by default */
4213 if (global_expr)
4214 r = VT_CONST;
4215 else
4216 r = VT_LOCAL;
4217 /* all except arrays are lvalues */
4218 if (!(type.t & VT_ARRAY))
4219 r |= lvalue_type(type.t);
4220 memset(&ad, 0, sizeof(AttributeDef));
4221 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4222 } else {
4223 if (sizeof_caller) {
4224 vpush(&type);
4225 return;
4227 unary();
4228 gen_cast(&type);
4230 } else if (tok == '{') {
4231 if (const_wanted)
4232 tcc_error("expected constant");
4233 /* save all registers */
4234 if (!nocode_wanted)
4235 save_regs(0);
4236 /* statement expression : we do not accept break/continue
4237 inside as GCC does */
4238 block(NULL, NULL, 1);
4239 skip(')');
4240 } else {
4241 gexpr();
4242 skip(')');
4244 break;
4245 case '*':
4246 next();
4247 unary();
4248 indir();
4249 break;
4250 case '&':
4251 next();
4252 unary();
4253 /* functions names must be treated as function pointers,
4254 except for unary '&' and sizeof. Since we consider that
4255 functions are not lvalues, we only have to handle it
4256 there and in function calls. */
4257 /* arrays can also be used although they are not lvalues */
4258 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4259 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4260 test_lvalue();
4261 mk_pointer(&vtop->type);
4262 gaddrof();
4263 break;
4264 case '!':
4265 next();
4266 unary();
4267 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4268 CType boolean;
4269 boolean.t = VT_BOOL;
4270 gen_cast(&boolean);
4271 vtop->c.i = !vtop->c.i;
4272 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4273 vtop->c.i ^= 1;
4274 else {
4275 save_regs(1);
4276 vseti(VT_JMP, gvtst(1, 0));
4278 break;
4279 case '~':
4280 next();
4281 unary();
4282 vpushi(-1);
4283 gen_op('^');
4284 break;
4285 case '+':
4286 next();
4287 unary();
4288 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4289 tcc_error("pointer not accepted for unary plus");
4290 /* In order to force cast, we add zero, except for floating point
4291 where we really need a no-op (otherwise -0.0 will be transformed
4292 into +0.0). */
4293 if (!is_float(vtop->type.t)) {
4294 vpushi(0);
4295 gen_op('+');
4297 break;
4298 case TOK_SIZEOF:
4299 case TOK_ALIGNOF1:
4300 case TOK_ALIGNOF2:
4301 t = tok;
4302 next();
4303 in_sizeof++;
4304 unary_type(&type); // this also performs in_sizeof = 0
4305 size = type_size(&type, &align);
4306 if (t == TOK_SIZEOF) {
4307 if (!(type.t & VT_VLA)) {
4308 if (size < 0)
4309 tcc_error("sizeof applied to an incomplete type");
4310 vpushs(size);
4311 } else {
4312 vla_runtime_type_size(&type, &align);
4314 } else {
4315 vpushs(align);
4317 vtop->type.t |= VT_UNSIGNED;
4318 break;
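/* Illustrative example (added annotation, not part of the original source):
   "sizeof a" where a is a VLA cannot be folded, so the size pushed by
   vla_runtime_type_size() is read from the VLA's stack slot at run time;
   for complete non-VLA types a constant is pushed, and in both cases the
   result type is made unsigned (size_t-like). */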
4320 case TOK_builtin_expect:
4322 /* __builtin_expect is a no-op for now */
4323 int saved_nocode_wanted;
4324 next();
4325 skip('(');
4326 expr_eq();
4327 skip(',');
4328 saved_nocode_wanted = nocode_wanted;
4329 nocode_wanted = 1;
4330 expr_lor_const();
4331 vpop();
4332 nocode_wanted = saved_nocode_wanted;
4333 skip(')');
4335 break;
4336 case TOK_builtin_types_compatible_p:
4338 CType type1, type2;
4339 next();
4340 skip('(');
4341 parse_type(&type1);
4342 skip(',');
4343 parse_type(&type2);
4344 skip(')');
4345 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4346 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4347 vpushi(is_compatible_types(&type1, &type2));
4349 break;
4350 case TOK_builtin_choose_expr:
4352 int saved_nocode_wanted, c;
4353 next();
4354 skip('(');
4355 c = expr_const();
4356 skip(',');
4357 if (!c) {
4358 saved_nocode_wanted = nocode_wanted;
4359 nocode_wanted = 1;
4361 expr_eq();
4362 if (!c) {
4363 vpop();
4364 nocode_wanted = saved_nocode_wanted;
4366 skip(',');
4367 if (c) {
4368 saved_nocode_wanted = nocode_wanted;
4369 nocode_wanted = 1;
4371 expr_eq();
4372 if (c) {
4373 vpop();
4374 nocode_wanted = saved_nocode_wanted;
4376 skip(')');
4378 break;
4379 case TOK_builtin_constant_p:
4381 int saved_nocode_wanted, res;
4382 next();
4383 skip('(');
4384 saved_nocode_wanted = nocode_wanted;
4385 nocode_wanted = 1;
4386 gexpr();
4387 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4388 vpop();
4389 nocode_wanted = saved_nocode_wanted;
4390 skip(')');
4391 vpushi(res);
4393 break;
4394 case TOK_builtin_frame_address:
4395 case TOK_builtin_return_address:
4397 int tok1 = tok;
4398 int level;
4399 CType type;
4400 next();
4401 skip('(');
4402 if (tok != TOK_CINT) {
4403 tcc_error("%s only takes positive integers",
4404 tok1 == TOK_builtin_return_address ?
4405 "__builtin_return_address" :
4406 "__builtin_frame_address");
4408 level = (uint32_t)tokc.i;
4409 next();
4410 skip(')');
4411 type.t = VT_VOID;
4412 mk_pointer(&type);
4413 vset(&type, VT_LOCAL, 0); /* local frame */
4414 while (level--) {
4415 mk_pointer(&vtop->type);
4416 indir(); /* -> parent frame */
4418 if (tok1 == TOK_builtin_return_address) {
4419 // assume return address is just above frame pointer on stack
4420 vpushi(PTR_SIZE);
4421 gen_op('+');
4422 mk_pointer(&vtop->type);
4423 indir();
4426 break;
4427 #ifdef TCC_TARGET_X86_64
4428 #ifdef TCC_TARGET_PE
4429 case TOK_builtin_va_start:
4431 next();
4432 skip('(');
4433 expr_eq();
4434 skip(',');
4435 expr_eq();
4436 skip(')');
4437 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4438 tcc_error("__builtin_va_start expects a local variable");
4439 vtop->r &= ~(VT_LVAL | VT_REF);
4440 vtop->type = char_pointer_type;
4441 vtop->c.i += 8;
4442 vstore();
4444 break;
4445 #else
4446 case TOK_builtin_va_arg_types:
4448 CType type;
4449 next();
4450 skip('(');
4451 parse_type(&type);
4452 skip(')');
4453 vpushi(classify_x86_64_va_arg(&type));
4455 break;
4456 #endif
4457 #endif
4459 #ifdef TCC_TARGET_ARM64
4460 case TOK___va_start: {
4461 if (nocode_wanted)
4462 tcc_error("statement in global scope");
4463 next();
4464 skip('(');
4465 expr_eq();
4466 skip(',');
4467 expr_eq();
4468 skip(')');
4469 //xx check types
4470 gen_va_start();
4471 vpushi(0);
4472 vtop->type.t = VT_VOID;
4473 break;
4475 case TOK___va_arg: {
4476 CType type;
4477 if (nocode_wanted)
4478 tcc_error("statement in global scope");
4479 next();
4480 skip('(');
4481 expr_eq();
4482 skip(',');
4483 parse_type(&type);
4484 skip(')');
4485 //xx check types
4486 gen_va_arg(&type);
4487 vtop->type = type;
4488 break;
4490 case TOK___arm64_clear_cache: {
4491 next();
4492 skip('(');
4493 expr_eq();
4494 skip(',');
4495 expr_eq();
4496 skip(')');
4497 gen_clear_cache();
4498 vpushi(0);
4499 vtop->type.t = VT_VOID;
4500 break;
4502 #endif
4503 /* pre operations */
4504 case TOK_INC:
4505 case TOK_DEC:
4506 t = tok;
4507 next();
4508 unary();
4509 inc(0, t);
4510 break;
4511 case '-':
4512 next();
4513 unary();
4514 t = vtop->type.t & VT_BTYPE;
4515 if (is_float(t)) {
4516 /* In IEEE negate(x) isn't subtract(0,x), but rather
4517 subtract(-0, x). */
4518 vpush(&vtop->type);
4519 if (t == VT_FLOAT)
4520 vtop->c.f = -0.0f;
4521 else if (t == VT_DOUBLE)
4522 vtop->c.d = -0.0;
4523 else
4524 vtop->c.ld = -0.0;
4525 } else
4526 vpushi(0);
4527 vswap();
4528 gen_op('-');
4529 break;
4530 case TOK_LAND:
4531 if (!gnu_ext)
4532 goto tok_identifier;
4533 next();
4534 /* allow to take the address of a label */
4535 if (tok < TOK_UIDENT)
4536 expect("label identifier");
4537 s = label_find(tok);
4538 if (!s) {
4539 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4540 } else {
4541 if (s->r == LABEL_DECLARED)
4542 s->r = LABEL_FORWARD;
4544 if (!s->type.t) {
4545 s->type.t = VT_VOID;
4546 mk_pointer(&s->type);
4547 s->type.t |= VT_STATIC;
4549 vpushsym(&s->type, s);
4550 next();
4551 break;
4553 // special qnan , snan and infinity values
4554 case TOK___NAN__:
4555 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4556 next();
4557 break;
4558 case TOK___SNAN__:
4559 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4560 next();
4561 break;
4562 case TOK___INF__:
4563 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4564 next();
4565 break;
4567 default:
4568 tok_identifier:
4569 t = tok;
4570 next();
4571 if (t < TOK_UIDENT)
4572 expect("identifier");
4573 s = sym_find(t);
4574 if (!s) {
4575 const char *name = get_tok_str(t, NULL);
4576 if (tok != '(')
4577 tcc_error("'%s' undeclared", name);
4578 /* for simple function calls, we tolerate an undeclared
4579 external reference to an int() function */
4580 if (tcc_state->warn_implicit_function_declaration
4581 #ifdef TCC_TARGET_PE
4582 /* people must be warned about using undeclared WINAPI functions
4583 (which usually start with an uppercase letter) */
4584 || (name[0] >= 'A' && name[0] <= 'Z')
4585 #endif
4586 )
4587 tcc_warning("implicit declaration of function '%s'", name);
4588 s = external_global_sym(t, &func_old_type, 0);
4590 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4591 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4592 /* if referencing an inline function, then we generate a
4593 symbol for it if not already done. This has the
4594 effect of generating code for it at the end of the
4595 compilation unit. Inline functions are always
4596 generated in the text section. */
4597 if (!s->c)
4598 put_extern_sym(s, text_section, 0, 0);
4599 r = VT_SYM | VT_CONST;
4600 } else {
4601 r = s->r;
4603 vset(&s->type, r, s->c);
4604 /* if forward reference, we must point to s */
4605 if (vtop->r & VT_SYM) {
4606 vtop->sym = s;
4607 vtop->c.i = 0;
4609 break;
4612 /* post operations */
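/* A postfix chain such as "p->x[i](a)" (illustration) loops here once
   per operator: the '->' branch adds the field offset, '[' indexes via
   gen_op('+') plus indir(), and '(' performs the function call. */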
4613 while (1) {
4614 if (tok == TOK_INC || tok == TOK_DEC) {
4615 inc(1, tok);
4616 next();
4617 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4618 int qualifiers;
4619 /* field */
4620 if (tok == TOK_ARROW)
4621 indir();
4622 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4623 test_lvalue();
4624 gaddrof();
4625 /* expect pointer on structure */
4626 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4627 expect("struct or union");
4628 if (tok == TOK_CDOUBLE)
4629 expect("field name");
4630 next();
4631 if (tok == TOK_CINT || tok == TOK_CUINT)
4632 expect("field name");
4633 s = find_field(&vtop->type, tok);
4634 if (!s)
4635 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4636 /* add field offset to pointer */
4637 vtop->type = char_pointer_type; /* change type to 'char *' */
4638 vpushi(s->c);
4639 gen_op('+');
4640 /* change type to field type, and set to lvalue */
4641 vtop->type = s->type;
4642 vtop->type.t |= qualifiers;
4643 /* an array is never an lvalue */
4644 if (!(vtop->type.t & VT_ARRAY)) {
4645 vtop->r |= lvalue_type(vtop->type.t);
4646 #ifdef CONFIG_TCC_BCHECK
4647 /* if bound checking, the referenced pointer must be checked */
4648 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4649 vtop->r |= VT_MUSTBOUND;
4650 #endif
4652 next();
4653 } else if (tok == '[') {
4654 next();
4655 gexpr();
4656 gen_op('+');
4657 indir();
4658 skip(']');
4659 } else if (tok == '(') {
4660 SValue ret;
4661 Sym *sa;
4662 int nb_args, ret_nregs, ret_align, regsize, variadic;
4664 /* function call */
4665 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4666 /* pointer test (no array accepted) */
4667 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4668 vtop->type = *pointed_type(&vtop->type);
4669 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4670 goto error_func;
4671 } else {
4672 error_func:
4673 expect("function pointer");
4675 } else {
4676 vtop->r &= ~VT_LVAL; /* no lvalue */
4678 /* get return type */
4679 s = vtop->type.ref;
4680 next();
4681 sa = s->next; /* first parameter */
4682 nb_args = 0;
4683 ret.r2 = VT_CONST;
4684 /* compute first implicit argument if a structure is returned */
4685 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4686 variadic = (s->c == FUNC_ELLIPSIS);
4687 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4688 &ret_align, &regsize);
4689 if (!ret_nregs) {
4690 /* get some space for the returned structure */
4691 size = type_size(&s->type, &align);
4692 #ifdef TCC_TARGET_ARM64
4693 /* On arm64, a small struct is returned in registers.
4694 It is much easier to write it to memory if we know
4695 that we are allowed to write some extra bytes, so
4696 round the allocated space up to a power of 2: */
4697 if (size < 16)
4698 while (size & (size - 1))
4699 size = (size | (size - 1)) + 1;
4700 #endif
4701 loc = (loc - size) & -align;
4702 ret.type = s->type;
4703 ret.r = VT_LOCAL | VT_LVAL;
4704 /* pass it as 'int' to avoid structure arg passing
4705 problems */
4706 vseti(VT_LOCAL, loc);
4707 ret.c = vtop->c;
4708 nb_args++;
4710 } else {
4711 ret_nregs = 1;
4712 ret.type = s->type;
4715 if (ret_nregs) {
4716 /* return in register */
4717 if (is_float(ret.type.t)) {
4718 ret.r = reg_fret(ret.type.t);
4719 #ifdef TCC_TARGET_X86_64
4720 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4721 ret.r2 = REG_QRET;
4722 #endif
4723 } else {
4724 #ifndef TCC_TARGET_ARM64
4725 #ifdef TCC_TARGET_X86_64
4726 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4727 #else
4728 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4729 #endif
4730 ret.r2 = REG_LRET;
4731 #endif
4732 ret.r = REG_IRET;
4734 ret.c.i = 0;
4736 if (tok != ')') {
4737 for(;;) {
4738 expr_eq();
4739 gfunc_param_typed(s, sa);
4740 nb_args++;
4741 if (sa)
4742 sa = sa->next;
4743 if (tok == ')')
4744 break;
4745 skip(',');
4748 if (sa)
4749 tcc_error("too few arguments to function");
4750 skip(')');
4751 if (!nocode_wanted) {
4752 gfunc_call(nb_args);
4753 } else {
4754 vtop -= (nb_args + 1);
4757 /* return value */
4758 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4759 vsetc(&ret.type, r, &ret.c);
4760 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4763 /* handle packed struct return */
4764 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4765 int addr, offset;
4767 size = type_size(&s->type, &align);
4768 /* We're often writing whole registers, so make sure there's
4769 enough space. Assume the register size is a power of 2. */
4770 if (regsize > align)
4771 align = regsize;
4772 loc = (loc - size) & -align;
4773 addr = loc;
4774 offset = 0;
4775 for (;;) {
4776 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4777 vswap();
4778 vstore();
4779 vtop--;
4780 if (--ret_nregs == 0)
4781 break;
4782 offset += regsize;
4784 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4786 } else {
4787 break;
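/* Binary operators are handled by a classic precedence cascade:
   expr_prod -> expr_sum -> expr_shift -> expr_cmp -> expr_cmpeq ->
   expr_and -> expr_xor -> expr_or -> expr_land/expr_lor -> expr_cond.
   Each level loops over its own operators, parses the next-higher
   level for the operands and emits the operation with gen_op(). */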
4792 ST_FUNC void expr_prod(void)
4794 int t;
4796 unary();
4797 while (tok == '*' || tok == '/' || tok == '%') {
4798 t = tok;
4799 next();
4800 unary();
4801 gen_op(t);
4805 ST_FUNC void expr_sum(void)
4807 int t;
4809 expr_prod();
4810 while (tok == '+' || tok == '-') {
4811 t = tok;
4812 next();
4813 expr_prod();
4814 gen_op(t);
4818 static void expr_shift(void)
4820 int t;
4822 expr_sum();
4823 while (tok == TOK_SHL || tok == TOK_SAR) {
4824 t = tok;
4825 next();
4826 expr_sum();
4827 gen_op(t);
4831 static void expr_cmp(void)
4833 int t;
4835 expr_shift();
4836 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4837 tok == TOK_ULT || tok == TOK_UGE) {
4838 t = tok;
4839 next();
4840 expr_shift();
4841 gen_op(t);
4845 static void expr_cmpeq(void)
4847 int t;
4849 expr_cmp();
4850 while (tok == TOK_EQ || tok == TOK_NE) {
4851 t = tok;
4852 next();
4853 expr_cmp();
4854 gen_op(t);
4858 static void expr_and(void)
4860 expr_cmpeq();
4861 while (tok == '&') {
4862 next();
4863 expr_cmpeq();
4864 gen_op('&');
4868 static void expr_xor(void)
4870 expr_and();
4871 while (tok == '^') {
4872 next();
4873 expr_and();
4874 gen_op('^');
4878 static void expr_or(void)
4880 expr_xor();
4881 while (tok == '|') {
4882 next();
4883 expr_xor();
4884 gen_op('|');
4888 /* XXX: fix this mess */
4889 static void expr_land_const(void)
4891 expr_or();
4892 while (tok == TOK_LAND) {
4893 next();
4894 expr_or();
4895 gen_op(TOK_LAND);
4898 static void expr_lor_const(void)
4900 expr_land_const();
4901 while (tok == TOK_LOR) {
4902 next();
4903 expr_land_const();
4904 gen_op(TOK_LOR);
4908 static void expr_land(void)
4910 expr_or();
4911 if (tok == TOK_LAND) {
4912 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4913 CType ctb, cti;
4914 ctb.t = VT_BOOL;
4915 cti.t = VT_INT;
4916 next();
4917 gen_cast(&ctb);
4918 if (vtop->c.i) {
4919 vpop();
4920 expr_land();
4921 gen_cast(&ctb);
4922 } else {
4923 int saved_nocode_wanted = nocode_wanted;
4924 nocode_wanted = 1;
4925 expr_land();
4926 vpop();
4927 nocode_wanted = saved_nocode_wanted;
4929 gen_cast(&cti);
4930 } else {
4931 int t = 0;
4932 save_regs(1);
4933 for(;;) {
4934 t = gvtst(1, t);
4935 if (tok != TOK_LAND) {
4936 vseti(VT_JMPI, t);
4937 break;
4939 next();
4940 expr_or();
4946 static void expr_lor(void)
4948 expr_land();
4949 if (tok == TOK_LOR) {
4950 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4951 CType ctb, cti;
4952 ctb.t = VT_BOOL;
4953 cti.t = VT_INT;
4954 next();
4955 gen_cast(&ctb);
4956 if (vtop->c.i) {
4957 int saved_nocode_wanted = nocode_wanted;
4958 nocode_wanted = 1;
4959 expr_lor();
4960 vpop();
4961 nocode_wanted = saved_nocode_wanted;
4962 } else {
4963 vpop();
4964 expr_lor();
4965 gen_cast(&ctb);
4967 gen_cast(&cti);
4968 } else {
4969 int t = 0;
4970 save_regs(1);
4971 for(;;) {
4972 t = gvtst(0, t);
4973 if (tok != TOK_LOR) {
4974 vseti(VT_JMP, t);
4975 break;
4977 next();
4978 expr_land();
4984 /* Assuming vtop is a value used in a conditional context
4985 (i.e. compared with zero) return 0 if it's false, 1 if
4986 true and -1 if it can't be statically determined. */
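/* For example (illustration): a literal "1 && 2" folds to the constant 1
   and yields 1 here, while a value that lives in a register or memory
   yields -1, so the caller must emit a real test. */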
4987 static int condition_3way(void)
4989 int c = -1;
4990 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
4991 (!(vtop->r & VT_SYM) ||
4992 !(vtop->sym->type.t & VT_WEAK))) {
4993 CType boolean;
4994 boolean.t = VT_BOOL;
4995 vdup();
4996 gen_cast(&boolean);
4997 c = vtop->c.i;
4998 vpop();
5000 return c;
5003 static void expr_cond(void)
5005 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv;
5006 int c;
5007 SValue sv;
5008 CType type, type1, type2;
5010 expr_lor();
5011 if (tok == '?') {
5012 next();
5013 c = condition_3way();
5014 if (c >= 0) {
5015 int saved_nocode_wanted = nocode_wanted;
5016 if (c) {
5017 if (tok != ':' || !gnu_ext) {
5018 vpop();
5019 gexpr();
5021 skip(':');
5022 nocode_wanted = 1;
5023 expr_cond();
5024 vpop();
5025 nocode_wanted = saved_nocode_wanted;
5026 } else {
5027 vpop();
5028 if (tok != ':' || !gnu_ext) {
5029 nocode_wanted = 1;
5030 gexpr();
5031 vpop();
5032 nocode_wanted = saved_nocode_wanted;
5034 skip(':');
5035 expr_cond();
5038 else {
5039 /* XXX This doesn't handle nocode_wanted correctly at all.
5040 It unconditionally calls gv/gvtst and friends. That's
5041 the case for many of the expr_ routines. Currently
5042 that should generate only useless code, but depending
5043 on other operand handling this might also generate
5044 pointer derefs for lvalue conversions whose result
5045 is useless, but can nevertheless lead to a segfault.
5047 At some point we need to overhaul the whole nocode_wanted
5048 handling. */
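/* Rough shape of the code emitted below for "e ? a : b" when e is not a
   compile-time constant (illustration): evaluate e, branch if false,
   emit the code for a, jump over b, emit b into a register, then a small
   fix-up sequence materializes a into the same register so both paths
   agree (see the gvtst/gjmp/gsym/move_reg calls below). */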
5049 if (vtop != vstack) {
5050 /* needed to avoid having different registers saved in
5051 each branch */
5052 if (is_float(vtop->type.t)) {
5053 rc = RC_FLOAT;
5054 #ifdef TCC_TARGET_X86_64
5055 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5056 rc = RC_ST0;
5058 #endif
5060 else
5061 rc = RC_INT;
5062 gv(rc);
5063 save_regs(1);
5065 if (tok == ':' && gnu_ext) {
5066 gv_dup();
5067 tt = gvtst(1, 0);
5068 } else {
5069 tt = gvtst(1, 0);
5070 gexpr();
5072 type1 = vtop->type;
5073 sv = *vtop; /* save value to handle it later */
5074 vtop--; /* no vpop so that FP stack is not flushed */
5075 skip(':');
5076 u = gjmp(0);
5077 gsym(tt);
5078 expr_cond();
5079 type2 = vtop->type;
5081 t1 = type1.t;
5082 bt1 = t1 & VT_BTYPE;
5083 t2 = type2.t;
5084 bt2 = t2 & VT_BTYPE;
5085 /* cast operands to correct type according to ISOC rules */
5086 if (is_float(bt1) || is_float(bt2)) {
5087 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5088 type.t = VT_LDOUBLE;
5089 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5090 type.t = VT_DOUBLE;
5091 } else {
5092 type.t = VT_FLOAT;
5094 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5095 /* cast to biggest op */
5096 type.t = VT_LLONG;
5097 /* convert to unsigned if it does not fit in a long long */
5098 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5099 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5100 type.t |= VT_UNSIGNED;
5101 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5102 /* If one is a null ptr constant the result type
5103 is the other. */
5104 if (is_null_pointer (vtop))
5105 type = type1;
5106 else if (is_null_pointer (&sv))
5107 type = type2;
5108 /* XXX: test pointer compatibility, C99 has more elaborate
5109 rules here. */
5110 else
5111 type = type1;
5112 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5113 /* XXX: test function pointer compatibility */
5114 type = bt1 == VT_FUNC ? type1 : type2;
5115 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5116 /* XXX: test structure compatibility */
5117 type = bt1 == VT_STRUCT ? type1 : type2;
5118 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5119 /* NOTE: as an extension, we accept void on only one side */
5120 type.t = VT_VOID;
5121 } else {
5122 /* integer operations */
5123 type.t = VT_INT;
5124 /* convert to unsigned if it does not fit in an integer */
5125 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5126 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5127 type.t |= VT_UNSIGNED;
5129 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5130 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5131 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5133 /* now we convert second operand */
5134 gen_cast(&type);
5135 if (islv) {
5136 mk_pointer(&vtop->type);
5137 gaddrof();
5139 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5140 gaddrof();
5141 rc = RC_INT;
5142 if (is_float(type.t)) {
5143 rc = RC_FLOAT;
5144 #ifdef TCC_TARGET_X86_64
5145 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5146 rc = RC_ST0;
5148 #endif
5149 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5150 /* for long longs, we use fixed registers to avoid having
5151 to handle a complicated move */
5152 rc = RC_IRET;
5155 r2 = gv(rc);
5156 /* this is horrible, but we must also convert first
5157 operand */
5158 tt = gjmp(0);
5159 gsym(u);
5160 /* put again first value and cast it */
5161 *vtop = sv;
5162 gen_cast(&type);
5163 if (islv) {
5164 mk_pointer(&vtop->type);
5165 gaddrof();
5167 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5168 gaddrof();
5169 r1 = gv(rc);
5170 move_reg(r2, r1, type.t);
5171 vtop->r = r2;
5172 gsym(tt);
5173 if (islv)
5174 indir();
5179 static void expr_eq(void)
5181 int t;
5183 expr_cond();
5184 if (tok == '=' ||
5185 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5186 tok == TOK_A_XOR || tok == TOK_A_OR ||
5187 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5188 test_lvalue();
5189 t = tok;
5190 next();
5191 if (t == '=') {
5192 expr_eq();
5193 } else {
5194 vdup();
5195 expr_eq();
5196 gen_op(t & 0x7f);
5198 vstore();
5202 ST_FUNC void gexpr(void)
5204 while (1) {
5205 expr_eq();
5206 if (tok != ',')
5207 break;
5208 vpop();
5209 next();
5213 /* parse an expression and return its type without any side effect. */
5214 static void expr_type(CType *type)
5216 int saved_nocode_wanted;
5218 saved_nocode_wanted = nocode_wanted;
5219 nocode_wanted = 1;
5220 gexpr();
5221 *type = vtop->type;
5222 vpop();
5223 nocode_wanted = saved_nocode_wanted;
5226 /* parse a unary expression and return its type without any side
5227 effect. */
5228 static void unary_type(CType *type)
5230 int a;
5232 a = nocode_wanted;
5233 nocode_wanted = 1;
5234 unary();
5235 *type = vtop->type;
5236 vpop();
5237 nocode_wanted = a;
5240 /* parse a constant expression and return value in vtop. */
5241 static void expr_const1(void)
5243 int a;
5244 a = const_wanted;
5245 const_wanted = 1;
5246 expr_cond();
5247 const_wanted = a;
5250 /* parse an integer constant and return its value. */
5251 ST_FUNC int expr_const(void)
5253 int c;
5254 expr_const1();
5255 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5256 expect("constant expression");
5257 c = vtop->c.i;
5258 vpop();
5259 return c;
5262 /* return the label token if current token is a label, otherwise
5263 return zero */
5264 static int is_label(void)
5266 int last_tok;
5268 /* fast test first */
5269 if (tok < TOK_UIDENT)
5270 return 0;
5271 /* no need to save tokc because tok is an identifier */
5272 last_tok = tok;
5273 next();
5274 if (tok == ':') {
5275 next();
5276 return last_tok;
5277 } else {
5278 unget_tok(last_tok);
5279 return 0;
5283 static void label_or_decl(int l)
5285 int last_tok;
5287 /* fast test first */
5288 if (tok >= TOK_UIDENT)
5290 /* no need to save tokc because tok is an identifier */
5291 last_tok = tok;
5292 next();
5293 if (tok == ':') {
5294 unget_tok(last_tok);
5295 return;
5297 unget_tok(last_tok);
5299 decl(l);
5302 static int case_cmp(const void *pa, const void *pb)
5304 int a = (*(struct case_t**) pa)->v1;
5305 int b = (*(struct case_t**) pb)->v1;
5306 return a < b ? -1 : a > b;
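/* Dispatch on the sorted case ranges collected while parsing the switch
   body: with more than 4 ranges a binary search on the range bounds is
   emitted, the rest is a linear sequence of compares.  A GNU range such
   as "case 1 ... 3:" (illustration) is stored with v1=1, v2=3 and tested
   with the LE/GE pair below. */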
5309 static int gcase(struct case_t **base, int len, int case_reg, int *bsym)
5311 struct case_t *p;
5312 int e;
5313 while (len > 4) {
5314 /* binary search */
5315 p = base[len/2];
5316 vseti(case_reg, 0);
5317 vdup();
5318 vpushi(p->v2);
5319 gen_op(TOK_LE);
5320 e = gtst(1, 0);
5321 case_reg = gv(RC_INT);
5322 vpop();
5323 vseti(case_reg, 0);
5324 vdup();
5325 vpushi(p->v1);
5326 gen_op(TOK_GE);
5327 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5328 case_reg = gv(RC_INT);
5329 vpop();
5330 /* x < v1 */
5331 case_reg = gcase(base, len/2, case_reg, bsym);
5332 if (cur_switch->def_sym)
5333 gjmp_addr(cur_switch->def_sym);
5334 else
5335 *bsym = gjmp(*bsym);
5336 /* x > v2 */
5337 gsym(e);
5338 e = len/2 + 1;
5339 base += e; len -= e;
5341 /* linear scan */
5342 while (len--) {
5343 p = *base++;
5344 vseti(case_reg, 0);
5345 vdup();
5346 vpushi(p->v2);
5347 if (p->v1 == p->v2) {
5348 gen_op(TOK_EQ);
5349 gtst_addr(0, p->sym);
5350 } else {
5351 gen_op(TOK_LE);
5352 e = gtst(1, 0);
5353 case_reg = gv(RC_INT);
5354 vpop();
5355 vseti(case_reg, 0);
5356 vdup();
5357 vpushi(p->v1);
5358 gen_op(TOK_GE);
5359 gtst_addr(0, p->sym);
5360 gsym(e);
5362 case_reg = gv(RC_INT);
5363 vpop();
5365 return case_reg;
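/* Parse one statement or compound statement.  'bsym' and 'csym' collect
   the forward jump chains used by 'break' and 'continue'; 'is_expr' is
   set when parsing a GNU statement expression, in which case the value
   of the last expression statement is kept on the value stack. */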
5368 static void block(int *bsym, int *csym, int is_expr)
5370 int a, b, c, d, cond;
5371 Sym *s;
5373 /* generate line number info */
5374 if (tcc_state->do_debug &&
5375 (last_line_num != file->line_num || last_ind != ind)) {
5376 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5377 last_ind = ind;
5378 last_line_num = file->line_num;
5381 if (is_expr) {
5382 /* default return value is (void) */
5383 vpushi(0);
5384 vtop->type.t = VT_VOID;
5387 if (tok == TOK_IF) {
5388 /* if test */
5389 int saved_nocode_wanted = nocode_wanted;
5390 next();
5391 skip('(');
5392 gexpr();
5393 skip(')');
5394 cond = condition_3way();
5395 if (cond == 0)
5396 nocode_wanted |= 2;
5397 a = gvtst(1, 0);
5398 block(bsym, csym, 0);
5399 if (cond != 1)
5400 nocode_wanted = saved_nocode_wanted;
5401 c = tok;
5402 if (c == TOK_ELSE) {
5403 next();
5404 if (cond == 1)
5405 nocode_wanted |= 2;
5406 d = gjmp(0);
5407 gsym(a);
5408 block(bsym, csym, 0);
5409 gsym(d); /* patch else jmp */
5410 if (cond != 0)
5411 nocode_wanted = saved_nocode_wanted;
5412 } else
5413 gsym(a);
5414 } else if (tok == TOK_WHILE) {
5415 int saved_nocode_wanted;
5416 nocode_wanted &= ~2;
5417 next();
5418 d = ind;
5419 vla_sp_restore();
5420 skip('(');
5421 gexpr();
5422 skip(')');
5423 a = gvtst(1, 0);
5424 b = 0;
5425 ++local_scope;
5426 saved_nocode_wanted = nocode_wanted;
5427 block(&a, &b, 0);
5428 nocode_wanted = saved_nocode_wanted;
5429 --local_scope;
5430 if(!nocode_wanted)
5431 gjmp_addr(d);
5432 gsym(a);
5433 gsym_addr(b, d);
5434 } else if (tok == '{') {
5435 Sym *llabel;
5436 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5438 next();
5439 /* record local declaration stack position */
5440 s = local_stack;
5441 llabel = local_label_stack;
5442 ++local_scope;
5444 /* handle local labels declarations */
5445 if (tok == TOK_LABEL) {
5446 next();
5447 for(;;) {
5448 if (tok < TOK_UIDENT)
5449 expect("label identifier");
5450 label_push(&local_label_stack, tok, LABEL_DECLARED);
5451 next();
5452 if (tok == ',') {
5453 next();
5454 } else {
5455 skip(';');
5456 break;
5460 while (tok != '}') {
5461 label_or_decl(VT_LOCAL);
5462 if (tok != '}') {
5463 if (is_expr)
5464 vpop();
5465 block(bsym, csym, is_expr);
5468 /* pop locally defined labels */
5469 label_pop(&local_label_stack, llabel);
5470 /* pop locally defined symbols */
5471 --local_scope;
5472 /* In the is_expr case (a statement expression is finished here),
5473 vtop might refer to symbols on the local_stack. Either via the
5474 type or via vtop->sym. We can't pop those nor any that in turn
5475 might be referred to. To make it easier we don't roll back
5476 any symbols in that case; some upper level call to block() will
5477 do that. We do have to remove such symbols from the lookup
5478 tables, though. sym_pop will do that. */
5479 sym_pop(&local_stack, s, is_expr);
5481 /* Pop VLA frames and restore stack pointer if required */
5482 if (vlas_in_scope > saved_vlas_in_scope) {
5483 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5484 vla_sp_restore();
5486 vlas_in_scope = saved_vlas_in_scope;
5488 next();
5489 } else if (tok == TOK_RETURN) {
5490 next();
5491 if (tok != ';') {
5492 gexpr();
5493 gen_assign_cast(&func_vt);
5494 #ifdef TCC_TARGET_ARM64
5495 // Perhaps it would be better to use this for all backends:
5496 greturn();
5497 #else
5498 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5499 CType type, ret_type;
5500 int ret_align, ret_nregs, regsize;
5501 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5502 &ret_align, &regsize);
5503 if (0 == ret_nregs) {
5504 /* if returning structure, must copy it to implicit
5505 first pointer arg location */
5506 type = func_vt;
5507 mk_pointer(&type);
5508 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5509 indir();
5510 vswap();
5511 /* copy structure value to pointer */
5512 vstore();
5513 } else {
5514 /* returning structure packed into registers */
5515 int r, size, addr, align;
5516 size = type_size(&func_vt,&align);
5517 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5518 (vtop->c.i & (ret_align-1)))
5519 && (align & (ret_align-1))) {
5520 loc = (loc - size) & -ret_align;
5521 addr = loc;
5522 type = func_vt;
5523 vset(&type, VT_LOCAL | VT_LVAL, addr);
5524 vswap();
5525 vstore();
5526 vpop();
5527 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5529 vtop->type = ret_type;
5530 if (is_float(ret_type.t))
5531 r = rc_fret(ret_type.t);
5532 else
5533 r = RC_IRET;
5535 if (ret_nregs == 1)
5536 gv(r);
5537 else {
5538 for (;;) {
5539 vdup();
5540 gv(r);
5541 vpop();
5542 if (--ret_nregs == 0)
5543 break;
5544 /* We assume that when a structure is returned in multiple
5545 registers, their classes are consecutive values of the
5546 sequence s(n) = 2^n */
5547 r <<= 1;
5548 vtop->c.i += regsize;
5552 } else if (is_float(func_vt.t)) {
5553 gv(rc_fret(func_vt.t));
5554 } else {
5555 gv(RC_IRET);
5557 #endif
5558 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5560 skip(';');
5561 /* jump unless last stmt in top-level block */
5562 if (tok != '}' || local_scope != 1)
5563 rsym = gjmp(rsym);
5564 nocode_wanted |= 2;
5565 } else if (tok == TOK_BREAK) {
5566 /* compute jump */
5567 if (!bsym)
5568 tcc_error("cannot break");
5569 *bsym = gjmp(*bsym);
5570 next();
5571 skip(';');
5572 nocode_wanted |= 2;
5573 } else if (tok == TOK_CONTINUE) {
5574 /* compute jump */
5575 if (!csym)
5576 tcc_error("cannot continue");
5577 vla_sp_restore_root();
5578 *csym = gjmp(*csym);
5579 next();
5580 skip(';');
5581 } else if (tok == TOK_FOR) {
5582 int e;
5583 int saved_nocode_wanted;
5584 nocode_wanted &= ~2;
5585 next();
5586 skip('(');
5587 s = local_stack;
5588 ++local_scope;
5589 if (tok != ';') {
5590 /* c99 for-loop init decl? */
5591 if (!decl0(VT_LOCAL, 1)) {
5592 /* no, regular for-loop init expr */
5593 gexpr();
5594 vpop();
5597 skip(';');
5598 d = ind;
5599 c = ind;
5600 vla_sp_restore();
5601 a = 0;
5602 b = 0;
5603 if (tok != ';') {
5604 gexpr();
5605 a = gvtst(1, 0);
5607 skip(';');
5608 if (tok != ')') {
5609 e = gjmp(0);
5610 c = ind;
5611 vla_sp_restore();
5612 gexpr();
5613 vpop();
5614 gjmp_addr(d);
5615 gsym(e);
5617 skip(')');
5618 saved_nocode_wanted = nocode_wanted;
5619 block(&a, &b, 0);
5620 nocode_wanted = saved_nocode_wanted;
5621 if(!nocode_wanted)
5622 gjmp_addr(c);
5623 gsym(a);
5624 gsym_addr(b, c);
5625 --local_scope;
5626 sym_pop(&local_stack, s, 0);
5628 } else
5629 if (tok == TOK_DO) {
5630 int saved_nocode_wanted;
5631 nocode_wanted &= ~2;
5632 next();
5633 a = 0;
5634 b = 0;
5635 d = ind;
5636 vla_sp_restore();
5637 saved_nocode_wanted = nocode_wanted;
5638 block(&a, &b, 0);
5639 nocode_wanted = saved_nocode_wanted;
5640 skip(TOK_WHILE);
5641 skip('(');
5642 gsym(b);
5643 gexpr();
5644 c = gvtst(0, 0);
5645 if (!nocode_wanted)
5646 gsym_addr(c, d);
5647 skip(')');
5648 gsym(a);
5649 skip(';');
5650 } else
5651 if (tok == TOK_SWITCH) {
5652 struct switch_t *saved, sw;
5653 int saved_nocode_wanted = nocode_wanted;
5654 next();
5655 skip('(');
5656 gexpr();
5657 /* XXX: other types than integer */
5658 c = gv(RC_INT);
5659 vpop();
5660 skip(')');
5661 a = 0;
5662 b = gjmp(0); /* jump to first case */
5663 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5664 saved = cur_switch;
5665 cur_switch = &sw;
5666 block(&a, csym, 0);
5667 nocode_wanted = saved_nocode_wanted;
5668 a = gjmp(a); /* add implicit break */
5669 /* case lookup */
5670 gsym(b);
5671 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5672 for (b = 1; b < sw.n; b++)
5673 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5674 tcc_error("duplicate case value");
5675 gcase(sw.p, sw.n, c, &a);
5676 if (sw.def_sym)
5677 gjmp_addr(sw.def_sym);
5678 dynarray_reset(&sw.p, &sw.n);
5679 cur_switch = saved;
5680 /* break label */
5681 gsym(a);
5682 } else
5683 if (tok == TOK_CASE) {
5684 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5685 if (!cur_switch)
5686 expect("switch");
5687 nocode_wanted &= ~2;
5688 next();
5689 cr->v1 = cr->v2 = expr_const();
5690 if (gnu_ext && tok == TOK_DOTS) {
5691 next();
5692 cr->v2 = expr_const();
5693 if (cr->v2 < cr->v1)
5694 tcc_warning("empty case range");
5696 cr->sym = ind;
5697 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5698 skip(':');
5699 is_expr = 0;
5700 goto block_after_label;
5701 } else
5702 if (tok == TOK_DEFAULT) {
5703 next();
5704 skip(':');
5705 if (!cur_switch)
5706 expect("switch");
5707 if (cur_switch->def_sym)
5708 tcc_error("too many 'default'");
5709 cur_switch->def_sym = ind;
5710 is_expr = 0;
5711 goto block_after_label;
5712 } else
5713 if (tok == TOK_GOTO) {
5714 next();
5715 if (tok == '*' && gnu_ext) {
5716 /* computed goto */
5717 next();
5718 gexpr();
5719 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5720 expect("pointer");
5721 if (!nocode_wanted)
5722 ggoto();
5723 else
5724 vtop--;
5725 } else if (tok >= TOK_UIDENT) {
5726 s = label_find(tok);
5727 /* put forward definition if needed */
5728 if (!s) {
5729 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5730 } else {
5731 if (s->r == LABEL_DECLARED)
5732 s->r = LABEL_FORWARD;
5734 vla_sp_restore_root();
5735 if (nocode_wanted)
5737 else if (s->r & LABEL_FORWARD)
5738 s->jnext = gjmp(s->jnext);
5739 else
5740 gjmp_addr(s->jnext);
5741 next();
5742 } else {
5743 expect("label identifier");
5745 skip(';');
5746 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5747 asm_instr();
5748 } else {
5749 b = is_label();
5750 if (b) {
5751 /* label case */
5752 s = label_find(b);
5753 if (s) {
5754 if (s->r == LABEL_DEFINED)
5755 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5756 gsym(s->jnext);
5757 s->r = LABEL_DEFINED;
5758 } else {
5759 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5761 s->jnext = ind;
5762 vla_sp_restore();
5763 /* we accept this, but it is a mistake */
5764 block_after_label:
5765 nocode_wanted &= ~2;
5766 if (tok == '}') {
5767 tcc_warning("deprecated use of label at end of compound statement");
5768 } else {
5769 if (is_expr)
5770 vpop();
5771 block(bsym, csym, is_expr);
5773 } else {
5774 /* expression case */
5775 if (tok != ';') {
5776 if (is_expr) {
5777 vpop();
5778 gexpr();
5779 } else {
5780 gexpr();
5781 vpop();
5784 skip(';');
5789 #define EXPR_CONST 1
5790 #define EXPR_ANY 2
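/* parse one initializer element; EXPR_CONST is used when the data goes
   into a section (static/global storage), so only constant expressions
   are accepted and compound literals are forced to be allocated
   globally; EXPR_ANY allows arbitrary expressions for automatic
   variables. */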
5792 static void parse_init_elem(int expr_type)
5794 int saved_global_expr;
5795 switch(expr_type) {
5796 case EXPR_CONST:
5797 /* compound literals must be allocated globally in this case */
5798 saved_global_expr = global_expr;
5799 global_expr = 1;
5800 expr_const1();
5801 global_expr = saved_global_expr;
5802 /* NOTE: symbols are accepted */
5803 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5804 tcc_error("initializer element is not constant");
5805 break;
5806 case EXPR_ANY:
5807 expr_eq();
5808 break;
5812 /* t is the array or struct type. c is the array or struct
5813 address. cur_field is the pointer to the current
5814 value, for arrays the 'c' member contains the current start
5815 index and the 'r' contains the end index (in case of range init).
5816 'size_only' is true if only size info is needed (only used
5817 in arrays) */
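/* E.g. (illustration) "int a[4] = { [1 ... 2] = 7 };" takes the '['
   branch below with index=1 and index_last=2: the value is stored once
   by decl_initializer() and then copied nb_elems-1 times by the loop at
   the end of this function. */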
5818 static void decl_designator(CType *type, Section *sec, unsigned long c,
5819 Sym **cur_field, int size_only)
5821 Sym *s, *f;
5822 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5823 CType type1;
5825 notfirst = 0;
5826 elem_size = 0;
5827 nb_elems = 1;
5828 if (gnu_ext && (l = is_label()) != 0)
5829 goto struct_field;
5830 while (tok == '[' || tok == '.') {
5831 if (tok == '[') {
5832 if (!(type->t & VT_ARRAY))
5833 expect("array type");
5834 s = type->ref;
5835 next();
5836 index = expr_const();
5837 if (index < 0 || (s->c >= 0 && index >= s->c))
5838 tcc_error("invalid index");
5839 if (tok == TOK_DOTS && gnu_ext) {
5840 next();
5841 index_last = expr_const();
5842 if (index_last < 0 ||
5843 (s->c >= 0 && index_last >= s->c) ||
5844 index_last < index)
5845 tcc_error("invalid index");
5846 } else {
5847 index_last = index;
5849 skip(']');
5850 if (!notfirst) {
5851 (*cur_field)->c = index;
5852 (*cur_field)->r = index_last;
5854 type = pointed_type(type);
5855 elem_size = type_size(type, &align);
5856 c += index * elem_size;
5857 /* NOTE: we only support ranges for last designator */
5858 nb_elems = index_last - index + 1;
5859 if (nb_elems != 1) {
5860 notfirst = 1;
5861 break;
5863 } else {
5864 next();
5865 l = tok;
5866 next();
5867 struct_field:
5868 if ((type->t & VT_BTYPE) != VT_STRUCT)
5869 expect("struct/union type");
5870 f = find_field(type, l);
5871 if (!f)
5872 expect("field");
5873 if (!notfirst)
5874 *cur_field = f;
5875 /* XXX: fix this mess by using explicit storage field */
5876 type1 = f->type;
5877 type1.t |= (type->t & ~VT_TYPE);
5878 type = &type1;
5879 c += f->c;
5881 notfirst = 1;
5883 if (notfirst) {
5884 if (tok == '=') {
5885 next();
5886 } else {
5887 if (!gnu_ext)
5888 expect("=");
5890 } else {
5891 if (type->t & VT_ARRAY) {
5892 index = (*cur_field)->c;
5893 if (type->ref->c >= 0 && index >= type->ref->c)
5894 tcc_error("index too large");
5895 type = pointed_type(type);
5896 c += index * type_size(type, &align);
5897 } else {
5898 f = *cur_field;
5899 if (!f)
5900 tcc_error("too many field init");
5901 /* XXX: fix this mess by using explicit storage field */
5902 type1 = f->type;
5903 type1.t |= (type->t & ~VT_TYPE);
5904 type = &type1;
5905 c += f->c;
5908 decl_initializer(type, sec, c, 0, size_only);
5910 /* XXX: make it more general */
5911 if (!size_only && nb_elems > 1) {
5912 unsigned long c_end;
5913 uint8_t *src, *dst;
5914 int i;
5916 if (!sec) {
5917 vset(type, VT_LOCAL|VT_LVAL, c);
5918 for (i = 1; i < nb_elems; i++) {
5919 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
5920 vswap();
5921 vstore();
5923 vpop();
5924 } else {
5925 c_end = c + nb_elems * elem_size;
5926 if (c_end > sec->data_allocated)
5927 section_realloc(sec, c_end);
5928 src = sec->data + c;
5929 dst = src;
5930 for(i = 1; i < nb_elems; i++) {
5931 dst += elem_size;
5932 memcpy(dst, src, elem_size);
5938 /* store a value or an expression directly in global data or in local array */
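/* For bit-field members the value is masked and shifted into place; e.g.
   (illustration, assuming the usual layout) "struct { int x:3, y:5; } s
   = { 2, 9 };" stored in a section ORs (2 & 0x7) << 0 and (9 & 0x1f) << 3
   into the same word below. */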
5939 static void init_putv(CType *type, Section *sec, unsigned long c)
5941 int bt, bit_pos, bit_size;
5942 void *ptr;
5943 unsigned long long bit_mask;
5944 CType dtype;
5946 dtype = *type;
5947 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5949 if (sec) {
5950 int size, align;
5951 /* XXX: not portable */
5952 /* XXX: generate error if incorrect relocation */
5953 gen_assign_cast(&dtype);
5954 bt = type->t & VT_BTYPE;
5955 size = type_size(type, &align);
5956 if (c + size > sec->data_allocated) {
5957 section_realloc(sec, c + size);
5959 ptr = sec->data + c;
5960 /* XXX: make code faster ? */
5961 if (!(type->t & VT_BITFIELD)) {
5962 bit_pos = 0;
5963 bit_size = PTR_SIZE * 8;
5964 bit_mask = -1LL;
5965 } else {
5966 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
5967 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
5968 bit_mask = (1LL << bit_size) - 1;
5970 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
5971 vtop->sym->v >= SYM_FIRST_ANOM &&
5972 /* XXX This rejects compound literals like
5973 '(void *){ptr}'. The problem is that '&sym' is
5974 represented the same way, which would be ruled out
5975 by the SYM_FIRST_ANOM check above, but also '"string"'
5976 in 'char *p = "string"' is represented the same
5977 with the type being VT_PTR and the symbol being an
5978 anonymous one. That is, there's no difference in vtop
5979 between '(void *){x}' and '&(void *){x}'. Ignore
5980 pointer typed entities here. Hopefully no real code
5981 will ever use compound literals with a scalar type. */
5982 (vtop->type.t & VT_BTYPE) != VT_PTR) {
5983 /* These come from compound literals, memcpy stuff over. */
5984 Section *ssec;
5985 ElfW(Sym) *esym;
5986 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
5987 ssec = tcc_state->sections[esym->st_shndx];
5988 memmove (ptr, ssec->data + esym->st_value, size);
5989 } else {
5990 if ((vtop->r & VT_SYM) &&
5991 (bt == VT_BYTE ||
5992 bt == VT_SHORT ||
5993 bt == VT_DOUBLE ||
5994 bt == VT_LDOUBLE ||
5995 #if PTR_SIZE == 8
5996 (bt == VT_LLONG && bit_size != 64) ||
5997 bt == VT_INT
5998 #else
5999 bt == VT_LLONG ||
6000 (bt == VT_INT && bit_size != 32)
6001 #endif
6003 tcc_error("initializer element is not computable at load time");
6004 switch(bt) {
6005 /* XXX: when cross-compiling we assume that each type has the
6006 same representation on host and target, which is likely to
6007 be wrong in the case of long double */
6008 case VT_BOOL:
6009 vtop->c.i = (vtop->c.i != 0);
6010 case VT_BYTE:
6011 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6012 break;
6013 case VT_SHORT:
6014 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6015 break;
6016 case VT_DOUBLE:
6017 *(double *)ptr = vtop->c.d;
6018 break;
6019 case VT_LDOUBLE:
6020 if (sizeof(long double) == LDOUBLE_SIZE)
6021 *(long double *)ptr = vtop->c.ld;
6022 else if (sizeof(double) == LDOUBLE_SIZE)
6023 *(double *)ptr = vtop->c.ld;
6024 else
6025 tcc_error("can't cross compile long double constants");
6026 break;
6027 #if PTR_SIZE != 8
6028 case VT_LLONG:
6029 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6030 break;
6031 #else
6032 case VT_LLONG:
6033 #endif
6034 case VT_PTR:
6035 {
6036 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6037 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6038 if (vtop->r & VT_SYM)
6039 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6040 else
6041 *(addr_t *)ptr |= val;
6042 #else
6043 if (vtop->r & VT_SYM)
6044 greloc(sec, vtop->sym, c, R_DATA_PTR);
6045 *(addr_t *)ptr |= val;
6046 #endif
6047 break;
6049 default:
6050 {
6051 int val = (vtop->c.i & bit_mask) << bit_pos;
6052 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6053 if (vtop->r & VT_SYM)
6054 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6055 else
6056 *(int *)ptr |= val;
6057 #else
6058 if (vtop->r & VT_SYM)
6059 greloc(sec, vtop->sym, c, R_DATA_PTR);
6060 *(int *)ptr |= val;
6061 #endif
6062 break;
6066 vtop--;
6067 } else {
6068 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6069 vswap();
6070 vstore();
6071 vpop();
6075 /* put zeros for variable based init */
6076 static void init_putz(Section *sec, unsigned long c, int size)
6078 if (sec) {
6079 /* nothing to do because globals are already set to zero */
6080 } else {
6081 vpush_global_sym(&func_old_type, TOK_memset);
6082 vseti(VT_LOCAL, c);
6083 #ifdef TCC_TARGET_ARM
6084 vpushs(size);
6085 vpushi(0);
6086 #else
6087 vpushi(0);
6088 vpushs(size);
6089 #endif
6090 gfunc_call(3);
6094 /* 't' contains the type and storage info. 'c' is the offset of the
6095 object in section 'sec'. If 'sec' is NULL, it means stack based
6096 allocation. 'first' is true if array '{' must be read (multi
6097 dimension implicit array init handling). 'size_only' is true if
6098 size only evaluation is wanted (only for arrays). */
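/* E.g. (illustration) char s[] = "ab"; takes the string branch below:
   n is negative (unknown size), two characters plus the trailing zero
   are stored, and the final "if (n < 0) s->c = array_length;" patches
   the array size to 3. */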
6099 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6100 int first, int size_only)
6102 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6103 int size1, align1;
6104 int have_elem;
6105 Sym *s, *f;
6106 Sym indexsym;
6107 CType *t1;
6109 /* If we are currently at a '}' or ',' we have read an initializer
6110 element in one of our callers, and not yet consumed it. */
6111 have_elem = tok == '}' || tok == ',';
6112 if (!have_elem && tok != '{' &&
6113 /* In case of strings we have special handling for arrays, so
6114 don't consume them as initializer value (which would commit them
6115 to some anonymous symbol). */
6116 tok != TOK_LSTR && tok != TOK_STR &&
6117 !size_only) {
6118 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6119 have_elem = 1;
6122 if (have_elem &&
6123 !(type->t & VT_ARRAY) &&
6124 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6125 The source type might have VT_CONSTANT set, which is
6126 of course assignable to non-const elements. */
6127 is_compatible_parameter_types(type, &vtop->type)) {
6128 init_putv(type, sec, c);
6129 } else if (type->t & VT_ARRAY) {
6130 s = type->ref;
6131 n = s->c;
6132 array_length = 0;
6133 t1 = pointed_type(type);
6134 size1 = type_size(t1, &align1);
6136 no_oblock = 1;
6137 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6138 tok == '{') {
6139 if (tok != '{')
6140 tcc_error("character array initializer must be a literal,"
6141 " optionally enclosed in braces");
6142 skip('{');
6143 no_oblock = 0;
6146 /* only parse strings here if correct type (otherwise: handle
6147 them as ((w)char *) expressions) */
6148 if ((tok == TOK_LSTR &&
6149 #ifdef TCC_TARGET_PE
6150 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6151 #else
6152 (t1->t & VT_BTYPE) == VT_INT
6153 #endif
6154 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6155 while (tok == TOK_STR || tok == TOK_LSTR) {
6156 int cstr_len, ch;
6158 /* compute maximum number of chars wanted */
6159 if (tok == TOK_STR)
6160 cstr_len = tokc.str.size;
6161 else
6162 cstr_len = tokc.str.size / sizeof(nwchar_t);
6163 cstr_len--;
6164 nb = cstr_len;
6165 if (n >= 0 && nb > (n - array_length))
6166 nb = n - array_length;
6167 if (!size_only) {
6168 if (cstr_len > nb)
6169 tcc_warning("initializer-string for array is too long");
6170 /* in order to go faster for the common case (char
6171 string in a global variable), we handle it
6172 specially */
6173 if (sec && tok == TOK_STR && size1 == 1) {
6174 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6175 } else {
6176 for(i=0;i<nb;i++) {
6177 if (tok == TOK_STR)
6178 ch = ((unsigned char *)tokc.str.data)[i];
6179 else
6180 ch = ((nwchar_t *)tokc.str.data)[i];
6181 vpushi(ch);
6182 init_putv(t1, sec, c + (array_length + i) * size1);
6186 array_length += nb;
6187 next();
6189 /* only add trailing zero if enough storage (no
6190 warning in this case since it is standard) */
6191 if (n < 0 || array_length < n) {
6192 if (!size_only) {
6193 vpushi(0);
6194 init_putv(t1, sec, c + (array_length * size1));
6196 array_length++;
6198 } else {
6199 indexsym.c = 0;
6200 indexsym.r = 0;
6201 f = &indexsym;
6203 do_init_list:
6204 while (tok != '}' || have_elem) {
6205 decl_designator(type, sec, c, &f, size_only);
6206 have_elem = 0;
6207 index = f->c;
6208 /* must put zero in holes (note that doing it that way
6209 ensures that it even works with designators) */
6210 if (!size_only && array_length < index) {
6211 init_putz(sec, c + array_length * size1,
6212 (index - array_length) * size1);
6214 if (type->t & VT_ARRAY) {
6215 index = indexsym.c = ++indexsym.r;
6216 } else {
6217 index = index + type_size(&f->type, &align1);
6218 if (s->type.t == TOK_UNION)
6219 f = NULL;
6220 else
6221 f = f->next;
6223 if (index > array_length)
6224 array_length = index;
6226 if (type->t & VT_ARRAY) {
6227 /* special test for multi dimensional arrays (may not
6228 be strictly correct if designators are used at the
6229 same time) */
6230 if (no_oblock && index >= n)
6231 break;
6232 } else {
6233 if (no_oblock && f == NULL)
6234 break;
6236 if (tok == '}')
6237 break;
6238 skip(',');
6241 /* put zeros at the end */
6242 if (!size_only && array_length < n) {
6243 init_putz(sec, c + array_length * size1,
6244 (n - array_length) * size1);
6246 if (!no_oblock)
6247 skip('}');
6248 /* patch type size if needed, which happens only for array types */
6249 if (n < 0)
6250 s->c = array_length;
6251 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6252 size1 = 1;
6253 no_oblock = 1;
6254 if (first || tok == '{') {
6255 skip('{');
6256 no_oblock = 0;
6258 s = type->ref;
6259 f = s->next;
6260 array_length = 0;
6261 n = s->c;
6262 goto do_init_list;
6263 } else if (tok == '{') {
6264 next();
6265 decl_initializer(type, sec, c, first, size_only);
6266 skip('}');
6267 } else if (size_only) {
6268 /* If we supported only ISO C we wouldn't have to accept calling
6269 this on anything other than an array with size_only==1 (and even
6270 then only on the outermost level, so no recursion would be needed),
6271 because initializing a flex array member isn't supported.
6272 But GNU C supports it, so we need to recurse even into
6273 subfields of structs and arrays when size_only is set. */
6274 /* just skip expression */
6275 parlevel = parlevel1 = 0;
6276 while ((parlevel > 0 || parlevel1 > 0 ||
6277 (tok != '}' && tok != ',')) && tok != -1) {
6278 if (tok == '(')
6279 parlevel++;
6280 else if (tok == ')') {
6281 if (parlevel == 0 && parlevel1 == 0)
6282 break;
6283 parlevel--;
6285 else if (tok == '{')
6286 parlevel1++;
6287 else if (tok == '}') {
6288 if (parlevel == 0 && parlevel1 == 0)
6289 break;
6290 parlevel1--;
6292 next();
6294 } else {
6295 if (!have_elem) {
6296 /* This should happen only when we haven't parsed
6297 the init element above for fear of committing a
6298 string constant to memory too early. */
6299 if (tok != TOK_STR && tok != TOK_LSTR)
6300 expect("string constant");
6301 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6303 init_putv(type, sec, c);
6307 /* parse an initializer for type 't' if 'has_init' is non zero, and
6308 allocate space in local or global data space ('r' is either
6309 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6310 variable 'v' of scope 'scope' is declared before initializers
6311 are parsed. If 'v' is zero, then a reference to the new object
6312 is put in the value stack. If 'has_init' is 2, a special parsing
6313 is done to handle string constants. */
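/* E.g. (illustration) "int a[] = { 1, 2, 3 };" first has unknown size:
   the initializer tokens are saved into 'init_str', replayed once in
   size-only mode to compute the size, and then replayed a second time
   for the actual initialization further below. */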
6314 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6315 int has_init, int v, int scope)
6317 int size, align, addr, data_offset;
6318 int level;
6319 ParseState saved_parse_state = {0};
6320 TokenString *init_str = NULL;
6321 Section *sec;
6322 Sym *flexible_array;
6324 flexible_array = NULL;
6325 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6326 Sym *field = type->ref->next;
6327 if (field) {
6328 while (field->next)
6329 field = field->next;
6330 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6331 flexible_array = field;
6335 size = type_size(type, &align);
6336 /* If the size is unknown, we must evaluate it before
6337 evaluating the initializers, because
6338 initializers can generate global data too
6339 (e.g. string pointers or ISO C99 compound
6340 literals). It also simplifies the
6341 handling of local initializers. */
6342 if (size < 0 || (flexible_array && has_init)) {
6343 if (!has_init)
6344 tcc_error("unknown type size");
6345 /* get all init string */
6346 init_str = tok_str_alloc();
6347 if (has_init == 2) {
6348 /* only get strings */
6349 while (tok == TOK_STR || tok == TOK_LSTR) {
6350 tok_str_add_tok(init_str);
6351 next();
6353 } else {
6354 level = 0;
6355 while (level > 0 || (tok != ',' && tok != ';')) {
6356 if (tok < 0)
6357 tcc_error("unexpected end of file in initializer");
6358 tok_str_add_tok(init_str);
6359 if (tok == '{')
6360 level++;
6361 else if (tok == '}') {
6362 level--;
6363 if (level <= 0) {
6364 next();
6365 break;
6368 next();
6371 tok_str_add(init_str, -1);
6372 tok_str_add(init_str, 0);
6374 /* compute size */
6375 save_parse_state(&saved_parse_state);
6377 begin_macro(init_str, 1);
6378 next();
6379 decl_initializer(type, NULL, 0, 1, 1);
6380 /* prepare second initializer parsing */
6381 macro_ptr = init_str->str;
6382 next();
6384 /* if still unknown size, error */
6385 size = type_size(type, &align);
6386 if (size < 0)
6387 tcc_error("unknown type size");
6389 /* If there's a flex member and it was used in the initializer
6390 adjust size. */
6391 if (flexible_array &&
6392 flexible_array->type.ref->c > 0)
6393 size += flexible_array->type.ref->c
6394 * pointed_size(&flexible_array->type);
6395 /* take into account specified alignment if bigger */
6396 if (ad->a.aligned) {
6397 if (ad->a.aligned > align)
6398 align = ad->a.aligned;
6399 } else if (ad->a.packed) {
6400 align = 1;
6402 if ((r & VT_VALMASK) == VT_LOCAL) {
6403 sec = NULL;
6404 #ifdef CONFIG_TCC_BCHECK
6405 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6406 loc--;
6408 #endif
6409 loc = (loc - size) & -align;
6410 addr = loc;
6411 #ifdef CONFIG_TCC_BCHECK
6412 /* handles bounds */
6413 /* XXX: currently, since we do only one pass, we cannot track
6414 '&' operators, so we add only arrays */
6415 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6416 addr_t *bounds_ptr;
6417 /* add padding between regions */
6418 loc--;
6419 /* then add local bound info */
6420 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6421 bounds_ptr[0] = addr;
6422 bounds_ptr[1] = size;
6424 #endif
6425 if (v) {
6426 /* local variable */
6427 sym_push(v, type, r, addr);
6428 } else {
6429 /* push local reference */
6430 vset(type, r, addr);
6432 } else {
6433 Sym *sym;
6435 sym = NULL;
6436 if (v && scope == VT_CONST) {
6437 /* see if the symbol was already defined */
6438 sym = sym_find(v);
6439 if (sym) {
6440 if (!is_compatible_types(&sym->type, type))
6441 tcc_error("incompatible types for redefinition of '%s'",
6442 get_tok_str(v, NULL));
6443 if (sym->type.t & VT_EXTERN) {
6444 /* if the variable is extern, it was not allocated */
6445 sym->type.t &= ~VT_EXTERN;
6446 /* set array size if it was omitted in extern
6447 declaration */
6448 if ((sym->type.t & VT_ARRAY) &&
6449 sym->type.ref->c < 0 &&
6450 type->ref->c >= 0)
6451 sym->type.ref->c = type->ref->c;
6452 } else {
6453 /* we accept several definitions of the same
6454 global variable. This is tricky, because we
6455 must play with the SHN_COMMON type of the symbol */
6456 /* XXX: should check if the variable was already
6457 initialized. It is incorrect to initialize it
6458 twice */
6459 /* no init data, we won't add more to the symbol */
6460 if (!has_init)
6461 goto no_alloc;
6466 /* allocate symbol in corresponding section */
6467 sec = ad->section;
6468 if (!sec) {
6469 if (has_init)
6470 sec = data_section;
6471 else if (tcc_state->nocommon)
6472 sec = bss_section;
6474 if (sec) {
6475 data_offset = sec->data_offset;
6476 data_offset = (data_offset + align - 1) & -align;
6477 addr = data_offset;
6478 /* very important to increment the section's data offset at this
6479 time because initializers themselves can create new initializers */
6480 data_offset += size;
6481 #ifdef CONFIG_TCC_BCHECK
6482 /* add padding if bound check */
6483 if (tcc_state->do_bounds_check)
6484 data_offset++;
6485 #endif
6486 sec->data_offset = data_offset;
6487 /* allocate section space to put the data */
6488 if (sec->sh_type != SHT_NOBITS &&
6489 data_offset > sec->data_allocated)
6490 section_realloc(sec, data_offset);
6491 /* align section if needed */
6492 if (align > sec->sh_addralign)
6493 sec->sh_addralign = align;
6494 } else {
6495 addr = 0; /* avoid warning */
6498 if (v) {
6499 if (scope != VT_CONST || !sym) {
6500 sym = sym_push(v, type, r | VT_SYM, 0);
6501 sym->asm_label = ad->asm_label;
6503 /* update symbol definition */
6504 if (sec) {
6505 put_extern_sym(sym, sec, addr, size);
6506 } else {
6507 ElfW(Sym) *esym;
6508 /* put a common area */
6509 put_extern_sym(sym, NULL, align, size);
6510 /* XXX: find a nicer way */
6511 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6512 esym->st_shndx = SHN_COMMON;
6514 } else {
6515 /* push global reference */
6516 sym = get_sym_ref(type, sec, addr, size);
6517 vpushsym(type, sym);
6519 /* patch symbol weakness */
6520 if (type->t & VT_WEAK)
6521 weaken_symbol(sym);
6522 apply_visibility(sym, type);
6523 #ifdef CONFIG_TCC_BCHECK
6524 /* handles bounds now because the symbol must be defined
6525 before for the relocation */
6526 if (tcc_state->do_bounds_check) {
6527 addr_t *bounds_ptr;
6529 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6530 /* then add global bound info */
6531 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6532 bounds_ptr[0] = 0; /* relocated */
6533 bounds_ptr[1] = size;
6535 #endif
6537 if (type->t & VT_VLA) {
6538 int a;
6540 /* save current stack pointer */
6541 if (vlas_in_scope == 0) {
6542 if (vla_sp_root_loc == -1)
6543 vla_sp_root_loc = (loc -= PTR_SIZE);
6544 gen_vla_sp_save(vla_sp_root_loc);
6547 vla_runtime_type_size(type, &a);
6548 gen_vla_alloc(type, a);
6549 gen_vla_sp_save(addr);
6550 vla_sp_loc = addr;
6551 vlas_in_scope++;
6552 } else if (has_init) {
6553 decl_initializer(type, sec, addr, 1, 0);
6554 /* patch flexible array member size back to -1, */
6555 /* for possible subsequent similar declarations */
6556 if (flexible_array)
6557 flexible_array->type.ref->c = -1;
6559 no_alloc: ;
6560 /* restore parse state if needed */
6561 if (init_str) {
6562 end_macro();
6563 restore_parse_state(&saved_parse_state);
6567 static void put_func_debug(Sym *sym)
6569 char buf[512];
6571 /* stabs info */
6572 /* XXX: we put here a dummy type */
6573 snprintf(buf, sizeof(buf), "%s:%c1",
6574 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6575 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6576 cur_text_section, sym->c);
6577 /* //gr gdb wants a line at the function */
6578 put_stabn(N_SLINE, 0, file->line_num, 0);
6579 last_ind = 0;
6580 last_line_num = 0;
6583 /* parse an old style function declaration list */
6584 /* XXX: check multiple parameters */
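/* E.g. (illustration) "int f(a, b) int a; char b; { ... }": each
   declaration between the parameter list and the '{' is matched against
   the parameters recorded in func_sym->next and its type is filled in. */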
6585 static void func_decl_list(Sym *func_sym)
6587 AttributeDef ad;
6588 int v;
6589 Sym *s;
6590 CType btype, type;
6592 /* parse each declaration */
6593 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6594 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6595 if (!parse_btype(&btype, &ad))
6596 expect("declaration list");
6597 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6598 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6599 tok == ';') {
6600 /* we accept no variable after */
6601 } else {
6602 for(;;) {
6603 type = btype;
6604 type_decl(&type, &ad, &v, TYPE_DIRECT);
6605 /* find parameter in function parameter list */
6606 s = func_sym->next;
6607 while (s != NULL) {
6608 if ((s->v & ~SYM_FIELD) == v)
6609 goto found;
6610 s = s->next;
6612 tcc_error("declaration for parameter '%s' but no such parameter",
6613 get_tok_str(v, NULL));
6614 found:
6615 /* check that no storage specifier except 'register' was given */
6616 if (type.t & VT_STORAGE)
6617 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6618 convert_parameter_type(&type);
6619 /* we can add the type (NOTE: it could be local to the function) */
6620 s->type = type;
6621 /* accept other parameters */
6622 if (tok == ',')
6623 next();
6624 else
6625 break;
6628 skip(';');
6632 /* parse a function defined by symbol 'sym' and generate its code in
6633 'cur_text_section' */
6634 static void gen_function(Sym *sym)
6636 int saved_nocode_wanted = nocode_wanted;
6638 nocode_wanted = 0;
6639 ind = cur_text_section->data_offset;
6640 /* NOTE: we patch the symbol size later */
6641 put_extern_sym(sym, cur_text_section, ind, 0);
6642 funcname = get_tok_str(sym->v, NULL);
6643 func_ind = ind;
6644 /* Initialize VLA state */
6645 vla_sp_loc = -1;
6646 vla_sp_root_loc = -1;
6647 /* put debug symbol */
6648 if (tcc_state->do_debug)
6649 put_func_debug(sym);
6651 /* push a dummy symbol to enable local sym storage */
6652 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6653 local_scope = 1; /* for function parameters */
6654 gfunc_prolog(&sym->type);
6655 local_scope = 0;
6657 rsym = 0;
6658 block(NULL, NULL, 0);
6659 gsym(rsym);
6660 gfunc_epilog();
6661 cur_text_section->data_offset = ind;
6662 label_pop(&global_label_stack, NULL);
6663 /* reset local stack */
6664 local_scope = 0;
6665 sym_pop(&local_stack, NULL, 0);
6666 /* end of function */
6667 /* patch symbol size */
6668 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6669 ind - func_ind;
6670 /* patch symbol weakness (this definition overrules any prototype) */
6671 if (sym->type.t & VT_WEAK)
6672 weaken_symbol(sym);
6673 apply_visibility(sym, &sym->type);
6674 if (tcc_state->do_debug) {
6675 put_stabn(N_FUN, 0, 0, ind - func_ind);
6676 }
6677 /* It's better to crash than to generate wrong code */
6678 cur_text_section = NULL;
6679 funcname = ""; /* for safety */
6680 func_vt.t = VT_VOID; /* for safety */
6681 func_var = 0; /* for safety */
6682 ind = 0; /* for safety */
6683 nocode_wanted = saved_nocode_wanted;
6684 check_vstack();
6685 }
6687 static void gen_inline_functions(TCCState *s)
6688 {
6689 Sym *sym;
6690 int inline_generated, i, ln;
6691 struct InlineFunc *fn;
6693 ln = file->line_num;
6694 /* iterate while inline functions are referenced */
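/* emitting one inline function may mark further inline functions as used, so loop until a whole pass generates nothing new */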
6695 for(;;) {
6696 inline_generated = 0;
6697 for (i = 0; i < s->nb_inline_fns; ++i) {
6698 fn = s->inline_fns[i];
6699 sym = fn->sym;
6700 if (sym && sym->c) {
6701 /* the function was used: generate its code and
6702 convert it to a normal function */
6703 fn->sym = NULL;
6704 if (file)
6705 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6706 sym->r = VT_SYM | VT_CONST;
6707 sym->type.t &= ~VT_INLINE;
6709 begin_macro(fn->func_str, 1);
6710 next();
6711 cur_text_section = text_section;
6712 gen_function(sym);
6713 end_macro();
6715 inline_generated = 1;
6716 }
6717 }
6718 if (!inline_generated)
6719 break;
6720 }
6721 file->line_num = ln;
6722 }
6724 ST_FUNC void free_inline_functions(TCCState *s)
6725 {
6726 int i;
6727 /* free tokens of unused inline functions */
6728 for (i = 0; i < s->nb_inline_fns; ++i) {
6729 struct InlineFunc *fn = s->inline_fns[i];
6730 if (fn->sym)
6731 tok_str_free(fn->func_str);
6732 }
6733 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6734 }
6736 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6737 static int decl0(int l, int is_for_loop_init)
6738 {
6739 int v, has_init, r;
6740 CType type, btype;
6741 Sym *sym;
6742 AttributeDef ad;
6744 while (1) {
6745 if (!parse_btype(&btype, &ad)) {
6746 if (is_for_loop_init)
6747 return 0;
6748 /* skip redundant ';' */
6749 /* XXX: find more elegant solution */
6750 if (tok == ';') {
6751 next();
6752 continue;
6753 }
6754 if (l == VT_CONST &&
6755 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6756 /* global asm block */
6757 asm_global_instr();
6758 continue;
6759 }
6760 /* special test for old K&R protos without explicit int
6761 type. Only accepted when defining global data */
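/* e.g. an old-style file-scope definition like 'foo() { ... }' gets an implicit int type here */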
6762 if (l == VT_LOCAL || tok < TOK_UIDENT)
6763 break;
6764 btype.t = VT_INT;
6765 }
6766 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6767 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6768 tok == ';') {
6769 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6770 int v = btype.ref->v;
6771 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6772 tcc_warning("unnamed struct/union that defines no instances");
6773 }
6774 next();
6775 continue;
6776 }
6777 while (1) { /* iterate thru each declaration */
6778 type = btype;
6779 /* If the base type itself was an array type of unspecified
6780 size (like in 'typedef int arr[]; arr x = {1};') then
6781 we will overwrite the unknown size by the real one for
6782 this decl. We need to unshare the ref symbol holding
6783 that size. */
6784 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6785 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6786 }
6787 type_decl(&type, &ad, &v, TYPE_DIRECT);
6788 #if 0
6789 {
6790 char buf[500];
6791 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
6792 printf("type = '%s'\n", buf);
6793 }
6794 #endif
6795 if ((type.t & VT_BTYPE) == VT_FUNC) {
6796 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6797 tcc_error("function without file scope cannot be static");
6798 }
6799 /* if old style function prototype, we accept a
6800 declaration list */
6801 sym = type.ref;
6802 if (sym->c == FUNC_OLD)
6803 func_decl_list(sym);
6804 }
6806 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6807 ad.asm_label = asm_label_instr();
6808 /* parse one last attribute list, after asm label */
6809 parse_attribute(&ad);
6810 if (tok == '{')
6811 expect(";");
6812 }
6814 if (ad.a.weak)
6815 type.t |= VT_WEAK;
6816 #ifdef TCC_TARGET_PE
6817 if (ad.a.func_import)
6818 type.t |= VT_IMPORT;
6819 if (ad.a.func_export)
6820 type.t |= VT_EXPORT;
6821 #endif
6822 type.t |= ad.a.visibility << VT_VIS_SHIFT;
6824 if (tok == '{') {
6825 if (l == VT_LOCAL)
6826 tcc_error("cannot use local functions");
6827 if ((type.t & VT_BTYPE) != VT_FUNC)
6828 expect("function definition");
6830 /* reject abstract declarators in function definition */
6831 sym = type.ref;
6832 while ((sym = sym->next) != NULL)
6833 if (!(sym->v & ~SYM_FIELD))
6834 expect("identifier");
6836 /* XXX: cannot do better for now: convert extern inline to static inline */
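/* e.g. 'extern inline int f(void) { ... }' is handled as if it had been declared 'static inline' */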
6837 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
6838 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6840 sym = sym_find(v);
6841 if (sym) {
6842 Sym *ref;
6843 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
6844 goto func_error1;
6846 ref = sym->type.ref;
6847 if (0 == ref->a.func_proto)
6848 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6850 /* use func_call from prototype if not defined */
6851 if (ref->a.func_call != FUNC_CDECL
6852 && type.ref->a.func_call == FUNC_CDECL)
6853 type.ref->a.func_call = ref->a.func_call;
6855 /* use export from prototype */
6856 if (ref->a.func_export)
6857 type.ref->a.func_export = 1;
6859 /* use static from prototype */
6860 if (sym->type.t & VT_STATIC)
6861 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6863 /* If the definition has no visibility use the
6864 one from the prototype. */
6865 if (! (type.t & VT_VIS_MASK))
6866 type.t |= sym->type.t & VT_VIS_MASK;
6868 if (!is_compatible_types(&sym->type, &type)) {
6869 func_error1:
6870 tcc_error("incompatible types for redefinition of '%s'",
6871 get_tok_str(v, NULL));
6872 }
6873 type.ref->a.func_proto = 0;
6874 /* if symbol is already defined, then put complete type */
6875 sym->type = type;
6876 } else {
6877 /* put function symbol */
6878 sym = global_identifier_push(v, type.t, 0);
6879 sym->type.ref = type.ref;
6880 }
6882 /* static inline functions are just recorded as a kind
6883 of macro. Their code will be emitted at the end of
6884 the compilation unit only if they are used */
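/* the body is saved below as a token string and replayed by gen_inline_functions() after the whole unit has been parsed */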
6885 if ((type.t & (VT_INLINE | VT_STATIC)) ==
6886 (VT_INLINE | VT_STATIC)) {
6887 int block_level;
6888 struct InlineFunc *fn;
6889 const char *filename;
6891 filename = file ? file->filename : "";
6892 fn = tcc_malloc(sizeof *fn + strlen(filename));
6893 strcpy(fn->filename, filename);
6894 fn->sym = sym;
6895 fn->func_str = tok_str_alloc();
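/* copy the body token by token, tracking '{'/'}' nesting until the outermost block closes */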
6897 block_level = 0;
6898 for(;;) {
6899 int t;
6900 if (tok == TOK_EOF)
6901 tcc_error("unexpected end of file");
6902 tok_str_add_tok(fn->func_str);
6903 t = tok;
6904 next();
6905 if (t == '{') {
6906 block_level++;
6907 } else if (t == '}') {
6908 block_level--;
6909 if (block_level == 0)
6910 break;
6911 }
6912 }
6913 tok_str_add(fn->func_str, -1);
6914 tok_str_add(fn->func_str, 0);
6915 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
6917 } else {
6918 /* compute text section */
6919 cur_text_section = ad.section;
6920 if (!cur_text_section)
6921 cur_text_section = text_section;
6922 sym->r = VT_SYM | VT_CONST;
6923 gen_function(sym);
6924 }
6925 break;
6926 } else {
6927 if (btype.t & VT_TYPEDEF) {
6928 /* save typedefed type */
6929 /* XXX: test storage specifiers ? */
6930 sym = sym_find(v);
6931 if (sym && sym->scope == local_scope) {
6932 if (!is_compatible_types(&sym->type, &type)
6933 || !(sym->type.t & VT_TYPEDEF))
6934 tcc_error("incompatible redefinition of '%s'",
6935 get_tok_str(v, NULL));
6936 sym->type = type;
6937 } else {
6938 sym = sym_push(v, &type, 0, 0);
6939 }
6940 sym->a = ad.a;
6941 sym->type.t |= VT_TYPEDEF;
6942 } else {
6943 r = 0;
6944 if ((type.t & VT_BTYPE) == VT_FUNC) {
6945 /* external function definition */
6946 /* specific case for func_call attribute */
6947 ad.a.func_proto = 1;
6948 type.ref->a = ad.a;
6949 } else if (!(type.t & VT_ARRAY)) {
6950 /* not lvalue if array */
6951 r |= lvalue_type(type.t);
6952 }
6953 has_init = (tok == '=');
6954 if (has_init && (type.t & VT_VLA))
6955 tcc_error("variable length array cannot be initialized");
6956 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
6957 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
6958 !has_init && l == VT_CONST && type.ref->c < 0)) {
6959 /* external variable or function */
6960 /* NOTE: as in GCC, uninitialized global static
6961 arrays of unknown size are treated as
6962 extern */
6963 sym = external_sym(v, &type, r);
6964 sym->asm_label = ad.asm_label;
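/* __attribute__((alias("target"))): define this symbol at the address and size of the already defined target symbol */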
6966 if (ad.alias_target) {
6967 Section tsec;
6968 ElfW(Sym) *esym;
6969 Sym *alias_target;
6971 alias_target = sym_find(ad.alias_target);
6972 if (!alias_target || !alias_target->c)
6973 tcc_error("unsupported forward __alias__ attribute");
6974 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
6975 tsec.sh_num = esym->st_shndx;
6976 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
6977 }
6978 } else {
6979 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
6980 if (type.t & VT_STATIC)
6981 r |= VT_CONST;
6982 else
6983 r |= l;
6984 if (has_init)
6985 next();
6986 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
6987 }
6988 }
6989 if (tok != ',') {
6990 if (is_for_loop_init)
6991 return 1;
6992 skip(';');
6993 break;
6994 }
6995 next();
6996 }
6997 ad.a.aligned = 0;
6998 }
6999 }
7000 return 0;
7001 }
7003 ST_FUNC void decl(int l)
7004 {
7005 decl0(l, 0);
7006 }
7008 /* ------------------------------------------------------------------------- */