Revert "partial revert of the commit 4ad186c5ef61"
[tinycc.git] / tccgen.c
blob70e6fb656665fbb63d750552d4fd2f04a93cc150
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
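/* A minimal standalone sketch of what the bit trick above relies on
   (assuming a little-endian IEEE-754 'double' and 32-bit int, which is
   exactly the endianness dependence the XXX note warns about): the high
   word holds sign|exponent|mantissa-high, and the value is non-finite
   precisely when all 11 exponent bits are set.  Illustration only, kept
   out of the build like the pv() helper further down. */
#if 0
#include <assert.h>
#include <math.h>
#include <string.h>

static int finite_by_exponent(double d)
{
    unsigned int p[2];                    /* p[1] is the high word here */
    memcpy(p, &d, sizeof(double));
    /* exponent field is bits 20..30 of the high word; 0x7ff => Inf/NaN */
    return (p[1] & 0x7ff00000u) != 0x7ff00000u;
}

int main(void)
{
    assert(finite_by_exponent(1.5));
    assert(!finite_by_exponent(HUGE_VAL));        /* infinity */
    assert(!finite_by_exponent((double)NAN));     /* not a number */
    return 0;
}
#endif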
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
142 /* ------------------------------------------------------------------------- */
143 ST_FUNC void tccgen_start(TCCState *s1)
145 cur_text_section = NULL;
146 funcname = "";
147 anon_sym = SYM_FIRST_ANOM;
148 section_sym = 0;
149 const_wanted = 0;
150 nocode_wanted = 1;
152 /* define some often used types */
153 int_type.t = VT_INT;
154 char_pointer_type.t = VT_BYTE;
155 mk_pointer(&char_pointer_type);
156 #if PTR_SIZE == 4
157 size_type.t = VT_INT;
158 #else
159 size_type.t = VT_LLONG;
160 #endif
161 func_old_type.t = VT_FUNC;
162 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
164 if (s1->do_debug) {
165 char buf[512];
167 /* file info: full path + filename */
168 section_sym = put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
170 text_section->sh_num, NULL);
171 getcwd(buf, sizeof(buf));
172 #ifdef _WIN32
173 normalize_slashes(buf);
174 #endif
175 pstrcat(buf, sizeof(buf), "/");
176 put_stabs_r(buf, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
178 put_stabs_r(file->filename, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
181 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
182 symbols can be safely used */
183 put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
185 SHN_ABS, file->filename);
187 #ifdef TCC_TARGET_ARM
188 arm_init(s1);
189 #endif
192 ST_FUNC void tccgen_end(TCCState *s1)
194 gen_inline_functions(s1);
195 check_vstack();
196 /* end of translation unit info */
197 if (s1->do_debug) {
198 put_stabs_r(NULL, N_SO, 0, 0,
199 text_section->data_offset, text_section, section_sym);
203 /* ------------------------------------------------------------------------- */
204 /* update sym->c so that it points to an external symbol in section
205 'section' with value 'value' */
207 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
208 addr_t value, unsigned long size,
209 int can_add_underscore)
211 int sym_type, sym_bind, sh_num, info, other;
212 ElfW(Sym) *esym;
213 const char *name;
214 char buf1[256];
216 #ifdef CONFIG_TCC_BCHECK
217 char buf[32];
218 #endif
220 if (section == NULL)
221 sh_num = SHN_UNDEF;
222 else if (section == SECTION_ABS)
223 sh_num = SHN_ABS;
224 else
225 sh_num = section->sh_num;
227 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
228 sym_type = STT_FUNC;
229 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
230 sym_type = STT_NOTYPE;
231 } else {
232 sym_type = STT_OBJECT;
235 if (sym->type.t & VT_STATIC)
236 sym_bind = STB_LOCAL;
237 else {
238 if (sym->type.t & VT_WEAK)
239 sym_bind = STB_WEAK;
240 else
241 sym_bind = STB_GLOBAL;
244 if (!sym->c) {
245 name = get_tok_str(sym->v, NULL);
246 #ifdef CONFIG_TCC_BCHECK
247 if (tcc_state->do_bounds_check) {
248 /* XXX: avoid doing that for statics ? */
249 /* if bound checking is activated, we change some function
250 names by adding the "__bound" prefix */
251 switch(sym->v) {
252 #ifdef TCC_TARGET_PE
253 /* XXX: we rely only on malloc hooks */
254 case TOK_malloc:
255 case TOK_free:
256 case TOK_realloc:
257 case TOK_memalign:
258 case TOK_calloc:
259 #endif
260 case TOK_memcpy:
261 case TOK_memmove:
262 case TOK_memset:
263 case TOK_strlen:
264 case TOK_strcpy:
265 case TOK_alloca:
266 strcpy(buf, "__bound_");
267 strcat(buf, name);
268 name = buf;
269 break;
272 #endif
273 other = 0;
275 #ifdef TCC_TARGET_PE
276 if (sym->type.t & VT_EXPORT)
277 other |= ST_PE_EXPORT;
278 if (sym_type == STT_FUNC && sym->type.ref) {
279 Sym *ref = sym->type.ref;
280 if (ref->a.func_export)
281 other |= ST_PE_EXPORT;
282 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
283 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
284 name = buf1;
285 other |= ST_PE_STDCALL;
286 can_add_underscore = 0;
288 } else {
289 if (find_elf_sym(tcc_state->dynsymtab_section, name))
290 other |= ST_PE_IMPORT;
291 if (sym->type.t & VT_IMPORT)
292 other |= ST_PE_IMPORT;
294 #else
295 if (! (sym->type.t & VT_STATIC))
296 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
297 #endif
298 if (tcc_state->leading_underscore && can_add_underscore) {
299 buf1[0] = '_';
300 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
301 name = buf1;
303 if (sym->asm_label) {
304 name = get_tok_str(sym->asm_label, NULL);
306 info = ELFW(ST_INFO)(sym_bind, sym_type);
307 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
308 } else {
309 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
310 esym->st_value = value;
311 esym->st_size = size;
312 esym->st_shndx = sh_num;
316 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
317 addr_t value, unsigned long size)
319 put_extern_sym2(sym, section, value, size, 1);
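/* Side note (illustration only, not part of the build): the
   ELFW(ST_INFO)(sym_bind, sym_type) computed above packs the binding
   into the high nibble of st_info and the type into the low nibble,
   per the standard ELF definition ((b) << 4) + ((t) & 0xf).  A tiny
   self-contained check, with local MY_* copies of the constants so
   nothing from <elf.h> is assumed: */
#if 0
#include <assert.h>

#define MY_ST_INFO(bind, type) (((bind) << 4) + ((type) & 0xf))
#define MY_STB_GLOBAL 1
#define MY_STT_FUNC   2

int main(void)
{
    /* a global function symbol gets st_info == 0x12 */
    assert(MY_ST_INFO(MY_STB_GLOBAL, MY_STT_FUNC) == 0x12);
    return 0;
}
#endif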
322 /* add a new relocation entry to symbol 'sym' in section 's' */
323 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
324 addr_t addend)
326 int c = 0;
328 if (nocode_wanted && s == cur_text_section)
329 return;
331 if (sym) {
332 if (0 == sym->c)
333 put_extern_sym(sym, NULL, 0, 0);
334 c = sym->c;
337 /* now we can add ELF relocation info */
338 put_elf_reloca(symtab_section, s, offset, type, c, addend);
341 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
343 greloca(s, sym, offset, type, 0);
346 /* ------------------------------------------------------------------------- */
347 /* symbol allocator */
348 static Sym *__sym_malloc(void)
350 Sym *sym_pool, *sym, *last_sym;
351 int i;
353 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
354 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
356 last_sym = sym_free_first;
357 sym = sym_pool;
358 for(i = 0; i < SYM_POOL_NB; i++) {
359 sym->next = last_sym;
360 last_sym = sym;
361 sym++;
363 sym_free_first = last_sym;
364 return last_sym;
367 static inline Sym *sym_malloc(void)
369 Sym *sym;
370 #ifndef SYM_DEBUG
371 sym = sym_free_first;
372 if (!sym)
373 sym = __sym_malloc();
374 sym_free_first = sym->next;
375 return sym;
376 #else
377 sym = tcc_malloc(sizeof(Sym));
378 return sym;
379 #endif
382 ST_INLN void sym_free(Sym *sym)
384 #ifndef SYM_DEBUG
385 sym->next = sym_free_first;
386 sym_free_first = sym;
387 #else
388 tcc_free(sym);
389 #endif
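/* Standalone sketch of the allocation pattern used by __sym_malloc(),
   sym_malloc() and sym_free() above: grab a whole block of nodes at
   once, thread them onto a singly linked free list, then allocate and
   release in O(1) by popping/pushing that list (memory only goes back
   to the pool, never to the system, until the pools are freed).  The
   'node' type and POOL_NB value are made up for the illustration and
   error handling is omitted; kept out of the build. */
#if 0
#include <stdlib.h>

#define POOL_NB 8192

struct node { struct node *next; int payload; };

static struct node *free_first;

static struct node *pool_grow(void)
{
    struct node *pool = malloc(POOL_NB * sizeof(struct node));
    struct node *last = free_first;
    int i;
    for (i = 0; i < POOL_NB; i++) {   /* chain every node of the new block */
        pool[i].next = last;
        last = &pool[i];
    }
    free_first = last;
    return last;
}

static struct node *node_alloc(void)
{
    struct node *n = free_first ? free_first : pool_grow();
    free_first = n->next;             /* pop the head of the free list */
    return n;
}

static void node_free(struct node *n)
{
    n->next = free_first;             /* push it back for reuse */
    free_first = n;
}
#endif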
392 /* push, without hashing */
393 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
395 Sym *s;
397 s = sym_malloc();
398 s->scope = 0;
399 s->v = v;
400 s->type.t = t;
401 s->type.ref = NULL;
402 #ifdef _WIN64
403 s->d = NULL;
404 #endif
405 s->c = c;
406 s->next = NULL;
407 /* add in stack */
408 s->prev = *ps;
409 *ps = s;
410 return s;
413 /* find a symbol and return its associated structure. 's' is the top
414 of the symbol stack */
415 ST_FUNC Sym *sym_find2(Sym *s, int v)
417 while (s) {
418 if (s->v == v)
419 return s;
420 else if (s->v == -1)
421 return NULL;
422 s = s->prev;
424 return NULL;
427 /* structure lookup */
428 ST_INLN Sym *struct_find(int v)
430 v -= TOK_IDENT;
431 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
432 return NULL;
433 return table_ident[v]->sym_struct;
436 /* find an identifier */
437 ST_INLN Sym *sym_find(int v)
439 v -= TOK_IDENT;
440 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
441 return NULL;
442 return table_ident[v]->sym_identifier;
445 /* push a given symbol on the symbol stack */
446 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
448 Sym *s, **ps;
449 TokenSym *ts;
451 if (local_stack)
452 ps = &local_stack;
453 else
454 ps = &global_stack;
455 s = sym_push2(ps, v, type->t, c);
456 s->type.ref = type->ref;
457 s->r = r;
458 /* don't record fields or anonymous symbols */
459 /* XXX: simplify */
460 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
461 /* record symbol in token array */
462 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
463 if (v & SYM_STRUCT)
464 ps = &ts->sym_struct;
465 else
466 ps = &ts->sym_identifier;
467 s->prev_tok = *ps;
468 *ps = s;
469 s->scope = local_scope;
470 if (s->prev_tok && s->prev_tok->scope == s->scope)
471 tcc_error("redeclaration of '%s'",
472 get_tok_str(v & ~SYM_STRUCT, NULL));
474 return s;
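/* Sketch of the two chains sym_push() maintains (simplified, made-up
   types, not the real Sym/TokenSym layout): every symbol sits both on
   a scope stack ('prev', later unwound by sym_pop) and on a per-name
   chain hanging off its identifier ('prev_tok', walked by sym_find),
   and records the scope depth so a second definition in the same
   block can be rejected.  Kept out of the build. */
#if 0
struct symbol {
    struct symbol *prev;        /* next older symbol on the scope stack */
    struct symbol *prev_tok;    /* next older symbol with the same name */
    int scope;                  /* block depth at which it was declared */
    int value;
};

struct ident {
    struct symbol *sym;         /* most recent symbol for this name */
};

static struct symbol *push_sym(struct symbol **stack, struct ident *id,
                               int scope, int value)
{
    static struct symbol store[1024];
    static int n;
    struct symbol *s;

    if (id->sym && id->sym->scope == scope)
        return 0;               /* same name in the same block: redeclaration */
    s = &store[n++];
    s->prev = *stack;           /* link onto the scope stack (for sym_pop) */
    *stack = s;
    s->prev_tok = id->sym;      /* shadow any outer definition (for sym_find) */
    id->sym = s;
    s->scope = scope;
    s->value = value;
    return s;
}
#endif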
477 /* push a global identifier */
478 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
480 Sym *s, **ps;
481 s = sym_push2(&global_stack, v, t, c);
482 /* don't record anonymous symbol */
483 if (v < SYM_FIRST_ANOM) {
484 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
485 /* modify the top most local identifier, so that
486 sym_identifier will point to 's' when popped */
487 while (*ps != NULL)
488 ps = &(*ps)->prev_tok;
489 s->prev_tok = NULL;
490 *ps = s;
492 return s;
495 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
496 pop them yet from the list, but do remove them from the token array. */
497 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
499 Sym *s, *ss, **ps;
500 TokenSym *ts;
501 int v;
503 s = *ptop;
504 while(s != b) {
505 ss = s->prev;
506 v = s->v;
507 /* remove symbol in token array */
508 /* XXX: simplify */
509 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
510 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
511 if (v & SYM_STRUCT)
512 ps = &ts->sym_struct;
513 else
514 ps = &ts->sym_identifier;
515 *ps = s->prev_tok;
517 if (!keep)
518 sym_free(s);
519 s = ss;
521 if (!keep)
522 *ptop = b;
525 static void weaken_symbol(Sym *sym)
527 sym->type.t |= VT_WEAK;
528 if (sym->c > 0) {
529 int esym_type;
530 ElfW(Sym) *esym;
532 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
533 esym_type = ELFW(ST_TYPE)(esym->st_info);
534 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
538 static void apply_visibility(Sym *sym, CType *type)
540 int vis = sym->type.t & VT_VIS_MASK;
541 int vis2 = type->t & VT_VIS_MASK;
542 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
543 vis = vis2;
544 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
546 else
547 vis = (vis < vis2) ? vis : vis2;
548 sym->type.t &= ~VT_VIS_MASK;
549 sym->type.t |= vis;
551 if (sym->c > 0) {
552 ElfW(Sym) *esym;
554 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
555 vis >>= VT_VIS_SHIFT;
556 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
560 /* ------------------------------------------------------------------------- */
562 ST_FUNC void swap(int *p, int *q)
564 int t;
565 t = *p;
566 *p = *q;
567 *q = t;
570 static void vsetc(CType *type, int r, CValue *vc)
572 int v;
574 if (vtop >= vstack + (VSTACK_SIZE - 1))
575 tcc_error("memory full (vstack)");
576 /* cannot leave cpu flags if other instructions are generated. Also
577 avoid leaving VT_JMP anywhere except on the top of the stack
578 because it would complicate the code generator.
580 Don't do this when nocode_wanted. vtop might come from
581 !nocode_wanted regions (see 88_codeopt.c) and transforming
582 it to a register without actually generating code is wrong
583 as their value might still be used for real. All values
584 we push under nocode_wanted will eventually be popped
585 again, so that the VT_CMP/VT_JMP value will be in vtop
586 when code is unsuppressed again. */
587 if (vtop >= vstack && !nocode_wanted) {
588 v = vtop->r & VT_VALMASK;
589 if (v == VT_CMP || (v & ~1) == VT_JMP)
590 gv(RC_INT);
592 vtop++;
593 vtop->type = *type;
594 vtop->r = r;
595 vtop->r2 = VT_CONST;
596 vtop->c = *vc;
597 vtop->sym = NULL;
600 /* push a constant of type "type" whose value is unspecified */
601 ST_FUNC void vpush(CType *type)
603 CValue cval;
604 vsetc(type, VT_CONST, &cval);
607 /* push integer constant */
608 ST_FUNC void vpushi(int v)
610 CValue cval;
611 cval.i = v;
612 vsetc(&int_type, VT_CONST, &cval);
615 /* push a pointer sized constant */
616 static void vpushs(addr_t v)
618 CValue cval;
619 cval.i = v;
620 vsetc(&size_type, VT_CONST, &cval);
623 /* push arbitrary 64bit constant */
624 ST_FUNC void vpush64(int ty, unsigned long long v)
626 CValue cval;
627 CType ctype;
628 ctype.t = ty;
629 ctype.ref = NULL;
630 cval.i = v;
631 vsetc(&ctype, VT_CONST, &cval);
634 /* push long long constant */
635 static inline void vpushll(long long v)
637 vpush64(VT_LLONG, v);
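/* Tiny standalone model of the value-stack idiom behind the vpush*
   helpers: each operand becomes a tagged stack entry, and a binary
   operator pops two entries and pushes one, folding immediately when
   both operands are constants (which is what gen_opic() further down
   does for the real stack).  Types and names here are invented for
   the illustration; the sentinel slot mirrors __vstack[1+VSTACK_SIZE].
   Not part of the build. */
#if 0
#include <assert.h>

struct val { int is_const; long long c; };

static struct val stk[1 + 64];            /* stk[0] is a sentinel */
static struct val *top = stk;

static void push_const(long long c)
{
    ++top;
    top->is_const = 1;
    top->c = c;
}

static void binop(int op)
{
    if (top[-1].is_const && top[0].is_const) {   /* constant folding */
        long long a = top[-1].c, b = top[0].c;
        top--;
        top->c = (op == '+') ? a + b : a * b;
    }
    /* the non-constant case would emit code and leave a register entry */
}

int main(void)
{
    push_const(2); push_const(3); binop('+');    /* 2 + 3 */
    push_const(4); binop('*');                   /* (2 + 3) * 4 */
    assert(top->c == 20);
    return 0;
}
#endif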
640 /* push a symbol value of TYPE */
641 static inline void vpushsym(CType *type, Sym *sym)
643 CValue cval;
644 cval.i = 0;
645 vsetc(type, VT_CONST | VT_SYM, &cval);
646 vtop->sym = sym;
649 /* Return a static symbol pointing to a section */
650 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
652 int v;
653 Sym *sym;
655 v = anon_sym++;
656 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
657 sym->type.ref = type->ref;
658 sym->r = VT_CONST | VT_SYM;
659 put_extern_sym(sym, sec, offset, size);
660 return sym;
663 /* push a reference to a section offset by adding a dummy symbol */
664 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
666 vpushsym(type, get_sym_ref(type, sec, offset, size));
669 /* define a new external reference to a symbol 'v' of type 'u' */
670 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
672 Sym *s;
674 s = sym_find(v);
675 if (!s) {
676 /* push forward reference */
677 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
678 s->type.ref = type->ref;
679 s->r = r | VT_CONST | VT_SYM;
681 return s;
684 /* define a new external reference to a symbol 'v' */
685 static Sym *external_sym(int v, CType *type, int r)
687 Sym *s;
689 s = sym_find(v);
690 if (!s) {
691 /* push forward reference */
692 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
693 s->type.t |= VT_EXTERN;
694 } else if (s->type.ref == func_old_type.ref) {
695 s->type.ref = type->ref;
696 s->r = r | VT_CONST | VT_SYM;
697 s->type.t |= VT_EXTERN;
698 } else if (!is_compatible_types(&s->type, type)) {
699 tcc_error("incompatible types for redefinition of '%s'",
700 get_tok_str(v, NULL));
702 /* Merge some storage attributes. */
703 if (type->t & VT_WEAK)
704 weaken_symbol(s);
706 if (type->t & VT_VIS_MASK)
707 apply_visibility(s, type);
709 return s;
712 /* push a reference to global symbol v */
713 ST_FUNC void vpush_global_sym(CType *type, int v)
715 vpushsym(type, external_global_sym(v, type, 0));
718 ST_FUNC void vset(CType *type, int r, long v)
720 CValue cval;
722 cval.i = v;
723 vsetc(type, r, &cval);
726 static void vseti(int r, int v)
728 CType type;
729 type.t = VT_INT;
730 type.ref = 0;
731 vset(&type, r, v);
734 ST_FUNC void vswap(void)
736 SValue tmp;
737 /* cannot leave cpu flags if other instructions are generated. Also
738 avoid leaving VT_JMP anywhere except on the top of the stack
739 because it would complicate the code generator. */
740 if (vtop >= vstack) {
741 int v = vtop->r & VT_VALMASK;
742 if (v == VT_CMP || (v & ~1) == VT_JMP)
743 gv(RC_INT);
745 tmp = vtop[0];
746 vtop[0] = vtop[-1];
747 vtop[-1] = tmp;
749 /* XXX: +2% overall speed possible with optimized memswap
751 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
755 ST_FUNC void vpushv(SValue *v)
757 if (vtop >= vstack + (VSTACK_SIZE - 1))
758 tcc_error("memory full (vstack)");
759 vtop++;
760 *vtop = *v;
763 static void vdup(void)
765 vpushv(vtop);
768 /* save registers up to (vtop - n) stack entry */
769 ST_FUNC void save_regs(int n)
771 SValue *p, *p1;
772 for(p = vstack, p1 = vtop - n; p <= p1; p++)
773 save_reg(p->r);
776 /* save r to the memory stack, and mark it as being free */
777 ST_FUNC void save_reg(int r)
779 save_reg_upstack(r, 0);
782 /* save r to the memory stack, and mark it as being free,
783 if seen up to (vtop - n) stack entry */
784 ST_FUNC void save_reg_upstack(int r, int n)
786 int l, saved, size, align;
787 SValue *p, *p1, sv;
788 CType *type;
790 if ((r &= VT_VALMASK) >= VT_CONST)
791 return;
792 if (nocode_wanted)
793 return;
795 /* modify all stack values */
796 saved = 0;
797 l = 0;
798 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
799 if ((p->r & VT_VALMASK) == r ||
800 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
801 /* must save value on stack if not already done */
802 if (!saved) {
803 /* NOTE: must reload 'r' because r might be equal to r2 */
804 r = p->r & VT_VALMASK;
805 /* store register in the stack */
806 type = &p->type;
807 if ((p->r & VT_LVAL) ||
808 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
809 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
810 type = &char_pointer_type;
811 #else
812 type = &int_type;
813 #endif
814 size = type_size(type, &align);
815 loc = (loc - size) & -align;
816 sv.type.t = type->t;
817 sv.r = VT_LOCAL | VT_LVAL;
818 sv.c.i = loc;
819 store(r, &sv);
820 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
821 /* x86 specific: need to pop fp register ST0 if saved */
822 if (r == TREG_ST0) {
823 o(0xd8dd); /* fstp %st(0) */
825 #endif
826 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
827 /* special long long case */
828 if ((type->t & VT_BTYPE) == VT_LLONG) {
829 sv.c.i += 4;
830 store(p->r2, &sv);
832 #endif
833 l = loc;
834 saved = 1;
836 /* mark that stack entry as being saved on the stack */
837 if (p->r & VT_LVAL) {
838 /* also clear the bounded flag because the
839 relocation address of the function was stored in
840 p->c.i */
841 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
842 } else {
843 p->r = lvalue_type(p->type.t) | VT_LOCAL;
845 p->r2 = VT_CONST;
846 p->c.i = l;
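/* Sketch of the frame-slot carving used above (and elsewhere in this
   file): the frame grows downwards, so a slot of 'size' bytes aligned
   to 'align' is obtained with  loc = (loc - size) & -align,  which
   rounds the negative offset down to the next multiple of the
   alignment (relies on two's complement, as the real code does).
   Standalone illustration, not part of the build. */
#if 0
#include <assert.h>

static int alloc_slot(int *loc, int size, int align)
{
    *loc = (*loc - size) & -align;    /* -align is ...111000 for align == 8 */
    return *loc;                      /* negative offset from the frame base */
}

int main(void)
{
    int loc = 0;
    assert(alloc_slot(&loc, 4, 4) == -4);     /* an int at -4 */
    assert(alloc_slot(&loc, 1, 1) == -5);     /* a char right below it */
    assert(alloc_slot(&loc, 8, 8) == -16);    /* a double, realigned to 8 */
    return 0;
}
#endif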
851 #ifdef TCC_TARGET_ARM
852 /* find a register of class 'rc2' with at most one reference on stack.
853 * If none, call get_reg(rc) */
854 ST_FUNC int get_reg_ex(int rc, int rc2)
856 int r;
857 SValue *p;
859 for(r=0;r<NB_REGS;r++) {
860 if (reg_classes[r] & rc2) {
861 int n;
862 n=0;
863 for(p = vstack; p <= vtop; p++) {
864 if ((p->r & VT_VALMASK) == r ||
865 (p->r2 & VT_VALMASK) == r)
866 n++;
868 if (n <= 1)
869 return r;
872 return get_reg(rc);
874 #endif
876 /* find a free register of class 'rc'. If none, save one register */
877 ST_FUNC int get_reg(int rc)
879 int r;
880 SValue *p;
882 /* find a free register */
883 for(r=0;r<NB_REGS;r++) {
884 if (reg_classes[r] & rc) {
885 if (nocode_wanted)
886 return r;
887 for(p=vstack;p<=vtop;p++) {
888 if ((p->r & VT_VALMASK) == r ||
889 (p->r2 & VT_VALMASK) == r)
890 goto notfound;
892 return r;
894 notfound: ;
897 /* no register left : free the first one on the stack (VERY
898 IMPORTANT to start from the bottom to ensure that we don't
899 spill registers used in gen_opi()) */
900 for(p=vstack;p<=vtop;p++) {
901 /* look at second register (if long long) */
902 r = p->r2 & VT_VALMASK;
903 if (r < VT_CONST && (reg_classes[r] & rc))
904 goto save_found;
905 r = p->r & VT_VALMASK;
906 if (r < VT_CONST && (reg_classes[r] & rc)) {
907 save_found:
908 save_reg(r);
909 return r;
912 /* Should never come here */
913 return -1;
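/* Simplified standalone model of the policy in get_reg(): first look
   for a register of the wanted class that no value-stack entry
   references; if none is free, one of the busy ones must be spilled
   (the real code scans the value stack from the bottom to pick which,
   then calls save_reg()).  The register table below is invented for
   the illustration; not part of the build. */
#if 0
#define NB_R 4

static int classes[NB_R] = { 1, 1, 2, 2 };   /* two int regs, two float regs */

/* use[r] != 0 means some value-stack entry currently lives in register r */
static int pick_reg(int rc, const int use[NB_R], int *needs_spill)
{
    int r;
    *needs_spill = 0;
    for (r = 0; r < NB_R; r++)
        if ((classes[r] & rc) && !use[r])
            return r;                        /* free register of that class */
    for (r = 0; r < NB_R; r++)
        if (classes[r] & rc) {
            *needs_spill = 1;                /* caller would save_reg(r) */
            return r;
        }
    return -1;                               /* should never come here */
}
#endif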
916 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
917 if needed */
918 static void move_reg(int r, int s, int t)
920 SValue sv;
922 if (r != s) {
923 save_reg(r);
924 sv.type.t = t;
925 sv.type.ref = NULL;
926 sv.r = s;
927 sv.c.i = 0;
928 load(r, &sv);
932 /* get address of vtop (vtop MUST BE an lvalue) */
933 ST_FUNC void gaddrof(void)
935 if (vtop->r & VT_REF)
936 gv(RC_INT);
937 vtop->r &= ~VT_LVAL;
938 /* tricky: if saved lvalue, then we can go back to lvalue */
939 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
940 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
945 #ifdef CONFIG_TCC_BCHECK
946 /* generate lvalue bound code */
947 static void gbound(void)
949 int lval_type;
950 CType type1;
952 vtop->r &= ~VT_MUSTBOUND;
953 /* if lvalue, then use checking code before dereferencing */
954 if (vtop->r & VT_LVAL) {
955 /* if not VT_BOUNDED value, then make one */
956 if (!(vtop->r & VT_BOUNDED)) {
957 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
958 /* must save type because we must set it to int to get pointer */
959 type1 = vtop->type;
960 vtop->type.t = VT_PTR;
961 gaddrof();
962 vpushi(0);
963 gen_bounded_ptr_add();
964 vtop->r |= lval_type;
965 vtop->type = type1;
967 /* then check for dereferencing */
968 gen_bounded_ptr_deref();
971 #endif
973 /* store vtop in a register belonging to class 'rc'. lvalues are
974 converted to values. Cannot be used if the value cannot be converted
975 to a register value (such as structures). */
976 ST_FUNC int gv(int rc)
978 int r, bit_pos, bit_size, size, align, i;
979 int rc2;
981 /* NOTE: get_reg can modify vstack[] */
982 if (vtop->type.t & VT_BITFIELD) {
983 CType type;
984 int bits = 32;
985 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
986 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
987 /* remove bit field info to avoid loops */
988 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
989 /* cast to int to propagate signedness in following ops */
990 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
991 type.t = VT_LLONG;
992 bits = 64;
993 } else
994 type.t = VT_INT;
995 if((vtop->type.t & VT_UNSIGNED) ||
996 (vtop->type.t & VT_BTYPE) == VT_BOOL)
997 type.t |= VT_UNSIGNED;
998 gen_cast(&type);
999 /* generate shifts */
1000 vpushi(bits - (bit_pos + bit_size));
1001 gen_op(TOK_SHL);
1002 vpushi(bits - bit_size);
1003 /* NOTE: transformed to SHR if unsigned */
1004 gen_op(TOK_SAR);
1005 r = gv(rc);
1006 } else {
1007 if (is_float(vtop->type.t) &&
1008 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1009 Sym *sym;
1010 int *ptr;
1011 unsigned long offset;
1012 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1013 CValue check;
1014 #endif
1016 /* XXX: unify with initializers handling ? */
1017 /* CPUs usually cannot use float constants, so we store them
1018 generically in data segment */
1019 size = type_size(&vtop->type, &align);
1020 offset = (data_section->data_offset + align - 1) & -align;
1021 data_section->data_offset = offset;
1022 /* XXX: not portable yet */
1023 #if defined(__i386__) || defined(__x86_64__)
1024 /* Zero pad x87 tenbyte long doubles */
1025 if (size == LDOUBLE_SIZE) {
1026 vtop->c.tab[2] &= 0xffff;
1027 #if LDOUBLE_SIZE == 16
1028 vtop->c.tab[3] = 0;
1029 #endif
1031 #endif
1032 ptr = section_ptr_add(data_section, size);
1033 size = size >> 2;
1034 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1035 check.d = 1;
1036 if(check.tab[0])
1037 for(i=0;i<size;i++)
1038 ptr[i] = vtop->c.tab[size-1-i];
1039 else
1040 #endif
1041 for(i=0;i<size;i++)
1042 ptr[i] = vtop->c.tab[i];
1043 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1044 vtop->r |= VT_LVAL | VT_SYM;
1045 vtop->sym = sym;
1046 vtop->c.i = 0;
1048 #ifdef CONFIG_TCC_BCHECK
1049 if (vtop->r & VT_MUSTBOUND)
1050 gbound();
1051 #endif
1053 r = vtop->r & VT_VALMASK;
1054 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1055 #ifndef TCC_TARGET_ARM64
1056 if (rc == RC_IRET)
1057 rc2 = RC_LRET;
1058 #ifdef TCC_TARGET_X86_64
1059 else if (rc == RC_FRET)
1060 rc2 = RC_QRET;
1061 #endif
1062 #endif
1064 /* need to reload if:
1065 - constant
1066 - lvalue (need to dereference pointer)
1067 - already a register, but not in the right class */
1068 if (r >= VT_CONST
1069 || (vtop->r & VT_LVAL)
1070 || !(reg_classes[r] & rc)
1071 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1072 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1073 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1074 #else
1075 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1076 #endif
1079 r = get_reg(rc);
1080 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1081 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1082 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1083 #else
1084 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1085 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1086 unsigned long long ll;
1087 #endif
1088 int r2, original_type;
1089 original_type = vtop->type.t;
1090 /* two register type load : expand to two words
1091 temporarily */
1092 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1093 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1094 /* load constant */
1095 ll = vtop->c.i;
1096 vtop->c.i = ll; /* first word */
1097 load(r, vtop);
1098 vtop->r = r; /* save register value */
1099 vpushi(ll >> 32); /* second word */
1100 } else
1101 #endif
1102 if (vtop->r & VT_LVAL) {
1103 /* We do not want to modify the long long
1104 pointer here, so the safest (and least
1105 efficient) approach is to save all the other
1106 registers to the stack. XXX: totally inefficient. */
1107 #if 0
1108 save_regs(1);
1109 #else
1110 /* lvalue_save: save only if used further down the stack */
1111 save_reg_upstack(vtop->r, 1);
1112 #endif
1113 /* load from memory */
1114 vtop->type.t = load_type;
1115 load(r, vtop);
1116 vdup();
1117 vtop[-1].r = r; /* save register value */
1118 /* increment pointer to get second word */
1119 vtop->type.t = addr_type;
1120 gaddrof();
1121 vpushi(load_size);
1122 gen_op('+');
1123 vtop->r |= VT_LVAL;
1124 vtop->type.t = load_type;
1125 } else {
1126 /* move registers */
1127 load(r, vtop);
1128 vdup();
1129 vtop[-1].r = r; /* save register value */
1130 vtop->r = vtop[-1].r2;
1132 /* Allocate second register. Here we rely on the fact that
1133 get_reg() tries first to free r2 of an SValue. */
1134 r2 = get_reg(rc2);
1135 load(r2, vtop);
1136 vpop();
1137 /* write second register */
1138 vtop->r2 = r2;
1139 vtop->type.t = original_type;
1140 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1141 int t1, t;
1142 /* lvalue of scalar type : need to use lvalue type
1143 because of possible cast */
1144 t = vtop->type.t;
1145 t1 = t;
1146 /* compute memory access type */
1147 if (vtop->r & VT_REF)
1148 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1149 t = VT_PTR;
1150 #else
1151 t = VT_INT;
1152 #endif
1153 else if (vtop->r & VT_LVAL_BYTE)
1154 t = VT_BYTE;
1155 else if (vtop->r & VT_LVAL_SHORT)
1156 t = VT_SHORT;
1157 if (vtop->r & VT_LVAL_UNSIGNED)
1158 t |= VT_UNSIGNED;
1159 vtop->type.t = t;
1160 load(r, vtop);
1161 /* restore wanted type */
1162 vtop->type.t = t1;
1163 } else {
1164 /* one register type load */
1165 load(r, vtop);
1168 vtop->r = r;
1169 #ifdef TCC_TARGET_C67
1170 /* uses register pairs for doubles */
1171 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1172 vtop->r2 = r+1;
1173 #endif
1175 return r;
1178 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1179 ST_FUNC void gv2(int rc1, int rc2)
1181 int v;
1183 /* generate more generic register first. But VT_JMP or VT_CMP
1184 values must be generated first in all cases to avoid possible
1185 reload errors */
1186 v = vtop[0].r & VT_VALMASK;
1187 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1188 vswap();
1189 gv(rc1);
1190 vswap();
1191 gv(rc2);
1192 /* test if reload is needed for first register */
1193 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1194 vswap();
1195 gv(rc1);
1196 vswap();
1198 } else {
1199 gv(rc2);
1200 vswap();
1201 gv(rc1);
1202 vswap();
1203 /* test if reload is needed for first register */
1204 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1205 gv(rc2);
1210 #ifndef TCC_TARGET_ARM64
1211 /* wrapper around RC_FRET to return a register by type */
1212 static int rc_fret(int t)
1214 #ifdef TCC_TARGET_X86_64
1215 if (t == VT_LDOUBLE) {
1216 return RC_ST0;
1218 #endif
1219 return RC_FRET;
1221 #endif
1223 /* wrapper around REG_FRET to return a register by type */
1224 static int reg_fret(int t)
1226 #ifdef TCC_TARGET_X86_64
1227 if (t == VT_LDOUBLE) {
1228 return TREG_ST0;
1230 #endif
1231 return REG_FRET;
1234 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1235 /* expand 64bit on stack in two ints */
1236 static void lexpand(void)
1238 int u, v;
1239 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1240 v = vtop->r & (VT_VALMASK | VT_LVAL);
1241 if (v == VT_CONST) {
1242 vdup();
1243 vtop[0].c.i >>= 32;
1244 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1245 vdup();
1246 vtop[0].c.i += 4;
1247 } else {
1248 gv(RC_INT);
1249 vdup();
1250 vtop[0].r = vtop[-1].r2;
1251 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1253 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1255 #endif
1257 #ifdef TCC_TARGET_ARM
1258 /* expand long long on stack */
1259 ST_FUNC void lexpand_nr(void)
1261 int u,v;
1263 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1264 vdup();
1265 vtop->r2 = VT_CONST;
1266 vtop->type.t = VT_INT | u;
1267 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1268 if (v == VT_CONST) {
1269 vtop[-1].c.i = vtop->c.i;
1270 vtop->c.i = vtop->c.i >> 32;
1271 vtop->r = VT_CONST;
1272 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1273 vtop->c.i += 4;
1274 vtop->r = vtop[-1].r;
1275 } else if (v > VT_CONST) {
1276 vtop--;
1277 lexpand();
1278 } else
1279 vtop->r = vtop[-1].r2;
1280 vtop[-1].r2 = VT_CONST;
1281 vtop[-1].type.t = VT_INT | u;
1283 #endif
1285 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1286 /* build a long long from two ints */
1287 static void lbuild(int t)
1289 gv2(RC_INT, RC_INT);
1290 vtop[-1].r2 = vtop[0].r;
1291 vtop[-1].type.t = t;
1292 vpop();
1294 #endif
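/* On 32-bit targets a 'long long' occupies two registers (r/r2), and
   lexpand()/lbuild() above switch between the one-entry and two-entry
   stack forms.  The underlying word split is just this (standalone
   check, not part of the build): */
#if 0
#include <assert.h>
#include <stdint.h>

static void split64(uint64_t v, uint32_t *lo, uint32_t *hi)
{
    *lo = (uint32_t)v;
    *hi = (uint32_t)(v >> 32);
}

static uint64_t build64(uint32_t lo, uint32_t hi)
{
    return (uint64_t)hi << 32 | lo;
}

int main(void)
{
    uint32_t lo, hi;
    split64(0x1122334455667788ull, &lo, &hi);
    assert(lo == 0x55667788u && hi == 0x11223344u);
    assert(build64(lo, hi) == 0x1122334455667788ull);
    return 0;
}
#endif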
1296 /* rotate n first stack elements to the bottom
1297 I1 ... In -> I2 ... In I1 [top is right]
1299 ST_FUNC void vrotb(int n)
1301 int i;
1302 SValue tmp;
1304 tmp = vtop[-n + 1];
1305 for(i=-n+1;i!=0;i++)
1306 vtop[i] = vtop[i+1];
1307 vtop[0] = tmp;
1310 /* rotate the n elements before entry e towards the top
1311 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1313 ST_FUNC void vrote(SValue *e, int n)
1315 int i;
1316 SValue tmp;
1318 tmp = *e;
1319 for(i = 0;i < n - 1; i++)
1320 e[-i] = e[-i - 1];
1321 e[-n + 1] = tmp;
1324 /* rotate n first stack elements to the top
1325 I1 ... In -> In I1 ... I(n-1) [top is right]
1327 ST_FUNC void vrott(int n)
1329 vrote(vtop, n);
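/* The comments above describe the rotations with the stack top on the
   right.  A standalone model of vrotb() over a plain array (the top is
   the last element); vrott()/vrote() are the mirror image.  Names are
   invented for the illustration; not part of the build. */
#if 0
#include <assert.h>

/* rotate the n topmost elements to the bottom: I1 ... In -> I2 ... In I1 */
static void rotb(int *top, int n)
{
    int i, tmp = top[-n + 1];         /* I1, the deepest of the n */
    for (i = -n + 1; i != 0; i++)
        top[i] = top[i + 1];
    top[0] = tmp;                     /* I1 ends up on top */
}

int main(void)
{
    int stk[5] = { 9, 1, 2, 3, 4 };   /* I1..I4 = 1,2,3,4; top is stk[4] */
    rotb(&stk[4], 4);
    assert(stk[1] == 2 && stk[2] == 3 && stk[3] == 4 && stk[4] == 1);
    return 0;
}
#endif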
1332 /* pop stack value */
1333 ST_FUNC void vpop(void)
1335 int v;
1336 v = vtop->r & VT_VALMASK;
1337 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1338 /* for x86, we need to pop the FP stack */
1339 if (v == TREG_ST0) {
1340 o(0xd8dd); /* fstp %st(0) */
1341 } else
1342 #endif
1343 if (v == VT_JMP || v == VT_JMPI) {
1344 /* need to put correct jump if && or || without test */
1345 gsym(vtop->c.i);
1347 vtop--;
1350 /* convert stack entry to register and duplicate its value in another
1351 register */
1352 static void gv_dup(void)
1354 int rc, t, r, r1;
1355 SValue sv;
1357 t = vtop->type.t;
1358 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1359 if ((t & VT_BTYPE) == VT_LLONG) {
1360 lexpand();
1361 gv_dup();
1362 vswap();
1363 vrotb(3);
1364 gv_dup();
1365 vrotb(4);
1366 /* stack: H L L1 H1 */
1367 lbuild(t);
1368 vrotb(3);
1369 vrotb(3);
1370 vswap();
1371 lbuild(t);
1372 vswap();
1373 } else
1374 #endif
1376 /* duplicate value */
1377 rc = RC_INT;
1378 sv.type.t = VT_INT;
1379 if (is_float(t)) {
1380 rc = RC_FLOAT;
1381 #ifdef TCC_TARGET_X86_64
1382 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1383 rc = RC_ST0;
1385 #endif
1386 sv.type.t = t;
1388 r = gv(rc);
1389 r1 = get_reg(rc);
1390 sv.r = r;
1391 sv.c.i = 0;
1392 load(r1, &sv); /* move r to r1 */
1393 vdup();
1394 /* duplicates value */
1395 if (r != r1)
1396 vtop->r = r1;
1400 /* Generate value test
1402 * Generate a test for any value (jump, comparison and integers) */
1403 ST_FUNC int gvtst(int inv, int t)
1405 int v = vtop->r & VT_VALMASK;
1406 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1407 vpushi(0);
1408 gen_op(TOK_NE);
1410 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1411 /* constant jmp optimization */
1412 if ((vtop->c.i != 0) != inv)
1413 t = gjmp(t);
1414 vtop--;
1415 return t;
1417 return gtst(inv, t);
1420 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1421 /* generate CPU independent (unsigned) long long operations */
1422 static void gen_opl(int op)
1424 int t, a, b, op1, c, i;
1425 int func;
1426 unsigned short reg_iret = REG_IRET;
1427 unsigned short reg_lret = REG_LRET;
1428 SValue tmp;
1430 switch(op) {
1431 case '/':
1432 case TOK_PDIV:
1433 func = TOK___divdi3;
1434 goto gen_func;
1435 case TOK_UDIV:
1436 func = TOK___udivdi3;
1437 goto gen_func;
1438 case '%':
1439 func = TOK___moddi3;
1440 goto gen_mod_func;
1441 case TOK_UMOD:
1442 func = TOK___umoddi3;
1443 gen_mod_func:
1444 #ifdef TCC_ARM_EABI
1445 reg_iret = TREG_R2;
1446 reg_lret = TREG_R3;
1447 #endif
1448 gen_func:
1449 /* call generic long long function */
1450 vpush_global_sym(&func_old_type, func);
1451 vrott(3);
1452 gfunc_call(2);
1453 vpushi(0);
1454 vtop->r = reg_iret;
1455 vtop->r2 = reg_lret;
1456 break;
1457 case '^':
1458 case '&':
1459 case '|':
1460 case '*':
1461 case '+':
1462 case '-':
1463 //pv("gen_opl A",0,2);
1464 t = vtop->type.t;
1465 vswap();
1466 lexpand();
1467 vrotb(3);
1468 lexpand();
1469 /* stack: L1 H1 L2 H2 */
1470 tmp = vtop[0];
1471 vtop[0] = vtop[-3];
1472 vtop[-3] = tmp;
1473 tmp = vtop[-2];
1474 vtop[-2] = vtop[-3];
1475 vtop[-3] = tmp;
1476 vswap();
1477 /* stack: H1 H2 L1 L2 */
1478 //pv("gen_opl B",0,4);
1479 if (op == '*') {
1480 vpushv(vtop - 1);
1481 vpushv(vtop - 1);
1482 gen_op(TOK_UMULL);
1483 lexpand();
1484 /* stack: H1 H2 L1 L2 ML MH */
1485 for(i=0;i<4;i++)
1486 vrotb(6);
1487 /* stack: ML MH H1 H2 L1 L2 */
1488 tmp = vtop[0];
1489 vtop[0] = vtop[-2];
1490 vtop[-2] = tmp;
1491 /* stack: ML MH H1 L2 H2 L1 */
1492 gen_op('*');
1493 vrotb(3);
1494 vrotb(3);
1495 gen_op('*');
1496 /* stack: ML MH M1 M2 */
1497 gen_op('+');
1498 gen_op('+');
1499 } else if (op == '+' || op == '-') {
1500 /* XXX: add non carry method too (for MIPS or alpha) */
1501 if (op == '+')
1502 op1 = TOK_ADDC1;
1503 else
1504 op1 = TOK_SUBC1;
1505 gen_op(op1);
1506 /* stack: H1 H2 (L1 op L2) */
1507 vrotb(3);
1508 vrotb(3);
1509 gen_op(op1 + 1); /* TOK_xxxC2 */
1510 } else {
1511 gen_op(op);
1512 /* stack: H1 H2 (L1 op L2) */
1513 vrotb(3);
1514 vrotb(3);
1515 /* stack: (L1 op L2) H1 H2 */
1516 gen_op(op);
1517 /* stack: (L1 op L2) (H1 op H2) */
1519 /* stack: L H */
1520 lbuild(t);
1521 break;
1522 case TOK_SAR:
1523 case TOK_SHR:
1524 case TOK_SHL:
1525 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1526 t = vtop[-1].type.t;
1527 vswap();
1528 lexpand();
1529 vrotb(3);
1530 /* stack: L H shift */
1531 c = (int)vtop->c.i;
1532 /* constant: simpler */
1533 /* NOTE: all comments are for SHL. The other cases are
1534 done by swapping words */
1535 vpop();
1536 if (op != TOK_SHL)
1537 vswap();
1538 if (c >= 32) {
1539 /* stack: L H */
1540 vpop();
1541 if (c > 32) {
1542 vpushi(c - 32);
1543 gen_op(op);
1545 if (op != TOK_SAR) {
1546 vpushi(0);
1547 } else {
1548 gv_dup();
1549 vpushi(31);
1550 gen_op(TOK_SAR);
1552 vswap();
1553 } else {
1554 vswap();
1555 gv_dup();
1556 /* stack: H L L */
1557 vpushi(c);
1558 gen_op(op);
1559 vswap();
1560 vpushi(32 - c);
1561 if (op == TOK_SHL)
1562 gen_op(TOK_SHR);
1563 else
1564 gen_op(TOK_SHL);
1565 vrotb(3);
1566 /* stack: L L H */
1567 vpushi(c);
1568 if (op == TOK_SHL)
1569 gen_op(TOK_SHL);
1570 else
1571 gen_op(TOK_SHR);
1572 gen_op('|');
1574 if (op != TOK_SHL)
1575 vswap();
1576 lbuild(t);
1577 } else {
1578 /* XXX: should provide a faster fallback on x86 ? */
1579 switch(op) {
1580 case TOK_SAR:
1581 func = TOK___ashrdi3;
1582 goto gen_func;
1583 case TOK_SHR:
1584 func = TOK___lshrdi3;
1585 goto gen_func;
1586 case TOK_SHL:
1587 func = TOK___ashldi3;
1588 goto gen_func;
1591 break;
1592 default:
1593 /* compare operations */
1594 t = vtop->type.t;
1595 vswap();
1596 lexpand();
1597 vrotb(3);
1598 lexpand();
1599 /* stack: L1 H1 L2 H2 */
1600 tmp = vtop[-1];
1601 vtop[-1] = vtop[-2];
1602 vtop[-2] = tmp;
1603 /* stack: L1 L2 H1 H2 */
1604 /* compare high */
1605 op1 = op;
1606 /* when values are equal, we need to compare low words. since
1607 the jump is inverted, we invert the test too. */
1608 if (op1 == TOK_LT)
1609 op1 = TOK_LE;
1610 else if (op1 == TOK_GT)
1611 op1 = TOK_GE;
1612 else if (op1 == TOK_ULT)
1613 op1 = TOK_ULE;
1614 else if (op1 == TOK_UGT)
1615 op1 = TOK_UGE;
1616 a = 0;
1617 b = 0;
1618 gen_op(op1);
1619 if (op1 != TOK_NE) {
1620 a = gvtst(1, 0);
1622 if (op != TOK_EQ) {
1623 /* generate non equal test */
1624 /* XXX: NOT PORTABLE yet */
1625 if (a == 0) {
1626 b = gvtst(0, 0);
1627 } else {
1628 #if defined(TCC_TARGET_I386)
1629 b = gjmp2(0x850f, 0);
1630 #elif defined(TCC_TARGET_ARM)
1631 b = ind;
1632 o(0x1A000000 | encbranch(ind, 0, 1));
1633 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1634 tcc_error("not implemented");
1635 #else
1636 #error not supported
1637 #endif
1640 /* compare low. Always unsigned */
1641 op1 = op;
1642 if (op1 == TOK_LT)
1643 op1 = TOK_ULT;
1644 else if (op1 == TOK_LE)
1645 op1 = TOK_ULE;
1646 else if (op1 == TOK_GT)
1647 op1 = TOK_UGT;
1648 else if (op1 == TOK_GE)
1649 op1 = TOK_UGE;
1650 gen_op(op1);
1651 a = gvtst(1, a);
1652 gsym(b);
1653 vseti(VT_JMPI, a);
1654 break;
1657 #endif
1659 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1661 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1662 return (a ^ b) >> 63 ? -x : x;
1665 static int gen_opic_lt(uint64_t a, uint64_t b)
1667 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
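/* gen_opic_lt() orders two values that are stored as unsigned but must
   compare as signed: XOR-ing the sign bit maps INT64_MIN..INT64_MAX
   monotonically onto 0..UINT64_MAX, so a plain unsigned '<' then yields
   the signed ordering.  Standalone check of the trick (not built): */
#if 0
#include <assert.h>
#include <stdint.h>

static int signed_lt(uint64_t a, uint64_t b)
{
    return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
}

int main(void)
{
    assert(signed_lt((uint64_t)-5, 3));       /* -5 < 3 as signed */
    assert(!signed_lt(3, (uint64_t)-5));      /*  3 < -5 is false */
    assert(signed_lt((uint64_t)INT64_MIN, (uint64_t)INT64_MAX));
    return 0;
}
#endif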
1670 /* handle integer constant folding and various
1671 machine-independent optimizations */
1672 static void gen_opic(int op)
1674 SValue *v1 = vtop - 1;
1675 SValue *v2 = vtop;
1676 int t1 = v1->type.t & VT_BTYPE;
1677 int t2 = v2->type.t & VT_BTYPE;
1678 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1679 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1680 uint64_t l1 = c1 ? v1->c.i : 0;
1681 uint64_t l2 = c2 ? v2->c.i : 0;
1682 int shm = (t1 == VT_LLONG) ? 63 : 31;
1684 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1685 l1 = ((uint32_t)l1 |
1686 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1687 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1688 l2 = ((uint32_t)l2 |
1689 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1691 if (c1 && c2) {
1692 switch(op) {
1693 case '+': l1 += l2; break;
1694 case '-': l1 -= l2; break;
1695 case '&': l1 &= l2; break;
1696 case '^': l1 ^= l2; break;
1697 case '|': l1 |= l2; break;
1698 case '*': l1 *= l2; break;
1700 case TOK_PDIV:
1701 case '/':
1702 case '%':
1703 case TOK_UDIV:
1704 case TOK_UMOD:
1705 /* if division by zero, generate explicit division */
1706 if (l2 == 0) {
1707 if (const_wanted)
1708 tcc_error("division by zero in constant");
1709 goto general_case;
1711 switch(op) {
1712 default: l1 = gen_opic_sdiv(l1, l2); break;
1713 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1714 case TOK_UDIV: l1 = l1 / l2; break;
1715 case TOK_UMOD: l1 = l1 % l2; break;
1717 break;
1718 case TOK_SHL: l1 <<= (l2 & shm); break;
1719 case TOK_SHR: l1 >>= (l2 & shm); break;
1720 case TOK_SAR:
1721 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1722 break;
1723 /* tests */
1724 case TOK_ULT: l1 = l1 < l2; break;
1725 case TOK_UGE: l1 = l1 >= l2; break;
1726 case TOK_EQ: l1 = l1 == l2; break;
1727 case TOK_NE: l1 = l1 != l2; break;
1728 case TOK_ULE: l1 = l1 <= l2; break;
1729 case TOK_UGT: l1 = l1 > l2; break;
1730 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1731 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1732 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1733 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1734 /* logical */
1735 case TOK_LAND: l1 = l1 && l2; break;
1736 case TOK_LOR: l1 = l1 || l2; break;
1737 default:
1738 goto general_case;
1740 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1741 l1 = ((uint32_t)l1 |
1742 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1743 v1->c.i = l1;
1744 vtop--;
1745 } else {
1746 /* if commutative ops, put c2 as constant */
1747 if (c1 && (op == '+' || op == '&' || op == '^' ||
1748 op == '|' || op == '*')) {
1749 vswap();
1750 c2 = c1; //c = c1, c1 = c2, c2 = c;
1751 l2 = l1; //l = l1, l1 = l2, l2 = l;
1753 if (!const_wanted &&
1754 c1 && ((l1 == 0 &&
1755 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1756 (l1 == -1 && op == TOK_SAR))) {
1757 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1758 vtop--;
1759 } else if (!const_wanted &&
1760 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1761 (l2 == -1 && op == '|') ||
1762 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1763 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1764 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1765 if (l2 == 1)
1766 vtop->c.i = 0;
1767 vswap();
1768 vtop--;
1769 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1770 op == TOK_PDIV) &&
1771 l2 == 1) ||
1772 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1773 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1774 l2 == 0) ||
1775 (op == '&' &&
1776 l2 == -1))) {
1777 /* filter out NOP operations like x*1, x-0, x&-1... */
1778 vtop--;
1779 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1780 /* try to use shifts instead of muls or divs */
1781 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1782 int n = -1;
1783 while (l2) {
1784 l2 >>= 1;
1785 n++;
1787 vtop->c.i = n;
1788 if (op == '*')
1789 op = TOK_SHL;
1790 else if (op == TOK_PDIV)
1791 op = TOK_SAR;
1792 else
1793 op = TOK_SHR;
1795 goto general_case;
1796 } else if (c2 && (op == '+' || op == '-') &&
1797 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1798 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1799 /* symbol + constant case */
1800 if (op == '-')
1801 l2 = -l2;
1802 l2 += vtop[-1].c.i;
1803 /* The backends can't always deal with addends to symbols
1804 larger than +-1<<31. Don't construct such. */
1805 if ((int)l2 != l2)
1806 goto general_case;
1807 vtop--;
1808 vtop->c.i = l2;
1809 } else {
1810 general_case:
1811 /* call low level op generator */
1812 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1813 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1814 gen_opl(op);
1815 else
1816 gen_opi(op);
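/* The mul/div strength reduction above rests on two small idioms:
   (l2 & (l2 - 1)) == 0 tests that l2 is a power of two, and the while
   loop counts its trailing zeros to obtain the shift amount that
   replaces the multiply or divide.  Standalone illustration of both
   (not part of the build): */
#if 0
#include <assert.h>
#include <stdint.h>

/* returns n if v == 1 << n, else -1 */
static int pow2_shift(uint64_t v)
{
    int n = -1;
    if (v == 0 || (v & (v - 1)) != 0)
        return -1;                    /* zero or more than one bit set */
    while (v) {
        v >>= 1;
        n++;
    }
    return n;
}

int main(void)
{
    assert(pow2_shift(1) == 0);
    assert(pow2_shift(8) == 3);       /* x * 8  ->  x << 3 */
    assert(pow2_shift(4096) == 12);
    assert(pow2_shift(6) == -1);      /* not a power of two: keep the mul */
    return 0;
}
#endif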
1821 /* generate a floating point operation with constant propagation */
1822 static void gen_opif(int op)
1824 int c1, c2;
1825 SValue *v1, *v2;
1826 long double f1, f2;
1828 v1 = vtop - 1;
1829 v2 = vtop;
1830 /* currently, we cannot do computations with forward symbols */
1831 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1832 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1833 if (c1 && c2) {
1834 if (v1->type.t == VT_FLOAT) {
1835 f1 = v1->c.f;
1836 f2 = v2->c.f;
1837 } else if (v1->type.t == VT_DOUBLE) {
1838 f1 = v1->c.d;
1839 f2 = v2->c.d;
1840 } else {
1841 f1 = v1->c.ld;
1842 f2 = v2->c.ld;
1845 /* NOTE: we only do constant propagation on finite numbers (not
1846 NaN or infinity) (ANSI spec) */
1847 if (!ieee_finite(f1) || !ieee_finite(f2))
1848 goto general_case;
1850 switch(op) {
1851 case '+': f1 += f2; break;
1852 case '-': f1 -= f2; break;
1853 case '*': f1 *= f2; break;
1854 case '/':
1855 if (f2 == 0.0) {
1856 if (const_wanted)
1857 tcc_error("division by zero in constant");
1858 goto general_case;
1860 f1 /= f2;
1861 break;
1862 /* XXX: also handles tests ? */
1863 default:
1864 goto general_case;
1866 /* XXX: overflow test ? */
1867 if (v1->type.t == VT_FLOAT) {
1868 v1->c.f = f1;
1869 } else if (v1->type.t == VT_DOUBLE) {
1870 v1->c.d = f1;
1871 } else {
1872 v1->c.ld = f1;
1874 vtop--;
1875 } else {
1876 general_case:
1877 gen_opf(op);
1881 static int pointed_size(CType *type)
1883 int align;
1884 return type_size(pointed_type(type), &align);
1887 static void vla_runtime_pointed_size(CType *type)
1889 int align;
1890 vla_runtime_type_size(pointed_type(type), &align);
1893 static inline int is_null_pointer(SValue *p)
1895 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1896 return 0;
1897 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1898 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1899 ((p->type.t & VT_BTYPE) == VT_PTR &&
1900 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1903 static inline int is_integer_btype(int bt)
1905 return (bt == VT_BYTE || bt == VT_SHORT ||
1906 bt == VT_INT || bt == VT_LLONG);
1909 /* check types for comparison or subtraction of pointers */
1910 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1912 CType *type1, *type2, tmp_type1, tmp_type2;
1913 int bt1, bt2;
1915 /* null pointers are accepted for all comparisons, as in gcc */
1916 if (is_null_pointer(p1) || is_null_pointer(p2))
1917 return;
1918 type1 = &p1->type;
1919 type2 = &p2->type;
1920 bt1 = type1->t & VT_BTYPE;
1921 bt2 = type2->t & VT_BTYPE;
1922 /* accept comparison between pointer and integer with a warning */
1923 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1924 if (op != TOK_LOR && op != TOK_LAND )
1925 tcc_warning("comparison between pointer and integer");
1926 return;
1929 /* both must be pointers or implicit function pointers */
1930 if (bt1 == VT_PTR) {
1931 type1 = pointed_type(type1);
1932 } else if (bt1 != VT_FUNC)
1933 goto invalid_operands;
1935 if (bt2 == VT_PTR) {
1936 type2 = pointed_type(type2);
1937 } else if (bt2 != VT_FUNC) {
1938 invalid_operands:
1939 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1941 if ((type1->t & VT_BTYPE) == VT_VOID ||
1942 (type2->t & VT_BTYPE) == VT_VOID)
1943 return;
1944 tmp_type1 = *type1;
1945 tmp_type2 = *type2;
1946 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1947 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1948 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1949 /* gcc-like error if '-' is used */
1950 if (op == '-')
1951 goto invalid_operands;
1952 else
1953 tcc_warning("comparison of distinct pointer types lacks a cast");
1957 /* generic gen_op: handles type problems */
1958 ST_FUNC void gen_op(int op)
1960 int u, t1, t2, bt1, bt2, t;
1961 CType type1;
1963 redo:
1964 t1 = vtop[-1].type.t;
1965 t2 = vtop[0].type.t;
1966 bt1 = t1 & VT_BTYPE;
1967 bt2 = t2 & VT_BTYPE;
1969 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1970 tcc_error("operation on a struct");
1971 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1972 if (bt2 == VT_FUNC) {
1973 mk_pointer(&vtop->type);
1974 gaddrof();
1976 if (bt1 == VT_FUNC) {
1977 vswap();
1978 mk_pointer(&vtop->type);
1979 gaddrof();
1980 vswap();
1982 goto redo;
1983 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1984 /* at least one operand is a pointer */
1985 /* relational op: both operands must be pointers */
1986 if (op >= TOK_ULT && op <= TOK_LOR) {
1987 check_comparison_pointer_types(vtop - 1, vtop, op);
1988 /* pointers are handled as unsigned */
1989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1990 t = VT_LLONG | VT_UNSIGNED;
1991 #else
1992 t = VT_INT | VT_UNSIGNED;
1993 #endif
1994 goto std_op;
1996 /* if both pointers, then it must be the '-' op */
1997 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1998 if (op != '-')
1999 tcc_error("cannot use pointers here");
2000 check_comparison_pointer_types(vtop - 1, vtop, op);
2001 /* XXX: check that types are compatible */
2002 if (vtop[-1].type.t & VT_VLA) {
2003 vla_runtime_pointed_size(&vtop[-1].type);
2004 } else {
2005 vpushi(pointed_size(&vtop[-1].type));
2007 vrott(3);
2008 gen_opic(op);
2009 /* set to integer type */
2010 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2011 vtop->type.t = VT_LLONG;
2012 #else
2013 vtop->type.t = VT_INT;
2014 #endif
2015 vswap();
2016 gen_op(TOK_PDIV);
2017 } else {
2018 /* exactly one pointer : must be '+' or '-'. */
2019 if (op != '-' && op != '+')
2020 tcc_error("cannot use pointers here");
2021 /* Put pointer as first operand */
2022 if (bt2 == VT_PTR) {
2023 vswap();
2024 swap(&t1, &t2);
2026 #if PTR_SIZE == 4
2027 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2028 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2029 gen_cast(&int_type);
2030 #endif
2031 type1 = vtop[-1].type;
2032 type1.t &= ~VT_ARRAY;
2033 if (vtop[-1].type.t & VT_VLA)
2034 vla_runtime_pointed_size(&vtop[-1].type);
2035 else {
2036 u = pointed_size(&vtop[-1].type);
2037 if (u < 0)
2038 tcc_error("unknown array element size");
2039 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2040 vpushll(u);
2041 #else
2042 /* XXX: cast to int ? (long long case) */
2043 vpushi(u);
2044 #endif
2046 gen_op('*');
2047 #if 0
2048 /* #ifdef CONFIG_TCC_BCHECK
2049 The main reason for removing this code:
2050 #include <stdio.h>
2051 int main ()
2053 int v[10];
2054 int i = 10;
2055 int j = 9;
2056 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2057 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2059 When this code is enabled, the output looks like
2060 v+i-j = 0xfffffffe
2061 v+(i-j) = 0xbff84000
2063 /* if evaluating constant expression, no code should be
2064 generated, so no bound check */
2065 if (tcc_state->do_bounds_check && !const_wanted) {
2066 /* if bounded pointers, we generate a special code to
2067 test bounds */
2068 if (op == '-') {
2069 vpushi(0);
2070 vswap();
2071 gen_op('-');
2073 gen_bounded_ptr_add();
2074 } else
2075 #endif
2077 gen_opic(op);
2080 /* restore the type in case gen_opic() swapped operands */
2080 vtop->type = type1;
2082 } else if (is_float(bt1) || is_float(bt2)) {
2083 /* compute bigger type and do implicit casts */
2084 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2085 t = VT_LDOUBLE;
2086 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2087 t = VT_DOUBLE;
2088 } else {
2089 t = VT_FLOAT;
2091 /* floats can only be used for a few operations */
2092 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2093 (op < TOK_ULT || op > TOK_GT))
2094 tcc_error("invalid operands for binary operation");
2095 goto std_op;
2096 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2097 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2098 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2099 t |= VT_UNSIGNED;
2100 goto std_op;
2101 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2102 /* cast to biggest op */
2103 t = VT_LLONG;
2104 /* convert to unsigned if it does not fit in a long long */
2105 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2106 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2107 t |= VT_UNSIGNED;
2108 goto std_op;
2109 } else {
2110 /* integer operations */
2111 t = VT_INT;
2112 /* convert to unsigned if it does not fit in an integer */
2113 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2114 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2115 t |= VT_UNSIGNED;
2116 std_op:
2117 /* XXX: currently, some unsigned operations are explicit, so
2118 we modify them here */
2119 if (t & VT_UNSIGNED) {
2120 if (op == TOK_SAR)
2121 op = TOK_SHR;
2122 else if (op == '/')
2123 op = TOK_UDIV;
2124 else if (op == '%')
2125 op = TOK_UMOD;
2126 else if (op == TOK_LT)
2127 op = TOK_ULT;
2128 else if (op == TOK_GT)
2129 op = TOK_UGT;
2130 else if (op == TOK_LE)
2131 op = TOK_ULE;
2132 else if (op == TOK_GE)
2133 op = TOK_UGE;
2135 vswap();
2136 type1.t = t;
2137 gen_cast(&type1);
2138 vswap();
2139 /* special case for shifts and long long: we keep the shift as
2140 an integer */
2141 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2142 type1.t = VT_INT;
2143 gen_cast(&type1);
2144 if (is_float(t))
2145 gen_opif(op);
2146 else
2147 gen_opic(op);
2148 if (op >= TOK_ULT && op <= TOK_GT) {
2149 /* relational op: the result is an int */
2150 vtop->type.t = VT_INT;
2151 } else {
2152 vtop->type.t = t;
2155 // Make sure that we have converted to an rvalue:
2156 if (vtop->r & VT_LVAL)
2157 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2160 #ifndef TCC_TARGET_ARM
2161 /* generic itof for unsigned long long case */
2162 static void gen_cvt_itof1(int t)
2164 #ifdef TCC_TARGET_ARM64
2165 gen_cvt_itof(t);
2166 #else
2167 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2168 (VT_LLONG | VT_UNSIGNED)) {
2170 if (t == VT_FLOAT)
2171 vpush_global_sym(&func_old_type, TOK___floatundisf);
2172 #if LDOUBLE_SIZE != 8
2173 else if (t == VT_LDOUBLE)
2174 vpush_global_sym(&func_old_type, TOK___floatundixf);
2175 #endif
2176 else
2177 vpush_global_sym(&func_old_type, TOK___floatundidf);
2178 vrott(2);
2179 gfunc_call(1);
2180 vpushi(0);
2181 vtop->r = reg_fret(t);
2182 } else {
2183 gen_cvt_itof(t);
2185 #endif
2187 #endif
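/* Illustrative sketch of what the helpers above amount to, assuming a
   target without native unsigned-64-bit-to-float support: C code such as

       unsigned long long u;
       double d = u;    // compiled as a call to __floatundidf(u)
       float  f = u;    // compiled as a call to __floatundisf(u)

   becomes an ordinary function call whose result is fetched from the
   float return register (reg_fret). */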
2189 /* generic ftoi for unsigned long long case */
2190 static void gen_cvt_ftoi1(int t)
2192 #ifdef TCC_TARGET_ARM64
2193 gen_cvt_ftoi(t);
2194 #else
2195 int st;
2197 if (t == (VT_LLONG | VT_UNSIGNED)) {
2198 /* not handled natively */
2199 st = vtop->type.t & VT_BTYPE;
2200 if (st == VT_FLOAT)
2201 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2202 #if LDOUBLE_SIZE != 8
2203 else if (st == VT_LDOUBLE)
2204 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2205 #endif
2206 else
2207 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2208 vrott(2);
2209 gfunc_call(1);
2210 vpushi(0);
2211 vtop->r = REG_IRET;
2212 vtop->r2 = REG_LRET;
2213 } else {
2214 gen_cvt_ftoi(t);
2216 #endif
2219 /* force char or short cast */
2220 static void force_charshort_cast(int t)
2222 int bits, dbt;
2223 dbt = t & VT_BTYPE;
2224 /* XXX: add optimization if lvalue : just change type and offset */
2225 if (dbt == VT_BYTE)
2226 bits = 8;
2227 else
2228 bits = 16;
2229 if (t & VT_UNSIGNED) {
2230 vpushi((1 << bits) - 1);
2231 gen_op('&');
2232 } else {
2233 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2234 bits = 64 - bits;
2235 else
2236 bits = 32 - bits;
2237 vpushi(bits);
2238 gen_op(TOK_SHL);
2239 /* the result must be signed, or the SAR is converted to an SHR.
2240 This was not the case when "t" was a signed short
2241 and the last value on the stack was an unsigned int */
2242 vtop->type.t &= ~VT_UNSIGNED;
2243 vpushi(bits);
2244 gen_op(TOK_SAR);
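/* Illustrative sketch of the delayed char/short cast above, assuming a
   32-bit int on the target; the cast is expressed with plain integer ops
   on the value stack:

       (unsigned char)x   ->   x & 0xff           // mask, unsigned case
       (signed char)x     ->   (x << 24) >> 24    // shift pair, signed case

   The right shift must be arithmetic, which is why VT_UNSIGNED is cleared
   before TOK_SAR is emitted. */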
2248 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2249 static void gen_cast(CType *type)
2251 int sbt, dbt, sf, df, c, p;
2253 /* special delayed cast for char/short */
2254 /* XXX: in some cases (multiple cascaded casts), it may still
2255 be incorrect */
2256 if (vtop->r & VT_MUSTCAST) {
2257 vtop->r &= ~VT_MUSTCAST;
2258 force_charshort_cast(vtop->type.t);
2261 /* bitfields first get cast to ints */
2262 if (vtop->type.t & VT_BITFIELD) {
2263 gv(RC_INT);
2266 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2267 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2269 if (sbt != dbt) {
2270 sf = is_float(sbt);
2271 df = is_float(dbt);
2272 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2273 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2274 if (c) {
2275 /* constant case: we can do it now */
2276 /* XXX: in ISOC, cannot do it if error in convert */
2277 if (sbt == VT_FLOAT)
2278 vtop->c.ld = vtop->c.f;
2279 else if (sbt == VT_DOUBLE)
2280 vtop->c.ld = vtop->c.d;
2282 if (df) {
2283 if ((sbt & VT_BTYPE) == VT_LLONG) {
2284 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2285 vtop->c.ld = vtop->c.i;
2286 else
2287 vtop->c.ld = -(long double)-vtop->c.i;
2288 } else if(!sf) {
2289 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2290 vtop->c.ld = (uint32_t)vtop->c.i;
2291 else
2292 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2295 if (dbt == VT_FLOAT)
2296 vtop->c.f = (float)vtop->c.ld;
2297 else if (dbt == VT_DOUBLE)
2298 vtop->c.d = (double)vtop->c.ld;
2299 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2300 vtop->c.i = vtop->c.ld;
2301 } else if (sf && dbt == VT_BOOL) {
2302 vtop->c.i = (vtop->c.ld != 0);
2303 } else {
2304 if(sf)
2305 vtop->c.i = vtop->c.ld;
2306 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2308 else if (sbt & VT_UNSIGNED)
2309 vtop->c.i = (uint32_t)vtop->c.i;
2310 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2311 else if (sbt == VT_PTR)
2313 #endif
2314 else if (sbt != VT_LLONG)
2315 vtop->c.i = ((uint32_t)vtop->c.i |
2316 -(vtop->c.i & 0x80000000));
2318 if (dbt == (VT_LLONG|VT_UNSIGNED))
2320 else if (dbt == VT_BOOL)
2321 vtop->c.i = (vtop->c.i != 0);
2322 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2323 else if (dbt == VT_PTR)
2325 #endif
2326 else if (dbt != VT_LLONG) {
2327 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2328 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2329 0xffffffff);
2330 vtop->c.i &= m;
2331 if (!(dbt & VT_UNSIGNED))
2332 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2335 } else if (p && dbt == VT_BOOL) {
2336 vtop->r = VT_CONST;
2337 vtop->c.i = 1;
2338 } else {
2339 /* non constant case: generate code */
2340 if (sf && df) {
2341 /* convert from fp to fp */
2342 gen_cvt_ftof(dbt);
2343 } else if (df) {
2344 /* convert int to fp */
2345 gen_cvt_itof1(dbt);
2346 } else if (sf) {
2347 /* convert fp to int */
2348 if (dbt == VT_BOOL) {
2349 vpushi(0);
2350 gen_op(TOK_NE);
2351 } else {
2352 /* we handle char/short/etc... with generic code */
2353 if (dbt != (VT_INT | VT_UNSIGNED) &&
2354 dbt != (VT_LLONG | VT_UNSIGNED) &&
2355 dbt != VT_LLONG)
2356 dbt = VT_INT;
2357 gen_cvt_ftoi1(dbt);
2358 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2359 /* additional cast for char/short... */
2360 vtop->type.t = dbt;
2361 gen_cast(type);
2364 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2365 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2366 if ((sbt & VT_BTYPE) != VT_LLONG) {
2367 /* scalar to long long */
2368 /* machine independent conversion */
2369 gv(RC_INT);
2370 /* generate high word */
2371 if (sbt == (VT_INT | VT_UNSIGNED)) {
2372 vpushi(0);
2373 gv(RC_INT);
2374 } else {
2375 if (sbt == VT_PTR) {
2376 /* cast from pointer to int before we apply
2377 shift operation, which pointers don't support */
2378 gen_cast(&int_type);
2380 gv_dup();
2381 vpushi(31);
2382 gen_op(TOK_SAR);
2384 /* patch second register */
2385 vtop[-1].r2 = vtop->r;
2386 vpop();
2388 #else
2389 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2390 (dbt & VT_BTYPE) == VT_PTR ||
2391 (dbt & VT_BTYPE) == VT_FUNC) {
2392 if ((sbt & VT_BTYPE) != VT_LLONG &&
2393 (sbt & VT_BTYPE) != VT_PTR &&
2394 (sbt & VT_BTYPE) != VT_FUNC) {
2395 /* need to convert from 32bit to 64bit */
2396 gv(RC_INT);
2397 if (sbt != (VT_INT | VT_UNSIGNED)) {
2398 #if defined(TCC_TARGET_ARM64)
2399 gen_cvt_sxtw();
2400 #elif defined(TCC_TARGET_X86_64)
2401 int r = gv(RC_INT);
2402 /* x86_64 specific: movslq */
2403 o(0x6348);
2404 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2405 #else
2406 #error
2407 #endif
2410 #endif
2411 } else if (dbt == VT_BOOL) {
2412 /* scalar to bool */
2413 vpushi(0);
2414 gen_op(TOK_NE);
2415 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2416 (dbt & VT_BTYPE) == VT_SHORT) {
2417 if (sbt == VT_PTR) {
2418 vtop->type.t = VT_INT;
2419 tcc_warning("nonportable conversion from pointer to char/short");
2421 force_charshort_cast(dbt);
2422 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2423 } else if ((dbt & VT_BTYPE) == VT_INT) {
2424 /* scalar to int */
2425 if ((sbt & VT_BTYPE) == VT_LLONG) {
2426 /* from long long: just take low order word */
2427 lexpand();
2428 vpop();
2430 /* if lvalue and single word type, nothing to do because
2431 the lvalue already contains the real type size (see
2432 VT_LVAL_xxx constants) */
2433 #endif
2436 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2437 /* if we are casting between pointer types,
2438 we must update the VT_LVAL_xxx size */
2439 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2440 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2442 vtop->type = *type;
2445 /* return type size as known at compile time. Put alignment at 'a' */
2446 ST_FUNC int type_size(CType *type, int *a)
2448 Sym *s;
2449 int bt;
2451 bt = type->t & VT_BTYPE;
2452 if (bt == VT_STRUCT) {
2453 /* struct/union */
2454 s = type->ref;
2455 *a = s->r;
2456 return s->c;
2457 } else if (bt == VT_PTR) {
2458 if (type->t & VT_ARRAY) {
2459 int ts;
2461 s = type->ref;
2462 ts = type_size(&s->type, a);
2464 if (ts < 0 && s->c < 0)
2465 ts = -ts;
2467 return ts * s->c;
2468 } else {
2469 *a = PTR_SIZE;
2470 return PTR_SIZE;
2472 } else if (bt == VT_LDOUBLE) {
2473 *a = LDOUBLE_ALIGN;
2474 return LDOUBLE_SIZE;
2475 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2476 #ifdef TCC_TARGET_I386
2477 #ifdef TCC_TARGET_PE
2478 *a = 8;
2479 #else
2480 *a = 4;
2481 #endif
2482 #elif defined(TCC_TARGET_ARM)
2483 #ifdef TCC_ARM_EABI
2484 *a = 8;
2485 #else
2486 *a = 4;
2487 #endif
2488 #else
2489 *a = 8;
2490 #endif
2491 return 8;
2492 } else if (bt == VT_INT || bt == VT_FLOAT) {
2493 *a = 4;
2494 return 4;
2495 } else if (bt == VT_SHORT) {
2496 *a = 2;
2497 return 2;
2498 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2499 *a = 8;
2500 return 16;
2501 } else if (bt == VT_ENUM) {
2502 *a = 4;
2503 /* Enums might be incomplete, so don't just return '4' here. */
2504 return type->ref->c;
2505 } else {
2506 /* char, void, function, _Bool */
2507 *a = 1;
2508 return 1;
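/* Illustrative values returned by type_size(), assuming a typical 32-bit
   target (the exact alignments follow the #ifdefs above):

       int          -> size 4,  *a = 4
       short        -> size 2,  *a = 2
       long long    -> size 8,  *a = 4 or 8 depending on the target/ABI
       int[10]      -> size 40, *a = 4    (element size times s->c)
       struct/union -> size s->c, *a = s->r, as computed by struct_layout()
*/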
2512 /* push type size as known at run time on top of value stack. Put
2513 alignment at 'a' */
2514 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2516 if (type->t & VT_VLA) {
2517 type_size(&type->ref->type, a);
2518 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2519 } else {
2520 vpushi(type_size(type, a));
2524 static void vla_sp_restore(void) {
2525 if (vlas_in_scope) {
2526 gen_vla_sp_restore(vla_sp_loc);
2530 static void vla_sp_restore_root(void) {
2531 if (vlas_in_scope) {
2532 gen_vla_sp_restore(vla_sp_root_loc);
2536 /* return the pointed type of t */
2537 static inline CType *pointed_type(CType *type)
2539 return &type->ref->type;
2542 /* modify type so that it becomes a pointer to the original type */
2543 ST_FUNC void mk_pointer(CType *type)
2545 Sym *s;
2546 s = sym_push(SYM_FIELD, type, 0, -1);
2547 type->t = VT_PTR | (type->t & ~VT_TYPE);
2548 type->ref = s;
2551 /* compare function types. OLD functions match any new functions */
2552 static int is_compatible_func(CType *type1, CType *type2)
2554 Sym *s1, *s2;
2556 s1 = type1->ref;
2557 s2 = type2->ref;
2558 if (!is_compatible_types(&s1->type, &s2->type))
2559 return 0;
2560 /* check func_call */
2561 if (s1->a.func_call != s2->a.func_call)
2562 return 0;
2563 /* XXX: not complete */
2564 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2565 return 1;
2566 if (s1->c != s2->c)
2567 return 0;
2568 while (s1 != NULL) {
2569 if (s2 == NULL)
2570 return 0;
2571 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2572 return 0;
2573 s1 = s1->next;
2574 s2 = s2->next;
2576 if (s2)
2577 return 0;
2578 return 1;
2581 /* return true if type1 and type2 are the same. If unqualified is
2582 true, qualifiers on the types are ignored.
2584 - enums are not checked, as with gcc's __builtin_types_compatible_p ()
2585 */
2586 static int compare_types(CType *type1, CType *type2, int unqualified)
2588 int bt1, t1, t2;
2590 t1 = type1->t & VT_TYPE;
2591 t2 = type2->t & VT_TYPE;
2592 if (unqualified) {
2593 /* strip qualifiers before comparing */
2594 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2595 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2597 /* Default vs. explicit signedness only matters for char */
2598 if ((t1 & VT_BTYPE) != VT_BYTE) {
2599 t1 &= ~VT_DEFSIGN;
2600 t2 &= ~VT_DEFSIGN;
2602 /* An enum is compatible with (unsigned) int. Ideally we would
2603 store the enums signedness in type->ref.a.<some_bit> and
2604 only accept unsigned enums with unsigned int and vice versa.
2605 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2606 from pointer target types, so we can't add it here either. */
2607 if ((t1 & VT_BTYPE) == VT_ENUM) {
2608 t1 = VT_INT;
2609 if (type1->ref->a.unsigned_enum)
2610 t1 |= VT_UNSIGNED;
2612 if ((t2 & VT_BTYPE) == VT_ENUM) {
2613 t2 = VT_INT;
2614 if (type2->ref->a.unsigned_enum)
2615 t2 |= VT_UNSIGNED;
2617 /* XXX: bitfields ? */
2618 if (t1 != t2)
2619 return 0;
2620 /* test more complicated cases */
2621 bt1 = t1 & VT_BTYPE;
2622 if (bt1 == VT_PTR) {
2623 type1 = pointed_type(type1);
2624 type2 = pointed_type(type2);
2625 return is_compatible_types(type1, type2);
2626 } else if (bt1 == VT_STRUCT) {
2627 return (type1->ref == type2->ref);
2628 } else if (bt1 == VT_FUNC) {
2629 return is_compatible_func(type1, type2);
2630 } else {
2631 return 1;
2635 /* return true if type1 and type2 are exactly the same (including
2636 qualifiers).
2638 static int is_compatible_types(CType *type1, CType *type2)
2640 return compare_types(type1,type2,0);
2643 /* return true if type1 and type2 are the same (ignoring qualifiers).
2645 static int is_compatible_parameter_types(CType *type1, CType *type2)
2647 return compare_types(type1,type2,1);
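/* Illustrative behaviour of the comparison helpers above:

       int *        vs  int *        -> compatible
       const int *  vs  int *        -> not compatible (the pointed-to
                                        qualifiers differ)
       const int    vs  int          -> compatible as parameter types only,
                                        because compare_types() is then
                                        called with unqualified == 1
       enum E       vs  int          -> compatible when E is a signed enum,
                                        since VT_ENUM is folded to VT_INT
*/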
2650 /* print a type. If 'varstr' is not NULL, then the variable is also
2651 printed in the type */
2652 /* XXX: union */
2653 /* XXX: add array and function pointers */
2654 static void type_to_str(char *buf, int buf_size,
2655 CType *type, const char *varstr)
2657 int bt, v, t;
2658 Sym *s, *sa;
2659 char buf1[256];
2660 const char *tstr;
2662 t = type->t & VT_TYPE;
2663 bt = t & VT_BTYPE;
2664 buf[0] = '\0';
2665 if (t & VT_CONSTANT)
2666 pstrcat(buf, buf_size, "const ");
2667 if (t & VT_VOLATILE)
2668 pstrcat(buf, buf_size, "volatile ");
2669 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2670 pstrcat(buf, buf_size, "unsigned ");
2671 else if (t & VT_DEFSIGN)
2672 pstrcat(buf, buf_size, "signed ");
2673 switch(bt) {
2674 case VT_VOID:
2675 tstr = "void";
2676 goto add_tstr;
2677 case VT_BOOL:
2678 tstr = "_Bool";
2679 goto add_tstr;
2680 case VT_BYTE:
2681 tstr = "char";
2682 goto add_tstr;
2683 case VT_SHORT:
2684 tstr = "short";
2685 goto add_tstr;
2686 case VT_INT:
2687 tstr = "int";
2688 goto add_tstr;
2689 case VT_LONG:
2690 tstr = "long";
2691 goto add_tstr;
2692 case VT_LLONG:
2693 tstr = "long long";
2694 goto add_tstr;
2695 case VT_FLOAT:
2696 tstr = "float";
2697 goto add_tstr;
2698 case VT_DOUBLE:
2699 tstr = "double";
2700 goto add_tstr;
2701 case VT_LDOUBLE:
2702 tstr = "long double";
2703 add_tstr:
2704 pstrcat(buf, buf_size, tstr);
2705 break;
2706 case VT_ENUM:
2707 case VT_STRUCT:
2708 if (bt == VT_STRUCT)
2709 tstr = "struct ";
2710 else
2711 tstr = "enum ";
2712 pstrcat(buf, buf_size, tstr);
2713 v = type->ref->v & ~SYM_STRUCT;
2714 if (v >= SYM_FIRST_ANOM)
2715 pstrcat(buf, buf_size, "<anonymous>");
2716 else
2717 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2718 break;
2719 case VT_FUNC:
2720 s = type->ref;
2721 type_to_str(buf, buf_size, &s->type, varstr);
2722 pstrcat(buf, buf_size, "(");
2723 sa = s->next;
2724 while (sa != NULL) {
2725 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2726 pstrcat(buf, buf_size, buf1);
2727 sa = sa->next;
2728 if (sa)
2729 pstrcat(buf, buf_size, ", ");
2731 pstrcat(buf, buf_size, ")");
2732 goto no_var;
2733 case VT_PTR:
2734 s = type->ref;
2735 if (t & VT_ARRAY) {
2736 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2737 type_to_str(buf, buf_size, &s->type, buf1);
2738 goto no_var;
2740 pstrcpy(buf1, sizeof(buf1), "*");
2741 if (t & VT_CONSTANT)
2742 pstrcat(buf1, buf_size, "const ");
2743 if (t & VT_VOLATILE)
2744 pstrcat(buf1, buf_size, "volatile ");
2745 if (varstr)
2746 pstrcat(buf1, sizeof(buf1), varstr);
2747 type_to_str(buf, buf_size, &s->type, buf1);
2748 goto no_var;
2750 if (varstr) {
2751 pstrcat(buf, buf_size, " ");
2752 pstrcat(buf, buf_size, varstr);
2754 no_var: ;
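/* Illustrative output of type_to_str(); the exact spelling depends on the
   type flags, but roughly:

       pointer to const unsigned char, varstr "p"  ->  "const unsigned char *p"
       function int(int, char), varstr "f"         ->  "int f(int, char)"
       array of 8 short, varstr "v"                ->  "short v[8]"
*/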
2757 /* verify type compatibility to store vtop in 'dt' type, and generate
2758 casts if needed. */
2759 static void gen_assign_cast(CType *dt)
2761 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2762 char buf1[256], buf2[256];
2763 int dbt, sbt;
2765 st = &vtop->type; /* source type */
2766 dbt = dt->t & VT_BTYPE;
2767 sbt = st->t & VT_BTYPE;
2768 if (sbt == VT_VOID || dbt == VT_VOID) {
2769 if (sbt == VT_VOID && dbt == VT_VOID)
2770 ; /*
2771 It is Ok if both are void
2772 A test program:
2773 void func1() {}
2774 void func2() {
2775 return func1();
2777 gcc accepts this program
2779 else
2780 tcc_error("cannot cast from/to void");
2782 if (dt->t & VT_CONSTANT)
2783 tcc_warning("assignment of read-only location");
2784 switch(dbt) {
2785 case VT_PTR:
2786 /* special cases for pointers */
2787 /* '0' can also be a pointer */
2788 if (is_null_pointer(vtop))
2789 goto type_ok;
2790 /* accept implicit pointer to integer cast with warning */
2791 if (is_integer_btype(sbt)) {
2792 tcc_warning("assignment makes pointer from integer without a cast");
2793 goto type_ok;
2795 type1 = pointed_type(dt);
2796 /* a function is implicitly a function pointer */
2797 if (sbt == VT_FUNC) {
2798 if ((type1->t & VT_BTYPE) != VT_VOID &&
2799 !is_compatible_types(pointed_type(dt), st))
2800 tcc_warning("assignment from incompatible pointer type");
2801 goto type_ok;
2803 if (sbt != VT_PTR)
2804 goto error;
2805 type2 = pointed_type(st);
2806 if ((type1->t & VT_BTYPE) == VT_VOID ||
2807 (type2->t & VT_BTYPE) == VT_VOID) {
2808 /* void * can match anything */
2809 } else {
2810 /* exact type match, except for qualifiers */
2811 tmp_type1 = *type1;
2812 tmp_type2 = *type2;
2813 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2814 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2815 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2816 /* Like GCC, don't warn by default for mere changes
2817 in pointer target signedness. Do warn for different
2818 base types, though, in particular for unsigned enums
2819 and signed int targets. */
2820 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2821 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2822 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2824 else
2825 tcc_warning("assignment from incompatible pointer type");
2828 /* check const and volatile */
2829 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2830 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2831 tcc_warning("assignment discards qualifiers from pointer target type");
2832 break;
2833 case VT_BYTE:
2834 case VT_SHORT:
2835 case VT_INT:
2836 case VT_LLONG:
2837 if (sbt == VT_PTR || sbt == VT_FUNC) {
2838 tcc_warning("assignment makes integer from pointer without a cast");
2839 } else if (sbt == VT_STRUCT) {
2840 goto case_VT_STRUCT;
2842 /* XXX: more tests */
2843 break;
2844 case VT_STRUCT:
2845 case_VT_STRUCT:
2846 tmp_type1 = *dt;
2847 tmp_type2 = *st;
2848 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2849 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2850 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2851 error:
2852 type_to_str(buf1, sizeof(buf1), st, NULL);
2853 type_to_str(buf2, sizeof(buf2), dt, NULL);
2854 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2856 break;
2858 type_ok:
2859 gen_cast(dt);
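/* Illustrative diagnostics produced by the checks above:

       int *p; int n; const int *cp; struct A a; struct B b;

       p = n;    -> warning: assignment makes pointer from integer without a cast
       n = p;    -> warning: assignment makes integer from pointer without a cast
       p = cp;   -> warning: assignment discards qualifiers from pointer target type
       a = b;    -> error:   cannot cast 'struct B' to 'struct A'
*/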
2862 /* store vtop in lvalue pushed on stack */
2863 ST_FUNC void vstore(void)
2865 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2867 ft = vtop[-1].type.t;
2868 sbt = vtop->type.t & VT_BTYPE;
2869 dbt = ft & VT_BTYPE;
2870 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2871 (sbt == VT_INT && dbt == VT_SHORT))
2872 && !(vtop->type.t & VT_BITFIELD)) {
2873 /* optimize char/short casts */
2874 delayed_cast = VT_MUSTCAST;
2875 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2876 ((1 << VT_STRUCT_SHIFT) - 1));
2877 /* XXX: factorize */
2878 if (ft & VT_CONSTANT)
2879 tcc_warning("assignment of read-only location");
2880 } else {
2881 delayed_cast = 0;
2882 if (!(ft & VT_BITFIELD))
2883 gen_assign_cast(&vtop[-1].type);
2886 if (sbt == VT_STRUCT) {
2887 /* if structure, only generate pointer */
2888 /* structure assignment : generate memcpy */
2889 /* XXX: optimize if small size */
2890 size = type_size(&vtop->type, &align);
2892 /* destination */
2893 vswap();
2894 vtop->type.t = VT_PTR;
2895 gaddrof();
2897 /* address of memcpy() */
2898 #ifdef TCC_ARM_EABI
2899 if(!(align & 7))
2900 vpush_global_sym(&func_old_type, TOK_memcpy8);
2901 else if(!(align & 3))
2902 vpush_global_sym(&func_old_type, TOK_memcpy4);
2903 else
2904 #endif
2905 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2906 vpush_global_sym(&func_old_type, TOK_memmove);
2908 vswap();
2909 /* source */
2910 vpushv(vtop - 2);
2911 vtop->type.t = VT_PTR;
2912 gaddrof();
2913 /* type size */
2914 vpushi(size);
2915 gfunc_call(3);
2917 /* leave source on stack */
2918 } else if (ft & VT_BITFIELD) {
2919 /* bitfield store handling */
2921 /* save lvalue as expression result (example: s.b = s.a = n;) */
2922 vdup(), vtop[-1] = vtop[-2];
2924 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2925 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2926 /* remove bit field info to avoid loops */
2927 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2929 if((ft & VT_BTYPE) == VT_BOOL) {
2930 gen_cast(&vtop[-1].type);
2931 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2934 /* duplicate destination */
2935 vdup();
2936 vtop[-1] = vtop[-2];
2938 /* mask and shift source */
2939 if((ft & VT_BTYPE) != VT_BOOL) {
2940 if((ft & VT_BTYPE) == VT_LLONG) {
2941 vpushll((1ULL << bit_size) - 1ULL);
2942 } else {
2943 vpushi((1 << bit_size) - 1);
2945 gen_op('&');
2947 vpushi(bit_pos);
2948 gen_op(TOK_SHL);
2949 /* load destination, mask and or with source */
2950 vswap();
2951 if((ft & VT_BTYPE) == VT_LLONG) {
2952 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2953 } else {
2954 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2956 gen_op('&');
2957 gen_op('|');
2958 /* store result */
2959 vstore();
2960 /* ... and discard */
2961 vpop();
2963 } else {
2964 #ifdef CONFIG_TCC_BCHECK
2965 /* bound check case */
2966 if (vtop[-1].r & VT_MUSTBOUND) {
2967 vswap();
2968 gbound();
2969 vswap();
2971 #endif
2972 rc = RC_INT;
2973 if (is_float(ft)) {
2974 rc = RC_FLOAT;
2975 #ifdef TCC_TARGET_X86_64
2976 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2977 rc = RC_ST0;
2978 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2979 rc = RC_FRET;
2981 #endif
2983 r = gv(rc); /* generate value */
2984 /* if lvalue was saved on stack, must read it */
2985 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2986 SValue sv;
2987 t = get_reg(RC_INT);
2988 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2989 sv.type.t = VT_PTR;
2990 #else
2991 sv.type.t = VT_INT;
2992 #endif
2993 sv.r = VT_LOCAL | VT_LVAL;
2994 sv.c.i = vtop[-1].c.i;
2995 load(t, &sv);
2996 vtop[-1].r = t | VT_LVAL;
2998 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2999 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3000 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3001 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3002 #else
3003 if ((ft & VT_BTYPE) == VT_LLONG) {
3004 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3005 #endif
3006 vtop[-1].type.t = load_type;
3007 store(r, vtop - 1);
3008 vswap();
3009 /* convert to int to increment easily */
3010 vtop->type.t = addr_type;
3011 gaddrof();
3012 vpushi(load_size);
3013 gen_op('+');
3014 vtop->r |= VT_LVAL;
3015 vswap();
3016 vtop[-1].type.t = load_type;
3017 /* XXX: it works because r2 is spilled last ! */
3018 store(vtop->r2, vtop - 1);
3019 } else {
3020 store(r, vtop - 1);
3023 vswap();
3024 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3025 vtop->r |= delayed_cast;
3029 /* 'post' selects post- vs. pre-increment/decrement. c is the token ++ or -- */
3030 ST_FUNC void inc(int post, int c)
3032 test_lvalue();
3033 vdup(); /* save lvalue */
3034 if (post) {
3035 gv_dup(); /* duplicate value */
3036 vrotb(3);
3037 vrotb(3);
3039 /* add constant */
3040 vpushi(c - TOK_MID);
3041 gen_op('+');
3042 vstore(); /* store value */
3043 if (post)
3044 vpop(); /* if post op, return saved value */
3047 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3049 /* read the string */
3050 if (tok != TOK_STR)
3051 expect(msg);
3052 cstr_new(astr);
3053 while (tok == TOK_STR) {
3054 /* XXX: add \0 handling too ? */
3055 cstr_cat(astr, tokc.str.data, -1);
3056 next();
3058 cstr_ccat(astr, '\0');
3061 /* If I is >= 1 and a power of two, returns log2(i)+1.
3062 If I is 0 returns 0. */
3063 static int exact_log2p1(int i)
3065 int ret;
3066 if (!i)
3067 return 0;
3068 for (ret = 1; i >= 1 << 8; ret += 8)
3069 i >>= 8;
3070 if (i >= 1 << 4)
3071 ret += 4, i >>= 4;
3072 if (i >= 1 << 2)
3073 ret += 2, i >>= 2;
3074 if (i >= 1 << 1)
3075 ret++;
3076 return ret;
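/* Illustrative values:

       exact_log2p1(0)  == 0
       exact_log2p1(1)  == 1
       exact_log2p1(2)  == 2
       exact_log2p1(8)  == 4
       exact_log2p1(16) == 5

   parse_attribute() below uses this to store __attribute__((aligned(n)))
   in logarithmic form, so that the small a.aligned field can hold it. */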
3079 /* Parse GNUC __attribute__ extension. Currently, the following
3080 extensions are recognized:
3081 - aligned(n) : set data/function alignment.
3082 - packed : force data alignment to 1
3083 - section(x) : generate data/code in this section.
3084 - unused : currently ignored, but may be used someday.
3085 - regparm(n) : pass function parameters in registers (i386 only)
3087 static void parse_attribute(AttributeDef *ad)
3089 int t, n;
3090 CString astr;
3092 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3093 next();
3094 skip('(');
3095 skip('(');
3096 while (tok != ')') {
3097 if (tok < TOK_IDENT)
3098 expect("attribute name");
3099 t = tok;
3100 next();
3101 switch(t) {
3102 case TOK_SECTION1:
3103 case TOK_SECTION2:
3104 skip('(');
3105 parse_mult_str(&astr, "section name");
3106 ad->section = find_section(tcc_state, (char *)astr.data);
3107 skip(')');
3108 cstr_free(&astr);
3109 break;
3110 case TOK_ALIAS1:
3111 case TOK_ALIAS2:
3112 skip('(');
3113 parse_mult_str(&astr, "alias(\"target\")");
3114 ad->alias_target = /* save string as token, for later */
3115 tok_alloc((char*)astr.data, astr.size-1)->tok;
3116 skip(')');
3117 cstr_free(&astr);
3118 break;
3119 case TOK_VISIBILITY1:
3120 case TOK_VISIBILITY2:
3121 skip('(');
3122 parse_mult_str(&astr,
3123 "visibility(\"default|hidden|internal|protected\")");
3124 if (!strcmp (astr.data, "default"))
3125 ad->a.visibility = STV_DEFAULT;
3126 else if (!strcmp (astr.data, "hidden"))
3127 ad->a.visibility = STV_HIDDEN;
3128 else if (!strcmp (astr.data, "internal"))
3129 ad->a.visibility = STV_INTERNAL;
3130 else if (!strcmp (astr.data, "protected"))
3131 ad->a.visibility = STV_PROTECTED;
3132 else
3133 expect("visibility(\"default|hidden|internal|protected\")");
3134 skip(')');
3135 cstr_free(&astr);
3136 break;
3137 case TOK_ALIGNED1:
3138 case TOK_ALIGNED2:
3139 if (tok == '(') {
3140 next();
3141 n = expr_const();
3142 if (n <= 0 || (n & (n - 1)) != 0)
3143 tcc_error("alignment must be a positive power of two");
3144 skip(')');
3145 } else {
3146 n = MAX_ALIGN;
3148 ad->a.aligned = exact_log2p1(n);
3149 if (n != 1 << (ad->a.aligned - 1))
3150 tcc_error("alignment of %d is larger than implemented", n);
3151 break;
3152 case TOK_PACKED1:
3153 case TOK_PACKED2:
3154 ad->a.packed = 1;
3155 break;
3156 case TOK_WEAK1:
3157 case TOK_WEAK2:
3158 ad->a.weak = 1;
3159 break;
3160 case TOK_UNUSED1:
3161 case TOK_UNUSED2:
3162 /* currently, no need to handle it because tcc does not
3163 track unused objects */
3164 break;
3165 case TOK_NORETURN1:
3166 case TOK_NORETURN2:
3167 /* currently ignored: tcc does not make use of
3168 the noreturn attribute */
3169 break;
3170 case TOK_CDECL1:
3171 case TOK_CDECL2:
3172 case TOK_CDECL3:
3173 ad->a.func_call = FUNC_CDECL;
3174 break;
3175 case TOK_STDCALL1:
3176 case TOK_STDCALL2:
3177 case TOK_STDCALL3:
3178 ad->a.func_call = FUNC_STDCALL;
3179 break;
3180 #ifdef TCC_TARGET_I386
3181 case TOK_REGPARM1:
3182 case TOK_REGPARM2:
3183 skip('(');
3184 n = expr_const();
3185 if (n > 3)
3186 n = 3;
3187 else if (n < 0)
3188 n = 0;
3189 if (n > 0)
3190 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3191 skip(')');
3192 break;
3193 case TOK_FASTCALL1:
3194 case TOK_FASTCALL2:
3195 case TOK_FASTCALL3:
3196 ad->a.func_call = FUNC_FASTCALLW;
3197 break;
3198 #endif
3199 case TOK_MODE:
3200 skip('(');
3201 switch(tok) {
3202 case TOK_MODE_DI:
3203 ad->a.mode = VT_LLONG + 1;
3204 break;
3205 case TOK_MODE_QI:
3206 ad->a.mode = VT_BYTE + 1;
3207 break;
3208 case TOK_MODE_HI:
3209 ad->a.mode = VT_SHORT + 1;
3210 break;
3211 case TOK_MODE_SI:
3212 case TOK_MODE_word:
3213 ad->a.mode = VT_INT + 1;
3214 break;
3215 default:
3216 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3217 break;
3219 next();
3220 skip(')');
3221 break;
3222 case TOK_DLLEXPORT:
3223 ad->a.func_export = 1;
3224 break;
3225 case TOK_DLLIMPORT:
3226 ad->a.func_import = 1;
3227 break;
3228 default:
3229 if (tcc_state->warn_unsupported)
3230 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3231 /* skip parameters */
3232 if (tok == '(') {
3233 int parenthesis = 0;
3234 do {
3235 if (tok == '(')
3236 parenthesis++;
3237 else if (tok == ')')
3238 parenthesis--;
3239 next();
3240 } while (parenthesis && tok != -1);
3242 break;
3244 if (tok != ',')
3245 break;
3246 next();
3248 skip(')');
3249 skip(')');
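/* Illustrative declarations using the attribute syntax accepted above:

       int buf[64] __attribute__((aligned(16), section(".mydata")));
       void die(const char *msg) __attribute__((noreturn));
       int sum(int a, int b) __attribute__((regparm(2)));   // i386 only

   Unknown attribute names are skipped together with their parenthesised
   arguments; a warning is emitted when warn_unsupported is set. */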
3253 static Sym * find_field (CType *type, int v)
3255 Sym *s = type->ref;
3256 v |= SYM_FIELD;
3257 while ((s = s->next) != NULL) {
3258 if ((s->v & SYM_FIELD) &&
3259 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3260 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3261 Sym *ret = find_field (&s->type, v);
3262 if (ret)
3263 return ret;
3265 if (s->v == v)
3266 break;
3268 return s;
3271 static void struct_add_offset (Sym *s, int offset)
3273 while ((s = s->next) != NULL) {
3274 if ((s->v & SYM_FIELD) &&
3275 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3276 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3277 struct_add_offset(s->type.ref, offset);
3278 } else
3279 s->c += offset;
3283 static void struct_layout(CType *type, AttributeDef *ad)
3285 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3286 int pcc = !tcc_state->ms_bitfields;
3287 Sym *f;
3288 if (ad->a.aligned)
3289 maxalign = 1 << (ad->a.aligned - 1);
3290 else
3291 maxalign = 1;
3292 offset = 0;
3293 c = 0;
3294 bit_pos = 0;
3295 prevbt = VT_STRUCT; /* make it never match */
3296 prev_bit_size = 0;
3297 for (f = type->ref->next; f; f = f->next) {
3298 int typealign, bit_size;
3299 int size = type_size(&f->type, &typealign);
3300 if (f->type.t & VT_BITFIELD)
3301 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3302 else
3303 bit_size = -1;
3304 if (bit_size == 0 && pcc) {
3305 /* Zero-width bit-fields in PCC mode aren't affected
3306 by any packing (attribute or pragma). */
3307 align = typealign;
3308 } else if (f->r > 1) {
3309 align = f->r;
3310 } else if (ad->a.packed || f->r == 1) {
3311 align = 1;
3312 /* Packed fields or packed records don't let the base type
3313 influence the record type's alignment. */
3314 typealign = 1;
3315 } else {
3316 align = typealign;
3318 if (type->ref->type.t != TOK_STRUCT) {
3319 if (pcc && bit_size >= 0)
3320 size = (bit_size + 7) >> 3;
3321 /* Bit position is already zero from our caller. */
3322 offset = 0;
3323 if (size > c)
3324 c = size;
3325 } else if (bit_size < 0) {
3326 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3327 prevbt = VT_STRUCT;
3328 prev_bit_size = 0;
3329 c = (c + addbytes + align - 1) & -align;
3330 offset = c;
3331 if (size > 0)
3332 c += size;
3333 bit_pos = 0;
3334 } else {
3335 /* A bit-field. Layout is more complicated. There are two
3336 options TCC implements: PCC compatible and MS compatible
3337 (PCC compatible is what GCC uses for almost all targets).
3338 In PCC layout the overall size of the struct (in c) is
3339 _excluding_ the current run of bit-fields (that is,
3340 there's at least additional bit_pos bits after c). In
3341 MS layout c does include the current run of bit-fields.
3343 This matters for calculating the natural alignment buckets
3344 in PCC mode. */
3346 /* 'align' will be used to influence the record's alignment,
3347 so it's the max of specified and type alignment, except
3348 in certain cases that depend on the mode. */
3349 if (align < typealign)
3350 align = typealign;
3351 if (pcc) {
3352 /* In PCC layout a non-packed bit-field is placed adjacent
3353 to the preceding bit-fields, except if it would overflow
3354 its container (depending on base type) or it's a zero-width
3355 bit-field. Packed non-zero-width bit-fields always are
3356 placed adjacent. */
3357 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3358 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3359 if (bit_size == 0 ||
3360 (typealign != 1 &&
3361 (ofs2 / (typealign * 8)) > (size/typealign))) {
3362 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3363 bit_pos = 0;
3365 offset = c;
3366 /* In PCC layout named bit-fields influence the alignment
3367 of the containing struct using the base type's alignment,
3368 except for packed fields (which here have correct
3369 align/typealign). */
3370 if ((f->v & SYM_FIRST_ANOM))
3371 align = 1;
3372 } else {
3373 bt = f->type.t & VT_BTYPE;
3374 if ((bit_pos + bit_size > size * 8) ||
3375 (bit_size > 0) == (bt != prevbt)) {
3376 c = (c + typealign - 1) & -typealign;
3377 offset = c;
3378 bit_pos = 0;
3379 /* In MS bitfield mode a bit-field run always uses
3380 at least as many bits as the underlying type.
3381 To start a new run it's also required that this
3382 or the last bit-field had non-zero width. */
3383 if (bit_size || prev_bit_size)
3384 c += size;
3386 /* In MS layout the record's alignment is normally
3387 influenced by the field, except for a zero-width
3388 field at the start of a run (but by further zero-width
3389 fields it is again). */
3390 if (bit_size == 0 && prevbt != bt)
3391 align = 1;
3392 prevbt = bt;
3393 prev_bit_size = bit_size;
3395 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3396 | (bit_pos << VT_STRUCT_SHIFT);
3397 bit_pos += bit_size;
3398 if (pcc && bit_pos >= size * 8) {
3399 c += size;
3400 bit_pos -= size * 8;
3403 if (align > maxalign)
3404 maxalign = align;
3405 #if 0
3406 printf("set field %s offset=%d c=%d",
3407 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3408 if (f->type.t & VT_BITFIELD) {
3409 printf(" pos=%d size=%d",
3410 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3411 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3413 printf("\n");
3414 #endif
3416 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3417 Sym *ass;
3418 /* An anonymous struct/union. Adjust member offsets
3419 to reflect the real offset of our containing struct.
3420 Also set the offset of this anon member inside
3421 the outer struct to be zero. Via this it
3422 works when accessing the field offset directly
3423 (from base object), as well as when recursing
3424 members in initializer handling. */
3425 int v2 = f->type.ref->v;
3426 if (!(v2 & SYM_FIELD) &&
3427 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3428 Sym **pps;
3429 /* This happens only with MS extensions. The
3430 anon member has a named struct type, so it
3431 potentially is shared with other references.
3432 We need to unshare members so we can modify
3433 them. */
3434 ass = f->type.ref;
3435 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3436 &f->type.ref->type, 0,
3437 f->type.ref->c);
3438 pps = &f->type.ref->next;
3439 while ((ass = ass->next) != NULL) {
3440 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3441 pps = &((*pps)->next);
3443 *pps = NULL;
3445 struct_add_offset(f->type.ref, offset);
3446 f->c = 0;
3447 } else {
3448 f->c = offset;
3451 f->r = 0;
3453 /* store size and alignment */
3454 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3455 + maxalign - 1) & -maxalign;
3456 type->ref->r = maxalign;
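/* Illustrative example of the two layout modes above.  For

       struct s { char c; int x : 4; int y : 4; };

   the PCC-compatible layout (the default, matching GCC on most targets)
   packs the bit-fields right after 'c', so sizeof(struct s) == 4, while
   the MS-compatible layout (tcc_state->ms_bitfields) opens a fresh int
   storage unit for the bit-field run, so sizeof(struct s) == 8. */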
3459 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3460 static void struct_decl(CType *type, AttributeDef *ad, int u)
3462 int a, v, size, align, flexible, alignoverride;
3463 long c;
3464 int bit_size, bsize, bt;
3465 Sym *s, *ss, **ps;
3466 AttributeDef ad1;
3467 CType type1, btype;
3469 a = tok; /* save decl type */
3470 next();
3471 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3472 parse_attribute(ad);
3473 if (tok != '{') {
3474 v = tok;
3475 next();
3476 /* struct already defined ? return it */
3477 if (v < TOK_IDENT)
3478 expect("struct/union/enum name");
3479 s = struct_find(v);
3480 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3481 if (s->type.t != a)
3482 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3483 goto do_decl;
3485 } else {
3486 v = anon_sym++;
3488 /* Record the original enum/struct/union token. */
3489 type1.t = a;
3490 type1.ref = NULL;
3491 /* we put an undefined size for struct/union */
3492 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3493 s->r = 0; /* default alignment is zero as gcc */
3494 /* put struct/union/enum name in type */
3495 do_decl:
3496 type->t = u;
3497 type->ref = s;
3499 if (tok == '{') {
3500 next();
3501 if (s->c != -1)
3502 tcc_error("struct/union/enum already defined");
3503 /* cannot be empty */
3504 c = 0;
3505 /* empty enums are not allowed */
3506 if (a == TOK_ENUM) {
3507 int seen_neg = 0;
3508 int seen_wide = 0;
3509 for(;;) {
3510 CType *t = &int_type;
3511 v = tok;
3512 if (v < TOK_UIDENT)
3513 expect("identifier");
3514 ss = sym_find(v);
3515 if (ss && !local_stack)
3516 tcc_error("redefinition of enumerator '%s'",
3517 get_tok_str(v, NULL));
3518 next();
3519 if (tok == '=') {
3520 next();
3521 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3522 c = expr_const64();
3523 #else
3524 /* We really want to support long long enums
3525 on i386 as well, but the Sym structure only
3526 holds a 'long' for associated constants,
3527 and enlarging it would bump its size (no
3528 available padding). So punt for now. */
3529 c = expr_const();
3530 #endif
3532 if (c < 0)
3533 seen_neg = 1;
3534 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3535 seen_wide = 1, t = &size_type;
3536 /* enum symbols have static storage */
3537 ss = sym_push(v, t, VT_CONST, c);
3538 ss->type.t |= VT_STATIC;
3539 if (tok != ',')
3540 break;
3541 next();
3542 c++;
3543 /* NOTE: we accept a trailing comma */
3544 if (tok == '}')
3545 break;
3547 if (!seen_neg)
3548 s->a.unsigned_enum = 1;
3549 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3550 skip('}');
3551 } else {
3552 ps = &s->next;
3553 flexible = 0;
3554 while (tok != '}') {
3555 if (!parse_btype(&btype, &ad1)) {
3556 skip(';');
3557 continue;
3559 while (1) {
3560 if (flexible)
3561 tcc_error("flexible array member '%s' not at the end of struct",
3562 get_tok_str(v, NULL));
3563 bit_size = -1;
3564 v = 0;
3565 type1 = btype;
3566 if (tok != ':') {
3567 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3568 if (v == 0) {
3569 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3570 expect("identifier");
3571 else {
3572 int v = btype.ref->v;
3573 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3574 if (tcc_state->ms_extensions == 0)
3575 expect("identifier");
3579 if (type_size(&type1, &align) < 0) {
3580 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3581 flexible = 1;
3582 else
3583 tcc_error("field '%s' has incomplete type",
3584 get_tok_str(v, NULL));
3586 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3587 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3588 tcc_error("invalid type for '%s'",
3589 get_tok_str(v, NULL));
3591 if (tok == ':') {
3592 next();
3593 bit_size = expr_const();
3594 /* XXX: handle v = 0 case for messages */
3595 if (bit_size < 0)
3596 tcc_error("negative width in bit-field '%s'",
3597 get_tok_str(v, NULL));
3598 if (v && bit_size == 0)
3599 tcc_error("zero width for bit-field '%s'",
3600 get_tok_str(v, NULL));
3601 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3602 parse_attribute(&ad1);
3604 size = type_size(&type1, &align);
3605 /* Only remember non-default alignment. */
3606 alignoverride = 0;
3607 if (ad1.a.aligned) {
3608 int speca = 1 << (ad1.a.aligned - 1);
3609 alignoverride = speca;
3610 } else if (ad1.a.packed || ad->a.packed) {
3611 alignoverride = 1;
3612 } else if (*tcc_state->pack_stack_ptr) {
3613 if (align > *tcc_state->pack_stack_ptr)
3614 alignoverride = *tcc_state->pack_stack_ptr;
3616 if (bit_size >= 0) {
3617 bt = type1.t & VT_BTYPE;
3618 if (bt != VT_INT &&
3619 bt != VT_BYTE &&
3620 bt != VT_SHORT &&
3621 bt != VT_BOOL &&
3622 bt != VT_ENUM &&
3623 bt != VT_LLONG)
3624 tcc_error("bitfields must have scalar type");
3625 bsize = size * 8;
3626 if (bit_size > bsize) {
3627 tcc_error("width of '%s' exceeds its type",
3628 get_tok_str(v, NULL));
3629 } else if (bit_size == bsize) {
3630 /* no need for bit fields */
3632 } else {
3633 type1.t |= VT_BITFIELD |
3634 (0 << VT_STRUCT_SHIFT) |
3635 (bit_size << (VT_STRUCT_SHIFT + 6));
3638 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3639 /* Remember we've seen a real field to check
3640 for placement of flexible array member. */
3641 c = 1;
3643 /* If member is a struct or bit-field, enforce
3644 placing into the struct (as anonymous). */
3645 if (v == 0 &&
3646 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3647 bit_size >= 0)) {
3648 v = anon_sym++;
3650 if (v) {
3651 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3652 *ps = ss;
3653 ps = &ss->next;
3655 if (tok == ';' || tok == TOK_EOF)
3656 break;
3657 skip(',');
3659 skip(';');
3661 skip('}');
3662 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3663 parse_attribute(ad);
3664 struct_layout(type, ad);
3669 /* return 1 if the basic type is a size specifier (short, long, long long) */
3670 ST_FUNC int is_btype_size(int bt)
3672 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3675 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3676 are added to the element type, copied because it could be a typedef. */
3677 static void parse_btype_qualify(CType *type, int qualifiers)
3679 while (type->t & VT_ARRAY) {
3680 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3681 type = &type->ref->type;
3683 type->t |= qualifiers;
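/* Illustrative example of why the element type is copied above:

       typedef int A[4];
       const A a;    // 'a' gets element type "const int": the qualifier is
                     // applied to a fresh copy of the element type
       A b;          // 'b' must keep plain "int" elements; without the copy
                     // the typedef's shared element Sym would have been
                     // modified by the declaration of 'a'
*/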
3686 /* return 0 if no type declaration. otherwise, return the basic type
3687 and skip it.
3689 static int parse_btype(CType *type, AttributeDef *ad)
3691 int t, u, bt_size, complete, type_found, typespec_found;
3692 Sym *s;
3693 CType type1;
3695 memset(ad, 0, sizeof(AttributeDef));
3696 complete = 0;
3697 type_found = 0;
3698 typespec_found = 0;
3699 t = 0;
3700 while(1) {
3701 switch(tok) {
3702 case TOK_EXTENSION:
3703 /* currently, we simply ignore __extension__ */
3704 next();
3705 continue;
3707 /* basic types */
3708 case TOK_CHAR:
3709 u = VT_BYTE;
3710 basic_type:
3711 next();
3712 basic_type1:
3713 if (complete)
3714 tcc_error("too many basic types");
3715 t |= u;
3716 bt_size = is_btype_size (u & VT_BTYPE);
3717 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3718 complete = 1;
3719 typespec_found = 1;
3720 break;
3721 case TOK_VOID:
3722 u = VT_VOID;
3723 goto basic_type;
3724 case TOK_SHORT:
3725 u = VT_SHORT;
3726 goto basic_type;
3727 case TOK_INT:
3728 u = VT_INT;
3729 goto basic_type;
3730 case TOK_LONG:
3731 next();
3732 if ((t & VT_BTYPE) == VT_DOUBLE) {
3733 #ifndef TCC_TARGET_PE
3734 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3735 #endif
3736 } else if ((t & VT_BTYPE) == VT_LONG) {
3737 t = (t & ~VT_BTYPE) | VT_LLONG;
3738 } else {
3739 u = VT_LONG;
3740 goto basic_type1;
3742 break;
3743 #ifdef TCC_TARGET_ARM64
3744 case TOK_UINT128:
3745 /* GCC's __uint128_t appears in some Linux header files. Make it a
3746 synonym for long double to get the size and alignment right. */
3747 u = VT_LDOUBLE;
3748 goto basic_type;
3749 #endif
3750 case TOK_BOOL:
3751 u = VT_BOOL;
3752 goto basic_type;
3753 case TOK_FLOAT:
3754 u = VT_FLOAT;
3755 goto basic_type;
3756 case TOK_DOUBLE:
3757 next();
3758 if ((t & VT_BTYPE) == VT_LONG) {
3759 #ifdef TCC_TARGET_PE
3760 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3761 #else
3762 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3763 #endif
3764 } else {
3765 u = VT_DOUBLE;
3766 goto basic_type1;
3768 break;
3769 case TOK_ENUM:
3770 struct_decl(&type1, ad, VT_ENUM);
3771 basic_type2:
3772 u = type1.t;
3773 type->ref = type1.ref;
3774 goto basic_type1;
3775 case TOK_STRUCT:
3776 case TOK_UNION:
3777 struct_decl(&type1, ad, VT_STRUCT);
3778 goto basic_type2;
3780 /* type modifiers */
3781 case TOK_CONST1:
3782 case TOK_CONST2:
3783 case TOK_CONST3:
3784 type->t = t;
3785 parse_btype_qualify(type, VT_CONSTANT);
3786 t = type->t;
3787 next();
3788 break;
3789 case TOK_VOLATILE1:
3790 case TOK_VOLATILE2:
3791 case TOK_VOLATILE3:
3792 type->t = t;
3793 parse_btype_qualify(type, VT_VOLATILE);
3794 t = type->t;
3795 next();
3796 break;
3797 case TOK_SIGNED1:
3798 case TOK_SIGNED2:
3799 case TOK_SIGNED3:
3800 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3801 tcc_error("signed and unsigned modifier");
3802 typespec_found = 1;
3803 t |= VT_DEFSIGN;
3804 next();
3805 break;
3806 case TOK_REGISTER:
3807 case TOK_AUTO:
3808 case TOK_RESTRICT1:
3809 case TOK_RESTRICT2:
3810 case TOK_RESTRICT3:
3811 next();
3812 break;
3813 case TOK_UNSIGNED:
3814 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3815 tcc_error("signed and unsigned modifier");
3816 t |= VT_DEFSIGN | VT_UNSIGNED;
3817 next();
3818 typespec_found = 1;
3819 break;
3821 /* storage */
3822 case TOK_EXTERN:
3823 t |= VT_EXTERN;
3824 next();
3825 break;
3826 case TOK_STATIC:
3827 t |= VT_STATIC;
3828 next();
3829 break;
3830 case TOK_TYPEDEF:
3831 t |= VT_TYPEDEF;
3832 next();
3833 break;
3834 case TOK_INLINE1:
3835 case TOK_INLINE2:
3836 case TOK_INLINE3:
3837 t |= VT_INLINE;
3838 next();
3839 break;
3841 /* GNUC attribute */
3842 case TOK_ATTRIBUTE1:
3843 case TOK_ATTRIBUTE2:
3844 parse_attribute(ad);
3845 if (ad->a.mode) {
3846 u = ad->a.mode -1;
3847 t = (t & ~VT_BTYPE) | u;
3849 break;
3850 /* GNUC typeof */
3851 case TOK_TYPEOF1:
3852 case TOK_TYPEOF2:
3853 case TOK_TYPEOF3:
3854 next();
3855 parse_expr_type(&type1);
3856 /* remove all storage modifiers except typedef */
3857 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3858 goto basic_type2;
3859 default:
3860 if (typespec_found)
3861 goto the_end;
3862 s = sym_find(tok);
3863 if (!s || !(s->type.t & VT_TYPEDEF))
3864 goto the_end;
3866 type->t = ((s->type.t & ~VT_TYPEDEF) |
3867 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3868 type->ref = s->type.ref;
3869 if (t & (VT_CONSTANT | VT_VOLATILE))
3870 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3871 t = type->t;
3873 if (s->r) {
3874 /* get attributes from typedef */
3875 if (0 == ad->a.aligned)
3876 ad->a.aligned = s->a.aligned;
3877 if (0 == ad->a.func_call)
3878 ad->a.func_call = s->a.func_call;
3879 ad->a.packed |= s->a.packed;
3881 next();
3882 typespec_found = 1;
3883 break;
3885 type_found = 1;
3887 the_end:
3888 if (tcc_state->char_is_unsigned) {
3889 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3890 t |= VT_UNSIGNED;
3893 /* long is never used as type */
3894 if ((t & VT_BTYPE) == VT_LONG)
3895 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3896 defined TCC_TARGET_PE
3897 t = (t & ~VT_BTYPE) | VT_INT;
3898 #else
3899 t = (t & ~VT_BTYPE) | VT_LLONG;
3900 #endif
3901 type->t = t;
3902 return type_found;
3905 /* convert a function parameter type (array to pointer and function to
3906 function pointer) */
3907 static inline void convert_parameter_type(CType *pt)
3909 /* remove const and volatile qualifiers (XXX: const could be used
3910 to indicate a const function parameter) */
3911 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3912 /* array must be transformed to pointer according to ANSI C */
3913 pt->t &= ~VT_ARRAY;
3914 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3915 mk_pointer(pt);
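/* Illustrative effect of convert_parameter_type(): a declaration such as

       void f(int a[10], int cmp(const void *, const void *));

   is treated as

       void f(int *a, int (*cmp)(const void *, const void *));

   i.e. array parameters decay to pointers, function parameters become
   function pointers, and top-level const/volatile on the parameter type
   is dropped. */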
3919 ST_FUNC void parse_asm_str(CString *astr)
3921 skip('(');
3922 parse_mult_str(astr, "string constant");
3925 /* Parse an asm label and return the token */
3926 static int asm_label_instr(void)
3928 int v;
3929 CString astr;
3931 next();
3932 parse_asm_str(&astr);
3933 skip(')');
3934 #ifdef ASM_DEBUG
3935 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3936 #endif
3937 v = tok_alloc(astr.data, astr.size - 1)->tok;
3938 cstr_free(&astr);
3939 return v;
3942 static void post_type(CType *type, AttributeDef *ad, int storage)
3944 int n, l, t1, arg_size, align;
3945 Sym **plast, *s, *first;
3946 AttributeDef ad1;
3947 CType pt;
3949 if (tok == '(') {
3950 /* function declaration */
3951 next();
3952 l = 0;
3953 first = NULL;
3954 plast = &first;
3955 arg_size = 0;
3956 if (tok != ')') {
3957 for(;;) {
3958 /* read param name and compute offset */
3959 if (l != FUNC_OLD) {
3960 if (!parse_btype(&pt, &ad1)) {
3961 if (l) {
3962 tcc_error("invalid type");
3963 } else {
3964 l = FUNC_OLD;
3965 goto old_proto;
3968 l = FUNC_NEW;
3969 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3970 break;
3971 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3972 if ((pt.t & VT_BTYPE) == VT_VOID)
3973 tcc_error("parameter declared as void");
3974 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3975 } else {
3976 old_proto:
3977 n = tok;
3978 if (n < TOK_UIDENT)
3979 expect("identifier");
3980 pt.t = VT_INT;
3981 next();
3983 convert_parameter_type(&pt);
3984 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3985 *plast = s;
3986 plast = &s->next;
3987 if (tok == ')')
3988 break;
3989 skip(',');
3990 if (l == FUNC_NEW && tok == TOK_DOTS) {
3991 l = FUNC_ELLIPSIS;
3992 next();
3993 break;
3997 /* if no parameters, then old type prototype */
3998 if (l == 0)
3999 l = FUNC_OLD;
4000 skip(')');
4001 /* NOTE: const is ignored in returned type as it has a special
4002 meaning in gcc / C++ */
4003 type->t &= ~VT_CONSTANT;
4004 /* some ancient pre-K&R C allows a function to return an array
4005 and the array brackets to be put after the arguments, such
4006 that "int c()[]" means something like "int[] c()" */
4007 if (tok == '[') {
4008 next();
4009 skip(']'); /* only handle simple "[]" */
4010 type->t |= VT_PTR;
4012 /* we push an anonymous symbol which will contain the function prototype */
4013 ad->a.func_args = arg_size;
4014 s = sym_push(SYM_FIELD, type, 0, l);
4015 s->a = ad->a;
4016 s->next = first;
4017 type->t = VT_FUNC;
4018 type->ref = s;
4019 } else if (tok == '[') {
4020 int saved_nocode_wanted = nocode_wanted;
4021 /* array definition */
4022 next();
4023 if (tok == TOK_RESTRICT1)
4024 next();
4025 n = -1;
4026 t1 = 0;
4027 if (tok != ']') {
4028 if (!local_stack || (storage & VT_STATIC))
4029 vpushi(expr_const());
4030 else {
4031 /* The length of a VLA (which can only occur with local_stack &&
4032 !VT_STATIC) must always be evaluated, even under nocode_wanted,
4033 so that its size slot is initialized (e.g. under sizeof
4034 or typeof). */
4035 nocode_wanted = 0;
4036 gexpr();
4038 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4039 n = vtop->c.i;
4040 if (n < 0)
4041 tcc_error("invalid array size");
4042 } else {
4043 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4044 tcc_error("size of variable length array should be an integer");
4045 t1 = VT_VLA;
4048 skip(']');
4049 /* parse next post type */
4050 post_type(type, ad, storage);
4051 if (type->t == VT_FUNC)
4052 tcc_error("declaration of an array of functions");
4053 t1 |= type->t & VT_VLA;
4055 if (t1 & VT_VLA) {
4056 loc -= type_size(&int_type, &align);
4057 loc &= -align;
4058 n = loc;
4060 vla_runtime_type_size(type, &align);
4061 gen_op('*');
4062 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4063 vswap();
4064 vstore();
4066 if (n != -1)
4067 vpop();
4068 nocode_wanted = saved_nocode_wanted;
4070 /* we push an anonymous symbol which will contain the array
4071 element type */
4072 s = sym_push(SYM_FIELD, type, 0, n);
4073 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4074 type->ref = s;
4078 /* Parse a type declaration (except basic type), and return the type
4079 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4080 expected. 'type' should contain the basic type. 'ad' is the
4081 attribute definition of the basic type. It can be modified by
4082 type_decl().
4084 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4086 Sym *s;
4087 CType type1, *type2;
4088 int qualifiers, storage;
4090 while (tok == '*') {
4091 qualifiers = 0;
4092 redo:
4093 next();
4094 switch(tok) {
4095 case TOK_CONST1:
4096 case TOK_CONST2:
4097 case TOK_CONST3:
4098 qualifiers |= VT_CONSTANT;
4099 goto redo;
4100 case TOK_VOLATILE1:
4101 case TOK_VOLATILE2:
4102 case TOK_VOLATILE3:
4103 qualifiers |= VT_VOLATILE;
4104 goto redo;
4105 case TOK_RESTRICT1:
4106 case TOK_RESTRICT2:
4107 case TOK_RESTRICT3:
4108 goto redo;
4109 /* XXX: clarify attribute handling */
4110 case TOK_ATTRIBUTE1:
4111 case TOK_ATTRIBUTE2:
4112 parse_attribute(ad);
4113 break;
4115 mk_pointer(type);
4116 type->t |= qualifiers;
4119 /* recursive type */
4120 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4121 type1.t = 0; /* XXX: same as int */
4122 if (tok == '(') {
4123 next();
4124 /* XXX: this is not correct to modify 'ad' at this point, but
4125 the syntax is not clear */
4126 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4127 parse_attribute(ad);
4128 type_decl(&type1, ad, v, td);
4129 skip(')');
4130 } else {
4131 /* type identifier */
4132 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4133 *v = tok;
4134 next();
4135 } else {
4136 if (!(td & TYPE_ABSTRACT))
4137 expect("identifier");
4138 *v = 0;
4141 storage = type->t & VT_STORAGE;
4142 type->t &= ~VT_STORAGE;
4143 post_type(type, ad, storage);
4144 type->t |= storage;
4145 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4146 parse_attribute(ad);
4148 if (!type1.t)
4149 return;
4150 /* append type at the end of type1 */
4151 type2 = &type1;
4152 for(;;) {
4153 s = type2->ref;
4154 type2 = &s->type;
4155 if (!type2->t) {
4156 *type2 = *type;
4157 break;
4160 *type = type1;
4163 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4164 ST_FUNC int lvalue_type(int t)
4166 int bt, r;
4167 r = VT_LVAL;
4168 bt = t & VT_BTYPE;
4169 if (bt == VT_BYTE || bt == VT_BOOL)
4170 r |= VT_LVAL_BYTE;
4171 else if (bt == VT_SHORT)
4172 r |= VT_LVAL_SHORT;
4173 else
4174 return r;
4175 if (t & VT_UNSIGNED)
4176 r |= VT_LVAL_UNSIGNED;
4177 return r;
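/* Illustrative mapping produced by lvalue_type():

       signed char     -> VT_LVAL | VT_LVAL_BYTE
       unsigned char   -> VT_LVAL | VT_LVAL_BYTE  | VT_LVAL_UNSIGNED
       short           -> VT_LVAL | VT_LVAL_SHORT
       unsigned short  -> VT_LVAL | VT_LVAL_SHORT | VT_LVAL_UNSIGNED
       int, long long,
       pointers, ...   -> VT_LVAL    (full-word loads need no extra bits)
*/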
4180 /* indirection with full error checking and bound check */
4181 ST_FUNC void indir(void)
4183 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4184 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4185 return;
4186 expect("pointer");
4188 if (vtop->r & VT_LVAL)
4189 gv(RC_INT);
4190 vtop->type = *pointed_type(&vtop->type);
4191 /* Arrays and functions are never lvalues */
4192 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4193 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4194 vtop->r |= lvalue_type(vtop->type.t);
4195 /* if bound checking, the referenced pointer must be checked */
4196 #ifdef CONFIG_TCC_BCHECK
4197 if (tcc_state->do_bounds_check)
4198 vtop->r |= VT_MUSTBOUND;
4199 #endif
4203 /* pass a parameter to a function and do type checking and casting */
4204 static void gfunc_param_typed(Sym *func, Sym *arg)
4206 int func_type;
4207 CType type;
4209 func_type = func->c;
4210 if (func_type == FUNC_OLD ||
4211 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4212 /* default casting : only need to convert float to double */
4213 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4214 type.t = VT_DOUBLE;
4215 gen_cast(&type);
4216 } else if (vtop->type.t & VT_BITFIELD) {
4217 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4218 type.ref = vtop->type.ref;
4219 gen_cast(&type);
4221 } else if (arg == NULL) {
4222 tcc_error("too many arguments to function");
4223 } else {
4224 type = arg->type;
4225 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4226 gen_assign_cast(&type);
4230 /* parse an expression of the form '(type)' or '(expr)' and return its
4231 type */
4232 static void parse_expr_type(CType *type)
4234 int n;
4235 AttributeDef ad;
4237 skip('(');
4238 if (parse_btype(type, &ad)) {
4239 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4240 } else {
4241 expr_type(type);
4243 skip(')');
4246 static void parse_type(CType *type)
4248 AttributeDef ad;
4249 int n;
4251 if (!parse_btype(type, &ad)) {
4252 expect("type");
4254 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4257 static void vpush_tokc(int t)
4259 CType type;
4260 type.t = t;
4261 type.ref = 0;
4262 vsetc(&type, VT_CONST, &tokc);
4265 ST_FUNC void unary(void)
4267 int n, t, align, size, r, sizeof_caller;
4268 CType type;
4269 Sym *s;
4270 AttributeDef ad;
4272 sizeof_caller = in_sizeof;
4273 in_sizeof = 0;
4274 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4275 although it would be better here */
4276 tok_next:
4277 switch(tok) {
4278 case TOK_EXTENSION:
4279 next();
4280 goto tok_next;
4281 case TOK_CINT:
4282 case TOK_CCHAR:
4283 case TOK_LCHAR:
4284 vpushi(tokc.i);
4285 next();
4286 break;
4287 case TOK_CUINT:
4288 vpush_tokc(VT_INT | VT_UNSIGNED);
4289 next();
4290 break;
4291 case TOK_CLLONG:
4292 vpush_tokc(VT_LLONG);
4293 next();
4294 break;
4295 case TOK_CULLONG:
4296 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4297 next();
4298 break;
4299 case TOK_CFLOAT:
4300 vpush_tokc(VT_FLOAT);
4301 next();
4302 break;
4303 case TOK_CDOUBLE:
4304 vpush_tokc(VT_DOUBLE);
4305 next();
4306 break;
4307 case TOK_CLDOUBLE:
4308 vpush_tokc(VT_LDOUBLE);
4309 next();
4310 break;
4311 case TOK___FUNCTION__:
4312 if (!gnu_ext)
4313 goto tok_identifier;
4314 /* fall thru */
4315 case TOK___FUNC__:
4317 void *ptr;
4318 int len;
4319 /* special function name identifier */
4320 len = strlen(funcname) + 1;
4321 /* generate char[len] type */
4322 type.t = VT_BYTE;
4323 mk_pointer(&type);
4324 type.t |= VT_ARRAY;
4325 type.ref->c = len;
4326 vpush_ref(&type, data_section, data_section->data_offset, len);
4327 ptr = section_ptr_add(data_section, len);
4328 memcpy(ptr, funcname, len);
4329 next();
4331 break;
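/* For illustration: inside a function f(), both __func__ and (with gnu_ext)
   __FUNCTION__ evaluate to a static char array containing "f"; the array is
   emitted into the data section as done above. */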
4332 case TOK_LSTR:
4333 #ifdef TCC_TARGET_PE
4334 t = VT_SHORT | VT_UNSIGNED;
4335 #else
4336 t = VT_INT;
4337 #endif
4338 goto str_init;
4339 case TOK_STR:
4340 /* string parsing */
4341 t = VT_BYTE;
4342 str_init:
4343 if (tcc_state->warn_write_strings)
4344 t |= VT_CONSTANT;
4345 type.t = t;
4346 mk_pointer(&type);
4347 type.t |= VT_ARRAY;
4348 memset(&ad, 0, sizeof(AttributeDef));
4349 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4350 break;
4351 case '(':
4352 next();
4353 /* cast ? */
4354 if (parse_btype(&type, &ad)) {
4355 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4356 skip(')');
4357 /* check ISOC99 compound literal */
4358 if (tok == '{') {
4359 /* data is allocated locally by default */
4360 if (global_expr)
4361 r = VT_CONST;
4362 else
4363 r = VT_LOCAL;
4364 /* all except arrays are lvalues */
4365 if (!(type.t & VT_ARRAY))
4366 r |= lvalue_type(type.t);
4367 memset(&ad, 0, sizeof(AttributeDef));
4368 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
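/* For illustration: a C99 compound literal such as '(int[]){1, 2, 3}' is
   allocated on the stack here, or in the data section when global_expr is
   set (e.g. while parsing a static initializer). */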
4369 } else {
4370 if (sizeof_caller) {
4371 vpush(&type);
4372 return;
4374 unary();
4375 gen_cast(&type);
4377 } else if (tok == '{') {
4378 int saved_nocode_wanted = nocode_wanted;
4379 if (const_wanted)
4380 tcc_error("expected constant");
4381 /* save all registers */
4382 save_regs(0);
4383 /* statement expression : we do not accept break/continue
4384 inside as GCC does. We do retain the nocode_wanted state,
4385 as statement expressions can't ever be entered from the
4386 outside, so any reactivation of code emission (from labels
4387 or loop heads) can be disabled again after the end of it. */
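/* For illustration, a GNU statement expression such as
       int x = ({ int t = f(); t * 2; });
   (with some function f) is parsed by the block() call below; the value of
   its last expression statement becomes the value of the whole construct. */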
4388 block(NULL, NULL, 1);
4389 nocode_wanted = saved_nocode_wanted;
4390 skip(')');
4391 } else {
4392 gexpr();
4393 skip(')');
4395 break;
4396 case '*':
4397 next();
4398 unary();
4399 indir();
4400 break;
4401 case '&':
4402 next();
4403 unary();
4404 /* functions names must be treated as function pointers,
4405 except for unary '&' and sizeof. Since we consider that
4406 functions are not lvalues, we only have to handle it
4407 there and in function calls. */
4408 /* arrays can also be used although they are not lvalues */
4409 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4410 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4411 test_lvalue();
4412 mk_pointer(&vtop->type);
4413 gaddrof();
4414 break;
4415 case '!':
4416 next();
4417 unary();
4418 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4419 CType boolean;
4420 boolean.t = VT_BOOL;
4421 gen_cast(&boolean);
4422 vtop->c.i = !vtop->c.i;
4423 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4424 vtop->c.i ^= 1;
4425 else {
4426 save_regs(1);
4427 vseti(VT_JMP, gvtst(1, 0));
4429 break;
4430 case '~':
4431 next();
4432 unary();
4433 vpushi(-1);
4434 gen_op('^');
4435 break;
4436 case '+':
4437 next();
4438 unary();
4439 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4440 tcc_error("pointer not accepted for unary plus");
4441 /* In order to force a cast, we add zero, except for floating point
4442 where we really need a noop (otherwise -0.0 would be transformed
4443 into +0.0). */
4444 if (!is_float(vtop->type.t)) {
4445 vpushi(0);
4446 gen_op('+');
4448 break;
4449 case TOK_SIZEOF:
4450 case TOK_ALIGNOF1:
4451 case TOK_ALIGNOF2:
4452 t = tok;
4453 next();
4454 in_sizeof++;
4455 unary_type(&type); // this resets in_sizeof to 0 inside unary()
4456 size = type_size(&type, &align);
4457 if (t == TOK_SIZEOF) {
4458 if (!(type.t & VT_VLA)) {
4459 if (size < 0)
4460 tcc_error("sizeof applied to an incomplete type");
4461 vpushs(size);
4462 } else {
4463 vla_runtime_type_size(&type, &align);
4465 } else {
4466 vpushs(align);
4468 vtop->type.t |= VT_UNSIGNED;
4469 break;
4471 case TOK_builtin_expect:
4473 /* __builtin_expect is a no-op for now */
4474 next();
4475 skip('(');
4476 expr_eq();
4477 skip(',');
4478 nocode_wanted++;
4479 expr_lor_const();
4480 vpop();
4481 nocode_wanted--;
4482 skip(')');
4484 break;
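/* For illustration: in 'if (__builtin_expect(x, 0))' the first argument is
   compiled as the condition; the hint argument is parsed with nocode_wanted
   set and discarded, as done above, so no prediction hint is emitted. */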
4485 case TOK_builtin_types_compatible_p:
4487 CType type1, type2;
4488 next();
4489 skip('(');
4490 parse_type(&type1);
4491 skip(',');
4492 parse_type(&type2);
4493 skip(')');
4494 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4495 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4496 vpushi(is_compatible_types(&type1, &type2));
4498 break;
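/* For illustration: __builtin_types_compatible_p(int, const int) pushes 1
   here, since top-level qualifiers are stripped above, while
   __builtin_types_compatible_p(int, double) pushes 0. */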
4499 case TOK_builtin_choose_expr:
4501 int64_t c;
4502 next();
4503 skip('(');
4504 c = expr_const64();
4505 skip(',');
4506 if (!c) {
4507 nocode_wanted++;
4509 expr_eq();
4510 if (!c) {
4511 vpop();
4512 nocode_wanted--;
4514 skip(',');
4515 if (c) {
4516 nocode_wanted++;
4518 expr_eq();
4519 if (c) {
4520 vpop();
4521 nocode_wanted--;
4523 skip(')');
4525 break;
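/* For illustration: __builtin_choose_expr(1, a, b) (for some expressions a
   and b) evaluates its constant first argument at compile time; the selected
   operand is compiled normally while the other one is parsed with
   nocode_wanted set and popped, as implemented above. */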
4526 case TOK_builtin_constant_p:
4528 int res;
4529 next();
4530 skip('(');
4531 nocode_wanted++;
4532 gexpr();
4533 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4534 vpop();
4535 nocode_wanted--;
4536 skip(')');
4537 vpushi(res);
4539 break;
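/* For illustration: __builtin_constant_p(3 * 4) pushes 1 because the product
   folds to a constant, whereas applying it to an ordinary variable (an
   lvalue) pushes 0. */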
4540 case TOK_builtin_frame_address:
4541 case TOK_builtin_return_address:
4543 int tok1 = tok;
4544 int level;
4545 CType type;
4546 next();
4547 skip('(');
4548 if (tok != TOK_CINT) {
4549 tcc_error("%s only takes positive integers",
4550 tok1 == TOK_builtin_return_address ?
4551 "__builtin_return_address" :
4552 "__builtin_frame_address");
4554 level = (uint32_t)tokc.i;
4555 next();
4556 skip(')');
4557 type.t = VT_VOID;
4558 mk_pointer(&type);
4559 vset(&type, VT_LOCAL, 0); /* local frame */
4560 while (level--) {
4561 mk_pointer(&vtop->type);
4562 indir(); /* -> parent frame */
4564 if (tok1 == TOK_builtin_return_address) {
4565 // assume return address is just above frame pointer on stack
4566 vpushi(PTR_SIZE);
4567 gen_op('+');
4568 mk_pointer(&vtop->type);
4569 indir();
4572 break;
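/* For illustration: __builtin_frame_address(0) yields the current frame
   pointer; each additional level dereferences the saved frame pointer chain,
   which is what the while loop above does (assuming frame pointers are
   chained on the stack). */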
4573 #ifdef TCC_TARGET_X86_64
4574 #ifdef TCC_TARGET_PE
4575 case TOK_builtin_va_start:
4577 next();
4578 skip('(');
4579 expr_eq();
4580 skip(',');
4581 expr_eq();
4582 skip(')');
4583 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4584 tcc_error("__builtin_va_start expects a local variable");
4585 vtop->r &= ~(VT_LVAL | VT_REF);
4586 vtop->type = char_pointer_type;
4587 vtop->c.i += 8;
4588 vstore();
4590 break;
4591 #else
4592 case TOK_builtin_va_arg_types:
4594 CType type;
4595 next();
4596 skip('(');
4597 parse_type(&type);
4598 skip(')');
4599 vpushi(classify_x86_64_va_arg(&type));
4601 break;
4602 #endif
4603 #endif
4605 #ifdef TCC_TARGET_ARM64
4606 case TOK___va_start: {
4607 next();
4608 skip('(');
4609 expr_eq();
4610 skip(',');
4611 expr_eq();
4612 skip(')');
4613 //xx check types
4614 gen_va_start();
4615 vpushi(0);
4616 vtop->type.t = VT_VOID;
4617 break;
4619 case TOK___va_arg: {
4620 CType type;
4621 next();
4622 skip('(');
4623 expr_eq();
4624 skip(',');
4625 parse_type(&type);
4626 skip(')');
4627 //xx check types
4628 gen_va_arg(&type);
4629 vtop->type = type;
4630 break;
4632 case TOK___arm64_clear_cache: {
4633 next();
4634 skip('(');
4635 expr_eq();
4636 skip(',');
4637 expr_eq();
4638 skip(')');
4639 gen_clear_cache();
4640 vpushi(0);
4641 vtop->type.t = VT_VOID;
4642 break;
4644 #endif
4645 /* pre operations */
4646 case TOK_INC:
4647 case TOK_DEC:
4648 t = tok;
4649 next();
4650 unary();
4651 inc(0, t);
4652 break;
4653 case '-':
4654 next();
4655 unary();
4656 t = vtop->type.t & VT_BTYPE;
4657 if (is_float(t)) {
4658 /* In IEEE negate(x) isn't subtract(0,x), but rather
4659 subtract(-0, x). */
4660 vpush(&vtop->type);
4661 if (t == VT_FLOAT)
4662 vtop->c.f = -0.0f;
4663 else if (t == VT_DOUBLE)
4664 vtop->c.d = -0.0;
4665 else
4666 vtop->c.ld = -0.0;
4667 } else
4668 vpushi(0);
4669 vswap();
4670 gen_op('-');
4671 break;
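/* For illustration: with x == 0.0, computing 0.0 - x would give +0.0, while
   -0.0 - x gives the correct -0.0, which is why a negative zero is pushed
   above for floating-point operands. */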
4672 case TOK_LAND:
4673 if (!gnu_ext)
4674 goto tok_identifier;
4675 next();
4676 /* allow to take the address of a label */
4677 if (tok < TOK_UIDENT)
4678 expect("label identifier");
4679 s = label_find(tok);
4680 if (!s) {
4681 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4682 } else {
4683 if (s->r == LABEL_DECLARED)
4684 s->r = LABEL_FORWARD;
4686 if (!s->type.t) {
4687 s->type.t = VT_VOID;
4688 mk_pointer(&s->type);
4689 s->type.t |= VT_STATIC;
4691 vpushsym(&s->type, s);
4692 next();
4693 break;
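/* For illustration, the GNU labels-as-values extension handled here:
       void *p = &&done;
       goto *p;
   '&&done' pushes the address of the label; the computed 'goto *' itself is
   handled later in block(). */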
4695 // special qnan, snan and infinity values
4696 case TOK___NAN__:
4697 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4698 next();
4699 break;
4700 case TOK___SNAN__:
4701 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4702 next();
4703 break;
4704 case TOK___INF__:
4705 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4706 next();
4707 break;
4709 default:
4710 tok_identifier:
4711 t = tok;
4712 next();
4713 if (t < TOK_UIDENT)
4714 expect("identifier");
4715 s = sym_find(t);
4716 if (!s) {
4717 const char *name = get_tok_str(t, NULL);
4718 if (tok != '(')
4719 tcc_error("'%s' undeclared", name);
4720 /* for simple function calls, we tolerate an undeclared
4721 external reference to an int() function */
4722 if (tcc_state->warn_implicit_function_declaration
4723 #ifdef TCC_TARGET_PE
4724 /* people must be warned about using undeclared WINAPI functions
4725 (which usually start with uppercase letter) */
4726 || (name[0] >= 'A' && name[0] <= 'Z')
4727 #endif
4729 tcc_warning("implicit declaration of function '%s'", name);
4730 s = external_global_sym(t, &func_old_type, 0);
4732 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4733 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4734 /* if referencing an inline function, then we generate a
4735 symbol to it if not already done. It will have the
4736 effect to generate code for it at the end of the
4737 compilation unit. Inline functions are always
4738 generated in the text section. */
4739 if (!s->c && !nocode_wanted)
4740 put_extern_sym(s, text_section, 0, 0);
4741 r = VT_SYM | VT_CONST;
4742 } else {
4743 r = s->r;
4744 /* A symbol that has a register is a local register variable,
4745 which starts out as VT_LOCAL value. */
4746 if ((r & VT_VALMASK) < VT_CONST)
4747 r = (r & ~VT_VALMASK) | VT_LOCAL;
4749 vset(&s->type, r, s->c);
4750 /* Point to s as backpointer (even without r&VT_SYM).
4751 Will be used by at least the x86 inline asm parser for
4752 regvars. */
4753 vtop->sym = s;
4754 if (vtop->r & VT_SYM) {
4755 vtop->c.i = 0;
4757 break;
4760 /* post operations */
4761 while (1) {
4762 if (tok == TOK_INC || tok == TOK_DEC) {
4763 inc(1, tok);
4764 next();
4765 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4766 int qualifiers;
4767 /* field */
4768 if (tok == TOK_ARROW)
4769 indir();
4770 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4771 test_lvalue();
4772 gaddrof();
4773 /* expect pointer on structure */
4774 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4775 expect("struct or union");
4776 if (tok == TOK_CDOUBLE)
4777 expect("field name");
4778 next();
4779 if (tok == TOK_CINT || tok == TOK_CUINT)
4780 expect("field name");
4781 s = find_field(&vtop->type, tok);
4782 if (!s)
4783 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4784 /* add field offset to pointer */
4785 vtop->type = char_pointer_type; /* change type to 'char *' */
4786 vpushi(s->c);
4787 gen_op('+');
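/* i.e. 's.f' is compiled roughly as *(typeof(f) *)((char *)&s + offset of f);
   the conversion back to the field type happens just below. */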
4788 /* change type to field type, and set to lvalue */
4789 vtop->type = s->type;
4790 vtop->type.t |= qualifiers;
4791 /* an array is never an lvalue */
4792 if (!(vtop->type.t & VT_ARRAY)) {
4793 vtop->r |= lvalue_type(vtop->type.t);
4794 #ifdef CONFIG_TCC_BCHECK
4795 /* if bound checking, the referenced pointer must be checked */
4796 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4797 vtop->r |= VT_MUSTBOUND;
4798 #endif
4800 next();
4801 } else if (tok == '[') {
4802 next();
4803 gexpr();
4804 gen_op('+');
4805 indir();
4806 skip(']');
4807 } else if (tok == '(') {
4808 SValue ret;
4809 Sym *sa;
4810 int nb_args, ret_nregs, ret_align, regsize, variadic;
4812 /* function call */
4813 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4814 /* pointer test (no array accepted) */
4815 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4816 vtop->type = *pointed_type(&vtop->type);
4817 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4818 goto error_func;
4819 } else {
4820 error_func:
4821 expect("function pointer");
4823 } else {
4824 vtop->r &= ~VT_LVAL; /* no lvalue */
4826 /* get return type */
4827 s = vtop->type.ref;
4828 next();
4829 sa = s->next; /* first parameter */
4830 nb_args = 0;
4831 ret.r2 = VT_CONST;
4832 /* compute first implicit argument if a structure is returned */
4833 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4834 variadic = (s->c == FUNC_ELLIPSIS);
4835 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4836 &ret_align, &regsize);
4837 if (!ret_nregs) {
4838 /* get some space for the returned structure */
4839 size = type_size(&s->type, &align);
4840 #ifdef TCC_TARGET_ARM64
4841 /* On arm64, a small struct is returned in registers.
4842 It is much easier to write it to memory if we know
4843 that we are allowed to write some extra bytes, so
4844 round the allocated space up to a power of 2: */
4845 if (size < 16)
4846 while (size & (size - 1))
4847 size = (size | (size - 1)) + 1;
4848 #endif
4849 loc = (loc - size) & -align;
4850 ret.type = s->type;
4851 ret.r = VT_LOCAL | VT_LVAL;
4852 /* pass it as 'int' to avoid structure arg passing
4853 problems */
4854 vseti(VT_LOCAL, loc);
4855 ret.c = vtop->c;
4856 nb_args++;
4858 } else {
4859 ret_nregs = 1;
4860 ret.type = s->type;
4863 if (ret_nregs) {
4864 /* return in register */
4865 if (is_float(ret.type.t)) {
4866 ret.r = reg_fret(ret.type.t);
4867 #ifdef TCC_TARGET_X86_64
4868 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4869 ret.r2 = REG_QRET;
4870 #endif
4871 } else {
4872 #ifndef TCC_TARGET_ARM64
4873 #ifdef TCC_TARGET_X86_64
4874 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4875 #else
4876 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4877 #endif
4878 ret.r2 = REG_LRET;
4879 #endif
4880 ret.r = REG_IRET;
4882 ret.c.i = 0;
4884 if (tok != ')') {
4885 for(;;) {
4886 expr_eq();
4887 gfunc_param_typed(s, sa);
4888 nb_args++;
4889 if (sa)
4890 sa = sa->next;
4891 if (tok == ')')
4892 break;
4893 skip(',');
4896 if (sa)
4897 tcc_error("too few arguments to function");
4898 skip(')');
4899 gfunc_call(nb_args);
4901 /* return value */
4902 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4903 vsetc(&ret.type, r, &ret.c);
4904 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4907 /* handle packed struct return */
4908 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4909 int addr, offset;
4911 size = type_size(&s->type, &align);
4912 /* We often write whole regs, so make sure there's enough
4913 space. Assume register size is power of 2. */
4914 if (regsize > align)
4915 align = regsize;
4916 loc = (loc - size) & -align;
4917 addr = loc;
4918 offset = 0;
4919 for (;;) {
4920 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4921 vswap();
4922 vstore();
4923 vtop--;
4924 if (--ret_nregs == 0)
4925 break;
4926 offset += regsize;
4928 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4930 } else {
4931 break;
4936 ST_FUNC void expr_prod(void)
4938 int t;
4940 unary();
4941 while (tok == '*' || tok == '/' || tok == '%') {
4942 t = tok;
4943 next();
4944 unary();
4945 gen_op(t);
4949 ST_FUNC void expr_sum(void)
4951 int t;
4953 expr_prod();
4954 while (tok == '+' || tok == '-') {
4955 t = tok;
4956 next();
4957 expr_prod();
4958 gen_op(t);
4962 static void expr_shift(void)
4964 int t;
4966 expr_sum();
4967 while (tok == TOK_SHL || tok == TOK_SAR) {
4968 t = tok;
4969 next();
4970 expr_sum();
4971 gen_op(t);
4975 static void expr_cmp(void)
4977 int t;
4979 expr_shift();
4980 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4981 tok == TOK_ULT || tok == TOK_UGE) {
4982 t = tok;
4983 next();
4984 expr_shift();
4985 gen_op(t);
4989 static void expr_cmpeq(void)
4991 int t;
4993 expr_cmp();
4994 while (tok == TOK_EQ || tok == TOK_NE) {
4995 t = tok;
4996 next();
4997 expr_cmp();
4998 gen_op(t);
5002 static void expr_and(void)
5004 expr_cmpeq();
5005 while (tok == '&') {
5006 next();
5007 expr_cmpeq();
5008 gen_op('&');
5012 static void expr_xor(void)
5014 expr_and();
5015 while (tok == '^') {
5016 next();
5017 expr_and();
5018 gen_op('^');
5022 static void expr_or(void)
5024 expr_xor();
5025 while (tok == '|') {
5026 next();
5027 expr_xor();
5028 gen_op('|');
5032 /* XXX: fix this mess */
5033 static void expr_land_const(void)
5035 expr_or();
5036 while (tok == TOK_LAND) {
5037 next();
5038 expr_or();
5039 gen_op(TOK_LAND);
5042 static void expr_lor_const(void)
5044 expr_land_const();
5045 while (tok == TOK_LOR) {
5046 next();
5047 expr_land_const();
5048 gen_op(TOK_LOR);
5052 static void expr_land(void)
5054 expr_or();
5055 if (tok == TOK_LAND) {
5056 int t = 0;
5057 for(;;) {
5058 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5059 CType ctb;
5060 ctb.t = VT_BOOL;
5061 gen_cast(&ctb);
5062 if (vtop->c.i) {
5063 vpop();
5064 } else {
5065 nocode_wanted++;
5066 while (tok == TOK_LAND) {
5067 next();
5068 expr_or();
5069 vpop();
5071 nocode_wanted--;
5072 if (t)
5073 gsym(t);
5074 gen_cast(&int_type);
5075 break;
5077 } else {
5078 if (!t)
5079 save_regs(1);
5080 t = gvtst(1, t);
5082 if (tok != TOK_LAND) {
5083 if (t)
5084 vseti(VT_JMPI, t);
5085 else
5086 vpushi(1);
5087 break;
5089 next();
5090 expr_or();
5095 static void expr_lor(void)
5097 expr_land();
5098 if (tok == TOK_LOR) {
5099 int t = 0;
5100 for(;;) {
5101 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5102 CType ctb;
5103 ctb.t = VT_BOOL;
5104 gen_cast(&ctb);
5105 if (!vtop->c.i) {
5106 vpop();
5107 } else {
5108 nocode_wanted++;
5109 while (tok == TOK_LOR) {
5110 next();
5111 expr_land();
5112 vpop();
5114 nocode_wanted--;
5115 if (t)
5116 gsym(t);
5117 gen_cast(&int_type);
5118 break;
5120 } else {
5121 if (!t)
5122 save_regs(1);
5123 t = gvtst(0, t);
5125 if (tok != TOK_LOR) {
5126 if (t)
5127 vseti(VT_JMP, t);
5128 else
5129 vpushi(0);
5130 break;
5132 next();
5133 expr_land();
5138 /* Assuming vtop is a value used in a conditional context
5139 (i.e. compared with zero) return 0 if it's false, 1 if
5140 true and -1 if it can't be statically determined. */
5141 static int condition_3way(void)
5143 int c = -1;
5144 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5145 (!(vtop->r & VT_SYM) ||
5146 !(vtop->sym->type.t & VT_WEAK))) {
5147 CType boolean;
5148 boolean.t = VT_BOOL;
5149 vdup();
5150 gen_cast(&boolean);
5151 c = vtop->c.i;
5152 vpop();
5154 return c;
5157 static void expr_cond(void)
5159 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5160 SValue sv;
5161 CType type, type1, type2;
5163 expr_lor();
5164 if (tok == '?') {
5165 next();
5166 c = condition_3way();
5167 g = (tok == ':' && gnu_ext);
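/* 'g' is set for the GNU 'a ?: b' form where the middle operand is omitted;
   the condition value is then duplicated (gv_dup below) and reused as the
   result when it is non-zero. */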
5168 if (c < 0) {
5169 /* needed to avoid having different registers saved in
5170 each branch */
5171 if (is_float(vtop->type.t)) {
5172 rc = RC_FLOAT;
5173 #ifdef TCC_TARGET_X86_64
5174 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5175 rc = RC_ST0;
5177 #endif
5178 } else
5179 rc = RC_INT;
5180 gv(rc);
5181 save_regs(1);
5182 if (g)
5183 gv_dup();
5184 tt = gvtst(1, 0);
5186 } else {
5187 if (!g)
5188 vpop();
5189 tt = 0;
5192 if (1) {
5193 if (c == 0)
5194 nocode_wanted++;
5195 if (!g)
5196 gexpr();
5198 type1 = vtop->type;
5199 sv = *vtop; /* save value to handle it later */
5200 vtop--; /* no vpop so that FP stack is not flushed */
5201 skip(':');
5203 u = 0;
5204 if (c < 0)
5205 u = gjmp(0);
5206 gsym(tt);
5208 if (c == 0)
5209 nocode_wanted--;
5210 if (c == 1)
5211 nocode_wanted++;
5212 expr_cond();
5213 if (c == 1)
5214 nocode_wanted--;
5216 type2 = vtop->type;
5217 t1 = type1.t;
5218 bt1 = t1 & VT_BTYPE;
5219 t2 = type2.t;
5220 bt2 = t2 & VT_BTYPE;
5221 /* cast operands to correct type according to ISOC rules */
5222 if (is_float(bt1) || is_float(bt2)) {
5223 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5224 type.t = VT_LDOUBLE;
5226 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5227 type.t = VT_DOUBLE;
5228 } else {
5229 type.t = VT_FLOAT;
5231 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5232 /* cast to biggest op */
5233 type.t = VT_LLONG;
5234 /* convert to unsigned if it does not fit in a long long */
5235 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5236 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5237 type.t |= VT_UNSIGNED;
5238 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5239 /* If one is a null ptr constant the result type
5240 is the other. */
5241 if (is_null_pointer (vtop))
5242 type = type1;
5243 else if (is_null_pointer (&sv))
5244 type = type2;
5245 /* XXX: test pointer compatibility, C99 has more elaborate
5246 rules here. */
5247 else
5248 type = type1;
5249 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5250 /* XXX: test function pointer compatibility */
5251 type = bt1 == VT_FUNC ? type1 : type2;
5252 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5253 /* XXX: test structure compatibility */
5254 type = bt1 == VT_STRUCT ? type1 : type2;
5255 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5256 /* NOTE: as an extension, we accept void on only one side */
5257 type.t = VT_VOID;
5258 } else {
5259 /* integer operations */
5260 type.t = VT_INT;
5261 /* convert to unsigned if it does not fit in an integer */
5262 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5263 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5264 type.t |= VT_UNSIGNED;
5266 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5267 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5268 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5269 islv &= c < 0;
5271 /* now we convert second operand */
5272 if (c != 1) {
5273 gen_cast(&type);
5274 if (islv) {
5275 mk_pointer(&vtop->type);
5276 gaddrof();
5277 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5278 gaddrof();
5281 rc = RC_INT;
5282 if (is_float(type.t)) {
5283 rc = RC_FLOAT;
5284 #ifdef TCC_TARGET_X86_64
5285 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5286 rc = RC_ST0;
5288 #endif
5289 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5290 /* for long longs, we use fixed registers to avoid having
5291 to handle a complicated move */
5292 rc = RC_IRET;
5295 tt = r2 = 0;
5296 if (c < 0) {
5297 r2 = gv(rc);
5298 tt = gjmp(0);
5300 gsym(u);
5302 /* this is horrible, but we must also convert first
5303 operand */
5304 if (c != 0) {
5305 *vtop = sv;
5306 gen_cast(&type);
5307 if (islv) {
5308 mk_pointer(&vtop->type);
5309 gaddrof();
5310 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5311 gaddrof();
5314 if (c < 0) {
5315 r1 = gv(rc);
5316 move_reg(r2, r1, type.t);
5317 vtop->r = r2;
5318 gsym(tt);
5319 if (islv)
5320 indir();
5326 static void expr_eq(void)
5328 int t;
5330 expr_cond();
5331 if (tok == '=' ||
5332 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5333 tok == TOK_A_XOR || tok == TOK_A_OR ||
5334 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5335 test_lvalue();
5336 t = tok;
5337 next();
5338 if (t == '=') {
5339 expr_eq();
5340 } else {
5341 vdup();
5342 expr_eq();
5343 gen_op(t & 0x7f);
5345 vstore();
5349 ST_FUNC void gexpr(void)
5351 while (1) {
5352 expr_eq();
5353 if (tok != ',')
5354 break;
5355 vpop();
5356 next();
5360 /* parse an expression and return its type without any side effect. */
5361 static void expr_type(CType *type)
5364 nocode_wanted++;
5365 gexpr();
5366 *type = vtop->type;
5367 vpop();
5368 nocode_wanted--;
5371 /* parse a unary expression and return its type without any side
5372 effect. */
5373 static void unary_type(CType *type)
5375 nocode_wanted++;
5376 unary();
5377 *type = vtop->type;
5378 vpop();
5379 nocode_wanted--;
5382 /* parse a constant expression and return value in vtop. */
5383 static void expr_const1(void)
5385 const_wanted++;
5386 expr_cond();
5387 const_wanted--;
5390 /* parse an integer constant and return its value. */
5391 static inline int64_t expr_const64(void)
5393 int64_t c;
5394 expr_const1();
5395 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5396 expect("constant expression");
5397 c = vtop->c.i;
5398 vpop();
5399 return c;
5402 /* parse an integer constant and return its value.
5403 Complain if it doesn't fit 32bit (signed or unsigned). */
5404 ST_FUNC int expr_const(void)
5406 int c;
5407 int64_t wc = expr_const64();
5408 c = wc;
5409 if (c != wc && (unsigned)c != wc)
5410 tcc_error("constant exceeds 32 bit");
5411 return c;
5414 /* return the label token if current token is a label, otherwise
5415 return zero */
5416 static int is_label(void)
5418 int last_tok;
5420 /* fast test first */
5421 if (tok < TOK_UIDENT)
5422 return 0;
5423 /* no need to save tokc because tok is an identifier */
5424 last_tok = tok;
5425 next();
5426 if (tok == ':') {
5427 next();
5428 return last_tok;
5429 } else {
5430 unget_tok(last_tok);
5431 return 0;
5435 static void label_or_decl(int l)
5437 int last_tok;
5439 /* fast test first */
5440 if (tok >= TOK_UIDENT)
5442 /* no need to save tokc because tok is an identifier */
5443 last_tok = tok;
5444 next();
5445 if (tok == ':') {
5446 unget_tok(last_tok);
5447 return;
5449 unget_tok(last_tok);
5451 decl(l);
5454 static int case_cmp(const void *pa, const void *pb)
5456 int64_t a = (*(struct case_t**) pa)->v1;
5457 int64_t b = (*(struct case_t**) pb)->v1;
5458 return a < b ? -1 : a > b;
5461 static void gcase(struct case_t **base, int len, int *bsym)
5463 struct case_t *p;
5464 int e;
5465 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5466 gv(RC_INT);
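/* The case ranges in 'base' are sorted by v1 (see case_cmp); for more than 4
   entries a binary search on the middle range is performed below, otherwise
   the remaining entries are compared one by one in the linear scan. */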
5467 while (len > 4) {
5468 /* binary search */
5469 p = base[len/2];
5470 vdup();
5471 if (ll)
5472 vpushll(p->v2);
5473 else
5474 vpushi(p->v2);
5475 gen_op(TOK_LE);
5476 e = gtst(1, 0);
5477 vdup();
5478 if (ll)
5479 vpushll(p->v1);
5480 else
5481 vpushi(p->v1);
5482 gen_op(TOK_GE);
5483 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5484 /* x < v1 */
5485 gcase(base, len/2, bsym);
5486 if (cur_switch->def_sym)
5487 gjmp_addr(cur_switch->def_sym);
5488 else
5489 *bsym = gjmp(*bsym);
5490 /* x > v2 */
5491 gsym(e);
5492 e = len/2 + 1;
5493 base += e; len -= e;
5495 /* linear scan */
5496 while (len--) {
5497 p = *base++;
5498 vdup();
5499 if (ll)
5500 vpushll(p->v2);
5501 else
5502 vpushi(p->v2);
5503 if (p->v1 == p->v2) {
5504 gen_op(TOK_EQ);
5505 gtst_addr(0, p->sym);
5506 } else {
5507 gen_op(TOK_LE);
5508 e = gtst(1, 0);
5509 vdup();
5510 if (ll)
5511 vpushll(p->v1);
5512 else
5513 vpushi(p->v1);
5514 gen_op(TOK_GE);
5515 gtst_addr(0, p->sym);
5516 gsym(e);
5521 static void block(int *bsym, int *csym, int is_expr)
5523 int a, b, c, d, cond;
5524 Sym *s;
5526 /* generate line number info */
5527 if (tcc_state->do_debug &&
5528 (last_line_num != file->line_num || last_ind != ind)) {
5529 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5530 last_ind = ind;
5531 last_line_num = file->line_num;
5534 if (is_expr) {
5535 /* default return value is (void) */
5536 vpushi(0);
5537 vtop->type.t = VT_VOID;
5540 if (tok == TOK_IF) {
5541 /* if test */
5542 int saved_nocode_wanted = nocode_wanted;
5543 next();
5544 skip('(');
5545 gexpr();
5546 skip(')');
5547 cond = condition_3way();
5548 if (cond == 1)
5549 a = 0, vpop();
5550 else
5551 a = gvtst(1, 0);
5552 if (cond == 0)
5553 nocode_wanted |= 0x20000000;
5554 block(bsym, csym, 0);
5555 if (cond != 1)
5556 nocode_wanted = saved_nocode_wanted;
5557 c = tok;
5558 if (c == TOK_ELSE) {
5559 next();
5560 d = gjmp(0);
5561 gsym(a);
5562 if (cond == 1)
5563 nocode_wanted |= 0x20000000;
5564 block(bsym, csym, 0);
5565 gsym(d); /* patch else jmp */
5566 if (cond != 0)
5567 nocode_wanted = saved_nocode_wanted;
5568 } else
5569 gsym(a);
5570 } else if (tok == TOK_WHILE) {
5571 int saved_nocode_wanted;
5572 nocode_wanted &= ~0x20000000;
5573 next();
5574 d = ind;
5575 vla_sp_restore();
5576 skip('(');
5577 gexpr();
5578 skip(')');
5579 a = gvtst(1, 0);
5580 b = 0;
5581 ++local_scope;
5582 saved_nocode_wanted = nocode_wanted;
5583 block(&a, &b, 0);
5584 nocode_wanted = saved_nocode_wanted;
5585 --local_scope;
5586 gjmp_addr(d);
5587 gsym(a);
5588 gsym_addr(b, d);
5589 } else if (tok == '{') {
5590 Sym *llabel;
5591 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5593 next();
5594 /* record local declaration stack position */
5595 s = local_stack;
5596 llabel = local_label_stack;
5597 ++local_scope;
5599 /* handle local labels declarations */
5600 if (tok == TOK_LABEL) {
5601 next();
5602 for(;;) {
5603 if (tok < TOK_UIDENT)
5604 expect("label identifier");
5605 label_push(&local_label_stack, tok, LABEL_DECLARED);
5606 next();
5607 if (tok == ',') {
5608 next();
5609 } else {
5610 skip(';');
5611 break;
5615 while (tok != '}') {
5616 label_or_decl(VT_LOCAL);
5617 if (tok != '}') {
5618 if (is_expr)
5619 vpop();
5620 block(bsym, csym, is_expr);
5623 /* pop locally defined labels */
5624 label_pop(&local_label_stack, llabel);
5625 /* pop locally defined symbols */
5626 --local_scope;
5627 /* In the is_expr case (a statement expression is finished here),
5628 vtop might refer to symbols on the local_stack. Either via the
5629 type or via vtop->sym. We can't pop those nor any that in turn
5630 might be referred to. To make it easier we don't roll back
5631 any symbols in that case; some upper level call to block() will
5632 do that. We do have to remove such symbols from the lookup
5633 tables, though. sym_pop will do that. */
5634 sym_pop(&local_stack, s, is_expr);
5636 /* Pop VLA frames and restore stack pointer if required */
5637 if (vlas_in_scope > saved_vlas_in_scope) {
5638 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5639 vla_sp_restore();
5641 vlas_in_scope = saved_vlas_in_scope;
5643 next();
5644 } else if (tok == TOK_RETURN) {
5645 next();
5646 if (tok != ';') {
5647 gexpr();
5648 gen_assign_cast(&func_vt);
5649 #ifdef TCC_TARGET_ARM64
5650 // Perhaps it would be better to use this for all backends:
5651 greturn();
5652 #else
5653 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5654 CType type, ret_type;
5655 int ret_align, ret_nregs, regsize;
5656 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5657 &ret_align, &regsize);
5658 if (0 == ret_nregs) {
5659 /* if returning structure, must copy it to implicit
5660 first pointer arg location */
5661 type = func_vt;
5662 mk_pointer(&type);
5663 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5664 indir();
5665 vswap();
5666 /* copy structure value to pointer */
5667 vstore();
5668 } else {
5669 /* returning structure packed into registers */
5670 int r, size, addr, align;
5671 size = type_size(&func_vt,&align);
5672 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5673 (vtop->c.i & (ret_align-1)))
5674 && (align & (ret_align-1))) {
5675 loc = (loc - size) & -ret_align;
5676 addr = loc;
5677 type = func_vt;
5678 vset(&type, VT_LOCAL | VT_LVAL, addr);
5679 vswap();
5680 vstore();
5681 vpop();
5682 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5684 vtop->type = ret_type;
5685 if (is_float(ret_type.t))
5686 r = rc_fret(ret_type.t);
5687 else
5688 r = RC_IRET;
5690 if (ret_nregs == 1)
5691 gv(r);
5692 else {
5693 for (;;) {
5694 vdup();
5695 gv(r);
5696 vpop();
5697 if (--ret_nregs == 0)
5698 break;
5699 /* We assume that when a structure is returned in multiple
5700 registers, their classes are consecutive values of the
5701 sequence s(n) = 2^n */
5702 r <<= 1;
5703 vtop->c.i += regsize;
5707 } else if (is_float(func_vt.t)) {
5708 gv(rc_fret(func_vt.t));
5709 } else {
5710 gv(RC_IRET);
5712 #endif
5713 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5715 skip(';');
5716 /* jump unless last stmt in top-level block */
5717 if (tok != '}' || local_scope != 1)
5718 rsym = gjmp(rsym);
5719 nocode_wanted |= 0x20000000;
5720 } else if (tok == TOK_BREAK) {
5721 /* compute jump */
5722 if (!bsym)
5723 tcc_error("cannot break");
5724 *bsym = gjmp(*bsym);
5725 next();
5726 skip(';');
5727 nocode_wanted |= 0x20000000;
5728 } else if (tok == TOK_CONTINUE) {
5729 /* compute jump */
5730 if (!csym)
5731 tcc_error("cannot continue");
5732 vla_sp_restore_root();
5733 *csym = gjmp(*csym);
5734 next();
5735 skip(';');
5736 } else if (tok == TOK_FOR) {
5737 int e;
5738 int saved_nocode_wanted;
5739 nocode_wanted &= ~0x20000000;
5740 next();
5741 skip('(');
5742 s = local_stack;
5743 ++local_scope;
5744 if (tok != ';') {
5745 /* c99 for-loop init decl? */
5746 if (!decl0(VT_LOCAL, 1)) {
5747 /* no, regular for-loop init expr */
5748 gexpr();
5749 vpop();
5752 skip(';');
5753 d = ind;
5754 c = ind;
5755 vla_sp_restore();
5756 a = 0;
5757 b = 0;
5758 if (tok != ';') {
5759 gexpr();
5760 a = gvtst(1, 0);
5762 skip(';');
5763 if (tok != ')') {
5764 e = gjmp(0);
5765 c = ind;
5766 vla_sp_restore();
5767 gexpr();
5768 vpop();
5769 gjmp_addr(d);
5770 gsym(e);
5772 skip(')');
5773 saved_nocode_wanted = nocode_wanted;
5774 block(&a, &b, 0);
5775 nocode_wanted = saved_nocode_wanted;
5776 gjmp_addr(c);
5777 gsym(a);
5778 gsym_addr(b, c);
5779 --local_scope;
5780 sym_pop(&local_stack, s, 0);
5782 } else
5783 if (tok == TOK_DO) {
5784 int saved_nocode_wanted;
5785 nocode_wanted &= ~0x20000000;
5786 next();
5787 a = 0;
5788 b = 0;
5789 d = ind;
5790 vla_sp_restore();
5791 saved_nocode_wanted = nocode_wanted;
5792 block(&a, &b, 0);
5793 skip(TOK_WHILE);
5794 skip('(');
5795 gsym(b);
5796 gexpr();
5797 c = gvtst(0, 0);
5798 gsym_addr(c, d);
5799 nocode_wanted = saved_nocode_wanted;
5800 skip(')');
5801 gsym(a);
5802 skip(';');
5803 } else
5804 if (tok == TOK_SWITCH) {
5805 struct switch_t *saved, sw;
5806 int saved_nocode_wanted = nocode_wanted;
5807 SValue switchval;
5808 next();
5809 skip('(');
5810 gexpr();
5811 skip(')');
5812 switchval = *vtop--;
5813 a = 0;
5814 b = gjmp(0); /* jump to first case */
5815 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5816 saved = cur_switch;
5817 cur_switch = &sw;
5818 block(&a, csym, 0);
5819 nocode_wanted = saved_nocode_wanted;
5820 a = gjmp(a); /* add implicit break */
5821 /* case lookup */
5822 gsym(b);
5823 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5824 for (b = 1; b < sw.n; b++)
5825 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5826 tcc_error("duplicate case value");
5827 /* Our switch table sorting is signed, so the compared
5828 value needs to be as well when it's 64bit. */
5829 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5830 switchval.type.t &= ~VT_UNSIGNED;
5831 vpushv(&switchval);
5832 gcase(sw.p, sw.n, &a);
5833 vpop();
5834 if (sw.def_sym)
5835 gjmp_addr(sw.def_sym);
5836 dynarray_reset(&sw.p, &sw.n);
5837 cur_switch = saved;
5838 /* break label */
5839 gsym(a);
5840 } else
5841 if (tok == TOK_CASE) {
5842 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5843 if (!cur_switch)
5844 expect("switch");
5845 nocode_wanted &= ~0x20000000;
5846 next();
5847 cr->v1 = cr->v2 = expr_const64();
5848 if (gnu_ext && tok == TOK_DOTS) {
5849 next();
5850 cr->v2 = expr_const64();
5851 if (cr->v2 < cr->v1)
5852 tcc_warning("empty case range");
5854 cr->sym = ind;
5855 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5856 skip(':');
5857 is_expr = 0;
5858 goto block_after_label;
5859 } else
5860 if (tok == TOK_DEFAULT) {
5861 next();
5862 skip(':');
5863 if (!cur_switch)
5864 expect("switch");
5865 if (cur_switch->def_sym)
5866 tcc_error("too many 'default'");
5867 cur_switch->def_sym = ind;
5868 is_expr = 0;
5869 goto block_after_label;
5870 } else
5871 if (tok == TOK_GOTO) {
5872 next();
5873 if (tok == '*' && gnu_ext) {
5874 /* computed goto */
5875 next();
5876 gexpr();
5877 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5878 expect("pointer");
5879 ggoto();
5880 } else if (tok >= TOK_UIDENT) {
5881 s = label_find(tok);
5882 /* put forward definition if needed */
5883 if (!s) {
5884 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5885 } else {
5886 if (s->r == LABEL_DECLARED)
5887 s->r = LABEL_FORWARD;
5889 vla_sp_restore_root();
5890 if (s->r & LABEL_FORWARD)
5891 s->jnext = gjmp(s->jnext);
5892 else
5893 gjmp_addr(s->jnext);
5894 next();
5895 } else {
5896 expect("label identifier");
5898 skip(';');
5899 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5900 asm_instr();
5901 } else {
5902 b = is_label();
5903 if (b) {
5904 /* label case */
5905 s = label_find(b);
5906 if (s) {
5907 if (s->r == LABEL_DEFINED)
5908 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5909 gsym(s->jnext);
5910 s->r = LABEL_DEFINED;
5911 } else {
5912 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5914 s->jnext = ind;
5915 vla_sp_restore();
5916 /* we accept this, but it is a mistake */
5917 block_after_label:
5918 nocode_wanted &= ~0x20000000;
5919 if (tok == '}') {
5920 tcc_warning("deprecated use of label at end of compound statement");
5921 } else {
5922 if (is_expr)
5923 vpop();
5924 block(bsym, csym, is_expr);
5926 } else {
5927 /* expression case */
5928 if (tok != ';') {
5929 if (is_expr) {
5930 vpop();
5931 gexpr();
5932 } else {
5933 gexpr();
5934 vpop();
5937 skip(';');
5942 #define EXPR_CONST 1
5943 #define EXPR_ANY 2
5945 static void parse_init_elem(int expr_type)
5947 int saved_global_expr;
5948 switch(expr_type) {
5949 case EXPR_CONST:
5950 /* compound literals must be allocated globally in this case */
5951 saved_global_expr = global_expr;
5952 global_expr = 1;
5953 expr_const1();
5954 global_expr = saved_global_expr;
5955 /* NOTE: symbols are accepted */
5956 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5957 tcc_error("initializer element is not constant");
5958 break;
5959 case EXPR_ANY:
5960 expr_eq();
5961 break;
5965 /* t is the array or struct type. c is the array or struct
5966 address. cur_field is the pointer to the current
5967 value, for arrays the 'c' member contains the current start
5968 index and the 'r' contains the end index (in case of range init).
5969 'size_only' is true if only size info is needed (only used
5970 in arrays) */
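/* For illustration, the designators handled below cover both ISO C99 and GNU
   forms, e.g. (with a hypothetical struct point having members x and y):
       struct point p = { .y = 2, .x = 1 };
       int a[10] = { [2] = 7, [4 ... 6] = 9 };
   the '...' range form is the GNU extension guarded by gnu_ext. */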
5971 static void decl_designator(CType *type, Section *sec, unsigned long c,
5972 Sym **cur_field, int size_only)
5974 Sym *s, *f;
5975 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5976 CType type1;
5978 notfirst = 0;
5979 elem_size = 0;
5980 nb_elems = 1;
5981 if (gnu_ext && (l = is_label()) != 0)
5982 goto struct_field;
5983 while (tok == '[' || tok == '.') {
5984 if (tok == '[') {
5985 if (!(type->t & VT_ARRAY))
5986 expect("array type");
5987 s = type->ref;
5988 next();
5989 index = expr_const();
5990 if (index < 0 || (s->c >= 0 && index >= s->c))
5991 tcc_error("invalid index");
5992 if (tok == TOK_DOTS && gnu_ext) {
5993 next();
5994 index_last = expr_const();
5995 if (index_last < 0 ||
5996 (s->c >= 0 && index_last >= s->c) ||
5997 index_last < index)
5998 tcc_error("invalid index");
5999 } else {
6000 index_last = index;
6002 skip(']');
6003 if (!notfirst) {
6004 (*cur_field)->c = index;
6005 (*cur_field)->r = index_last;
6007 type = pointed_type(type);
6008 elem_size = type_size(type, &align);
6009 c += index * elem_size;
6010 /* NOTE: we only support ranges for last designator */
6011 nb_elems = index_last - index + 1;
6012 if (nb_elems != 1) {
6013 notfirst = 1;
6014 break;
6016 } else {
6017 next();
6018 l = tok;
6019 next();
6020 struct_field:
6021 if ((type->t & VT_BTYPE) != VT_STRUCT)
6022 expect("struct/union type");
6023 f = find_field(type, l);
6024 if (!f)
6025 expect("field");
6026 if (!notfirst)
6027 *cur_field = f;
6028 /* XXX: fix this mess by using explicit storage field */
6029 type1 = f->type;
6030 type1.t |= (type->t & ~VT_TYPE);
6031 type = &type1;
6032 c += f->c;
6034 notfirst = 1;
6036 if (notfirst) {
6037 if (tok == '=') {
6038 next();
6039 } else {
6040 if (!gnu_ext)
6041 expect("=");
6043 } else {
6044 if (type->t & VT_ARRAY) {
6045 index = (*cur_field)->c;
6046 if (type->ref->c >= 0 && index >= type->ref->c)
6047 tcc_error("index too large");
6048 type = pointed_type(type);
6049 c += index * type_size(type, &align);
6050 } else {
6051 f = *cur_field;
6052 if (!f)
6053 tcc_error("too many field init");
6054 /* XXX: fix this mess by using explicit storage field */
6055 type1 = f->type;
6056 type1.t |= (type->t & ~VT_TYPE);
6057 type = &type1;
6058 c += f->c;
6061 decl_initializer(type, sec, c, 0, size_only);
6063 /* XXX: make it more general */
6064 if (!size_only && nb_elems > 1) {
6065 unsigned long c_end;
6066 uint8_t *src, *dst;
6067 int i;
6069 if (!sec) {
6070 vset(type, VT_LOCAL|VT_LVAL, c);
6071 for (i = 1; i < nb_elems; i++) {
6072 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6073 vswap();
6074 vstore();
6076 vpop();
6077 } else {
6078 c_end = c + nb_elems * elem_size;
6079 if (c_end > sec->data_allocated)
6080 section_realloc(sec, c_end);
6081 src = sec->data + c;
6082 dst = src;
6083 for(i = 1; i < nb_elems; i++) {
6084 dst += elem_size;
6085 memcpy(dst, src, elem_size);
6091 /* store a value or an expression directly in global data or in local array */
6092 static void init_putv(CType *type, Section *sec, unsigned long c)
6094 int bt, bit_pos, bit_size;
6095 void *ptr;
6096 unsigned long long bit_mask;
6097 CType dtype;
6099 dtype = *type;
6100 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6102 if (sec) {
6103 int size, align;
6104 /* XXX: not portable */
6105 /* XXX: generate error if incorrect relocation */
6106 gen_assign_cast(&dtype);
6107 bt = type->t & VT_BTYPE;
6108 size = type_size(type, &align);
6109 if (c + size > sec->data_allocated) {
6110 section_realloc(sec, c + size);
6112 ptr = sec->data + c;
6113 /* XXX: make code faster ? */
6114 if (!(type->t & VT_BITFIELD)) {
6115 bit_pos = 0;
6116 bit_size = PTR_SIZE * 8;
6117 bit_mask = -1LL;
6118 } else {
6119 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6120 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6121 bit_mask = (1LL << bit_size) - 1;
6123 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6124 vtop->sym->v >= SYM_FIRST_ANOM &&
6125 /* XXX This rejects compound literals like
6126 '(void *){ptr}'. The problem is that '&sym' is
6127 represented the same way, which would be ruled out
6128 by the SYM_FIRST_ANOM check above, but also '"string"'
6129 in 'char *p = "string"' is represented the same
6130 with the type being VT_PTR and the symbol being an
6131 anonymous one. That is, there's no difference in vtop
6132 between '(void *){x}' and '&(void *){x}'. Ignore
6133 pointer typed entities here. Hopefully no real code
6134 will ever use compound literals with scalar type. */
6135 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6136 /* These come from compound literals, memcpy stuff over. */
6137 Section *ssec;
6138 ElfW(Sym) *esym;
6139 ElfW_Rel *rel;
6140 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6141 ssec = tcc_state->sections[esym->st_shndx];
6142 memmove (ptr, ssec->data + esym->st_value, size);
6143 if (ssec->reloc) {
6144 /* We need to copy over all memory contents, and that
6145 includes relocations. Use the fact that relocs are
6146 created in order, so look from the end of relocs
6147 until we hit one before the copied region. */
6148 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6149 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6150 while (num_relocs--) {
6151 rel--;
6152 if (rel->r_offset >= esym->st_value + size)
6153 continue;
6154 if (rel->r_offset < esym->st_value)
6155 break;
6156 /* Note: if the same fields are initialized multiple
6157 times (possible with designators) then we possibly
6158 add multiple relocations for the same offset here.
6159 That would lead to wrong code; the last reloc needs
6160 to win. We clean this up later after the whole
6161 initializer is parsed. */
6162 put_elf_reloca(symtab_section, sec,
6163 c + rel->r_offset - esym->st_value,
6164 ELFW(R_TYPE)(rel->r_info),
6165 ELFW(R_SYM)(rel->r_info),
6166 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6167 rel->r_addend
6168 #else
6170 #endif
6174 } else {
6175 if ((vtop->r & VT_SYM) &&
6176 (bt == VT_BYTE ||
6177 bt == VT_SHORT ||
6178 bt == VT_DOUBLE ||
6179 bt == VT_LDOUBLE ||
6180 #if PTR_SIZE == 8
6181 (bt == VT_LLONG && bit_size != 64) ||
6182 bt == VT_INT
6183 #else
6184 bt == VT_LLONG ||
6185 (bt == VT_INT && bit_size != 32)
6186 #endif
6188 tcc_error("initializer element is not computable at load time");
6189 switch(bt) {
6190 /* XXX: when cross-compiling we assume that each type has the
6191 same representation on host and target, which is likely to
6192 be wrong in the case of long double */
6193 case VT_BOOL:
6194 vtop->c.i = (vtop->c.i != 0);
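/* fall through: a bool is stored like a byte */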
6195 case VT_BYTE:
6196 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6197 break;
6198 case VT_SHORT:
6199 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6200 break;
6201 case VT_DOUBLE:
6202 *(double *)ptr = vtop->c.d;
6203 break;
6204 case VT_LDOUBLE:
6205 if (sizeof(long double) == LDOUBLE_SIZE)
6206 *(long double *)ptr = vtop->c.ld;
6207 else if (sizeof(double) == LDOUBLE_SIZE)
6208 *(double *)ptr = vtop->c.ld;
6209 else
6210 tcc_error("can't cross compile long double constants");
6211 break;
6212 #if PTR_SIZE != 8
6213 case VT_LLONG:
6214 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6215 break;
6216 #else
6217 case VT_LLONG:
6218 #endif
6219 case VT_PTR:
6221 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6222 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6223 if (vtop->r & VT_SYM)
6224 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6225 else
6226 *(addr_t *)ptr |= val;
6227 #else
6228 if (vtop->r & VT_SYM)
6229 greloc(sec, vtop->sym, c, R_DATA_PTR);
6230 *(addr_t *)ptr |= val;
6231 #endif
6232 break;
6234 default:
6236 int val = (vtop->c.i & bit_mask) << bit_pos;
6237 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6238 if (vtop->r & VT_SYM)
6239 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6240 else
6241 *(int *)ptr |= val;
6242 #else
6243 if (vtop->r & VT_SYM)
6244 greloc(sec, vtop->sym, c, R_DATA_PTR);
6245 *(int *)ptr |= val;
6246 #endif
6247 break;
6251 vtop--;
6252 } else {
6253 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6254 vswap();
6255 vstore();
6256 vpop();
6260 /* put zeros for variable based init */
6261 static void init_putz(Section *sec, unsigned long c, int size)
6263 if (sec) {
6264 /* nothing to do because globals are already set to zero */
6265 } else {
6266 vpush_global_sym(&func_old_type, TOK_memset);
6267 vseti(VT_LOCAL, c);
6268 #ifdef TCC_TARGET_ARM
6269 vpushs(size);
6270 vpushi(0);
6271 #else
6272 vpushi(0);
6273 vpushs(size);
6274 #endif
6275 gfunc_call(3);
6279 /* 't' contains the type and storage info. 'c' is the offset of the
6280 object in section 'sec'. If 'sec' is NULL, it means stack based
6281 allocation. 'first' is true if array '{' must be read (multi
6282 dimension implicit array init handling). 'size_only' is true if
6283 size only evaluation is wanted (only for arrays). */
6284 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6285 int first, int size_only)
6287 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6288 int size1, align1;
6289 int have_elem;
6290 Sym *s, *f;
6291 Sym indexsym;
6292 CType *t1;
6294 /* If we are currently at a '}' or ',', we have read an initializer
6295 element in one of our callers, and not yet consumed it. */
6296 have_elem = tok == '}' || tok == ',';
6297 if (!have_elem && tok != '{' &&
6298 /* In case of strings we have special handling for arrays, so
6299 don't consume them as initializer value (which would commit them
6300 to some anonymous symbol). */
6301 tok != TOK_LSTR && tok != TOK_STR &&
6302 !size_only) {
6303 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6304 have_elem = 1;
6307 if (have_elem &&
6308 !(type->t & VT_ARRAY) &&
6309 /* Use i_c_parameter_t to strip toplevel qualifiers.
6310 The source type might have VT_CONSTANT set, which is
6311 of course assignable to non-const elements. */
6312 is_compatible_parameter_types(type, &vtop->type)) {
6313 init_putv(type, sec, c);
6314 } else if (type->t & VT_ARRAY) {
6315 s = type->ref;
6316 n = s->c;
6317 array_length = 0;
6318 t1 = pointed_type(type);
6319 size1 = type_size(t1, &align1);
6321 no_oblock = 1;
6322 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6323 tok == '{') {
6324 if (tok != '{')
6325 tcc_error("character array initializer must be a literal,"
6326 " optionally enclosed in braces");
6327 skip('{');
6328 no_oblock = 0;
6331 /* only parse strings here if correct type (otherwise: handle
6332 them as ((w)char *) expressions) */
6333 if ((tok == TOK_LSTR &&
6334 #ifdef TCC_TARGET_PE
6335 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6336 #else
6337 (t1->t & VT_BTYPE) == VT_INT
6338 #endif
6339 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6340 while (tok == TOK_STR || tok == TOK_LSTR) {
6341 int cstr_len, ch;
6343 /* compute maximum number of chars wanted */
6344 if (tok == TOK_STR)
6345 cstr_len = tokc.str.size;
6346 else
6347 cstr_len = tokc.str.size / sizeof(nwchar_t);
6348 cstr_len--;
6349 nb = cstr_len;
6350 if (n >= 0 && nb > (n - array_length))
6351 nb = n - array_length;
6352 if (!size_only) {
6353 if (cstr_len > nb)
6354 tcc_warning("initializer-string for array is too long");
6355 /* in order to go faster for the common case (char
6356 string in a global variable), we handle it
6357 specially */
6358 if (sec && tok == TOK_STR && size1 == 1) {
6359 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6360 } else {
6361 for(i=0;i<nb;i++) {
6362 if (tok == TOK_STR)
6363 ch = ((unsigned char *)tokc.str.data)[i];
6364 else
6365 ch = ((nwchar_t *)tokc.str.data)[i];
6366 vpushi(ch);
6367 init_putv(t1, sec, c + (array_length + i) * size1);
6371 array_length += nb;
6372 next();
6374 /* only add trailing zero if enough storage (no
6375 warning in this case since it is standard) */
6376 if (n < 0 || array_length < n) {
6377 if (!size_only) {
6378 vpushi(0);
6379 init_putv(t1, sec, c + (array_length * size1));
6381 array_length++;
6383 } else {
6384 indexsym.c = 0;
6385 indexsym.r = 0;
6386 f = &indexsym;
6388 do_init_list:
6389 while (tok != '}' || have_elem) {
6390 decl_designator(type, sec, c, &f, size_only);
6391 have_elem = 0;
6392 index = f->c;
6393 /* must put zero in holes (note that doing it that way
6394 ensures that it even works with designators) */
6395 if (!size_only && array_length < index) {
6396 init_putz(sec, c + array_length * size1,
6397 (index - array_length) * size1);
6399 if (type->t & VT_ARRAY) {
6400 index = indexsym.c = ++indexsym.r;
6401 } else {
6402 index = index + type_size(&f->type, &align1);
6403 if (s->type.t == TOK_UNION)
6404 f = NULL;
6405 else
6406 f = f->next;
6408 if (index > array_length)
6409 array_length = index;
6411 if (type->t & VT_ARRAY) {
6412 /* special test for multi dimensional arrays (may not
6413 be strictly correct if designators are used at the
6414 same time) */
6415 if (no_oblock && index >= n)
6416 break;
6417 } else {
6418 if (no_oblock && f == NULL)
6419 break;
6421 if (tok == '}')
6422 break;
6423 skip(',');
6426 /* put zeros at the end */
6427 if (!size_only && array_length < n) {
6428 init_putz(sec, c + array_length * size1,
6429 (n - array_length) * size1);
6431 if (!no_oblock)
6432 skip('}');
6433 /* patch type size if needed, which happens only for array types */
6434 if (n < 0)
6435 s->c = array_length;
6436 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6437 size1 = 1;
6438 no_oblock = 1;
6439 if (first || tok == '{') {
6440 skip('{');
6441 no_oblock = 0;
6443 s = type->ref;
6444 f = s->next;
6445 array_length = 0;
6446 n = s->c;
6447 goto do_init_list;
6448 } else if (tok == '{') {
6449 next();
6450 decl_initializer(type, sec, c, first, size_only);
6451 skip('}');
6452 } else if (size_only) {
6453 /* If we supported only ISO C we wouldn't have to accept calling
6454 this on anything other than an array with size_only==1 (and even then
6455 only on the outermost level, so no recursion would be needed),
6456 because initializing a flex array member isn't supported.
6457 But GNU C supports it, so we need to recurse even into
6458 subfields of structs and arrays when size_only is set. */
6459 /* just skip expression */
6460 parlevel = parlevel1 = 0;
6461 while ((parlevel > 0 || parlevel1 > 0 ||
6462 (tok != '}' && tok != ',')) && tok != -1) {
6463 if (tok == '(')
6464 parlevel++;
6465 else if (tok == ')') {
6466 if (parlevel == 0 && parlevel1 == 0)
6467 break;
6468 parlevel--;
6470 else if (tok == '{')
6471 parlevel1++;
6472 else if (tok == '}') {
6473 if (parlevel == 0 && parlevel1 == 0)
6474 break;
6475 parlevel1--;
6477 next();
6479 } else {
6480 if (!have_elem) {
6481 /* This should happen only when we haven't parsed
6482 the init element above for fear of committing a
6483 string constant to memory too early. */
6484 if (tok != TOK_STR && tok != TOK_LSTR)
6485 expect("string constant");
6486 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6488 init_putv(type, sec, c);
6492 /* parse an initializer for type 't' if 'has_init' is non zero, and
6493 allocate space in local or global data space ('r' is either
6494 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6495 variable 'v' of scope 'scope' is declared before initializers
6496 are parsed. If 'v' is zero, then a reference to the new object
6497 is put in the value stack. If 'has_init' is 2, a special parsing
6498 is done to handle string constants. */
6499 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6500 int has_init, int v, int scope)
6502 int size, align, addr, data_offset;
6503 int level;
6504 ParseState saved_parse_state = {0};
6505 TokenString *init_str = NULL;
6506 Section *sec;
6507 Sym *flexible_array;
6509 flexible_array = NULL;
6510 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6511 Sym *field = type->ref->next;
6512 if (field) {
6513 while (field->next)
6514 field = field->next;
6515 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6516 flexible_array = field;
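/* For illustration: in 'struct s { int n; char data[]; };' the member 'data'
   has an array type of unknown size (ref->c < 0) and is recorded as the
   flexible array member here. */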
6520 size = type_size(type, &align);
6521 /* If unknown size, we must evaluate it before
6522 evaluating initializers because
6523 initializers can generate global data too
6524 (e.g. string pointers or ISOC99 compound
6525 literals). It also simplifies local
6526 initializers handling */
6527 if (size < 0 || (flexible_array && has_init)) {
6528 if (!has_init)
6529 tcc_error("unknown type size");
6530 /* get all init string */
6531 init_str = tok_str_alloc();
6532 if (has_init == 2) {
6533 /* only get strings */
6534 while (tok == TOK_STR || tok == TOK_LSTR) {
6535 tok_str_add_tok(init_str);
6536 next();
6538 } else {
6539 level = 0;
6540 while (level > 0 || (tok != ',' && tok != ';')) {
6541 if (tok < 0)
6542 tcc_error("unexpected end of file in initializer");
6543 tok_str_add_tok(init_str);
6544 if (tok == '{')
6545 level++;
6546 else if (tok == '}') {
6547 level--;
6548 if (level <= 0) {
6549 next();
6550 break;
6553 next();
6556 tok_str_add(init_str, -1);
6557 tok_str_add(init_str, 0);
6559 /* compute size */
6560 save_parse_state(&saved_parse_state);
6562 begin_macro(init_str, 1);
6563 next();
6564 decl_initializer(type, NULL, 0, 1, 1);
6565 /* prepare second initializer parsing */
6566 macro_ptr = init_str->str;
6567 next();
6569 /* if still unknown size, error */
6570 size = type_size(type, &align);
6571 if (size < 0)
6572 tcc_error("unknown type size");
6574 /* If there's a flex member and it was used in the initializer
6575 adjust size. */
6576 if (flexible_array &&
6577 flexible_array->type.ref->c > 0)
6578 size += flexible_array->type.ref->c
6579 * pointed_size(&flexible_array->type);
6580 /* take into account specified alignment if bigger */
6581 if (ad->a.aligned) {
6582 int speca = 1 << (ad->a.aligned - 1);
6583 if (speca > align)
6584 align = speca;
6585 } else if (ad->a.packed) {
6586 align = 1;
6588 if ((r & VT_VALMASK) == VT_LOCAL) {
6589 sec = NULL;
6590 #ifdef CONFIG_TCC_BCHECK
6591 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6592 loc--;
6594 #endif
6595 loc = (loc - size) & -align;
6596 addr = loc;
6597 #ifdef CONFIG_TCC_BCHECK
6598 /* handles bounds */
6599 /* XXX: currently, since we do only one pass, we cannot track
6600 '&' operators, so we add only arrays */
6601 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6602 addr_t *bounds_ptr;
6603 /* add padding between regions */
6604 loc--;
6605 /* then add local bound info */
6606 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6607 bounds_ptr[0] = addr;
6608 bounds_ptr[1] = size;
6610 #endif
6611 if (v) {
6612 /* local variable */
6613 #ifdef CONFIG_TCC_ASM
6614 if (ad->asm_label) {
6615 int reg = asm_parse_regvar(ad->asm_label);
6616 if (reg >= 0)
6617 r = (r & ~VT_VALMASK) | reg;
6619 #endif
6620 sym_push(v, type, r, addr);
6621 } else {
6622 /* push local reference */
6623 vset(type, r, addr);
6625 } else {
6626 Sym *sym;
6628 sym = NULL;
6629 if (v && scope == VT_CONST) {
6630 /* see if the symbol was already defined */
6631 sym = sym_find(v);
6632 if (sym) {
6633 if (!is_compatible_types(&sym->type, type))
6634 tcc_error("incompatible types for redefinition of '%s'",
6635 get_tok_str(v, NULL));
6636 if (sym->type.t & VT_EXTERN) {
6637 /* if the variable is extern, it was not allocated */
6638 sym->type.t &= ~VT_EXTERN;
6639 /* set array size if it was omitted in extern
6640 declaration */
6641 if ((sym->type.t & VT_ARRAY) &&
6642 sym->type.ref->c < 0 &&
6643 type->ref->c >= 0)
6644 sym->type.ref->c = type->ref->c;
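/* Typical case: completing a previous extern declaration, e.g.
       extern int tab[];     (size unknown, ref->c < 0)
       int tab[10];          (supplies the size and the storage)      */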
6645 } else {
6646 /* we accept several definitions of the same
6647 global variable. This is tricky, because we
6648 must play with the SHN_COMMON type of the symbol */
6649 /* XXX: should check whether the variable was already
6650 initialized; it is incorrect to initialize it
6651 twice */
6652 /* no init data, we won't add more to the symbol */
6653 if (!has_init)
6654 goto no_alloc;
6659 /* allocate symbol in corresponding section */
6660 sec = ad->section;
6661 if (!sec) {
6662 if (has_init)
6663 sec = data_section;
6664 else if (tcc_state->nocommon)
6665 sec = bss_section;
6667 if (sec) {
6668 data_offset = sec->data_offset;
6669 data_offset = (data_offset + align - 1) & -align;
6670 addr = data_offset;
6671 /* very important to increment global pointer at this time
6672 because initializers themselves can create new initializers */
6673 data_offset += size;
6674 #ifdef CONFIG_TCC_BCHECK
6676 /* add padding if bounds checking is enabled */
6676 if (tcc_state->do_bounds_check)
6677 data_offset++;
6678 #endif
6679 sec->data_offset = data_offset;
6680 /* allocate section space to put the data */
6681 if (sec->sh_type != SHT_NOBITS &&
6682 data_offset > sec->data_allocated)
6683 section_realloc(sec, data_offset);
6684 /* align section if needed */
6685 if (align > sec->sh_addralign)
6686 sec->sh_addralign = align;
6687 } else {
6688 addr = 0; /* avoid warning */
6691 if (v) {
6692 if (scope != VT_CONST || !sym) {
6693 sym = sym_push(v, type, r | VT_SYM, 0);
6694 sym->asm_label = ad->asm_label;
6696 /* update symbol definition */
6697 if (sec) {
6698 put_extern_sym(sym, sec, addr, size);
6699 } else {
6700 ElfW(Sym) *esym;
6701 /* put a common area */
6702 put_extern_sym(sym, NULL, align, size);
6703 /* XXX: find a nicer way */
6704 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6705 esym->st_shndx = SHN_COMMON;
6707 } else {
6708 /* push global reference */
6709 sym = get_sym_ref(type, sec, addr, size);
6710 vpushsym(type, sym);
6712 /* patch symbol weakness */
6713 if (type->t & VT_WEAK)
6714 weaken_symbol(sym);
6715 apply_visibility(sym, type);
6716 #ifdef CONFIG_TCC_BCHECK
6717 /* handle bounds now because the symbol must be defined
6718 before the relocation */
6719 if (tcc_state->do_bounds_check) {
6720 addr_t *bounds_ptr;
6722 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6723 /* then add global bound info */
6724 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6725 bounds_ptr[0] = 0; /* relocated */
6726 bounds_ptr[1] = size;
6728 #endif
6730 if (type->t & VT_VLA) {
6731 int a;
6733 /* save current stack pointer */
6734 if (vlas_in_scope == 0) {
6735 if (vla_sp_root_loc == -1)
6736 vla_sp_root_loc = (loc -= PTR_SIZE);
6737 gen_vla_sp_save(vla_sp_root_loc);
6740 vla_runtime_type_size(type, &a);
6741 gen_vla_alloc(type, a);
6742 gen_vla_sp_save(addr);
6743 vla_sp_loc = addr;
6744 vlas_in_scope++;
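/* Example of the VLA path:
       void f(int n) { int a[n]; ... }
   the current stack pointer is saved once per scope, the array is
   allocated at run time with gen_vla_alloc(), and its own sp value is
   saved so the stack can be restored when the VLA goes out of scope. */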
6745 } else if (has_init) {
6746 size_t oldreloc_offset = 0;
6747 if (sec && sec->reloc)
6748 oldreloc_offset = sec->reloc->data_offset;
6749 decl_initializer(type, sec, addr, 1, 0);
6750 if (sec && sec->reloc)
6751 squeeze_multi_relocs(sec, oldreloc_offset);
6752 /* patch flexible array member size back to -1, */
6753 /* for possible subsequent similar declarations */
6754 if (flexible_array)
6755 flexible_array->type.ref->c = -1;
6757 no_alloc: ;
6758 /* restore parse state if needed */
6759 if (init_str) {
6760 end_macro();
6761 restore_parse_state(&saved_parse_state);
6762 }
6763 }
6765 static void put_func_debug(Sym *sym)
6766 {
6767 char buf[512];
6769 /* stabs info */
6770 /* XXX: we put here a dummy type */
6771 snprintf(buf, sizeof(buf), "%s:%c1",
6772 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6773 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6774 cur_text_section, sym->c);
6775 /* gdb wants a line number entry at the start of the function */
6776 put_stabn(N_SLINE, 0, file->line_num, 0);
6777 last_ind = 0;
6778 last_line_num = 0;
6779 }
6781 /* parse an old style function declaration list */
6782 /* XXX: check multiple parameters */
6783 static void func_decl_list(Sym *func_sym)
6784 {
6785 AttributeDef ad;
6786 int v;
6787 Sym *s;
6788 CType btype, type;
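/* An old-style (K&R) definition puts the parameter types in a
   declaration list between the parameter names and the body, e.g.
       int f(a, b)
           int a;
           char *b;
       { return a + *b; }
   each declaration parsed below is matched against the parameter
   symbols of 'func_sym'. */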
6790 /* parse each declaration */
6791 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6792 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6793 if (!parse_btype(&btype, &ad))
6794 expect("declaration list");
6795 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6796 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6797 tok == ';') {
6798 /* we accept no variable after */
6799 } else {
6800 for(;;) {
6801 type = btype;
6802 type_decl(&type, &ad, &v, TYPE_DIRECT);
6803 /* find parameter in function parameter list */
6804 s = func_sym->next;
6805 while (s != NULL) {
6806 if ((s->v & ~SYM_FIELD) == v)
6807 goto found;
6808 s = s->next;
6810 tcc_error("declaration for parameter '%s' but no such parameter",
6811 get_tok_str(v, NULL));
6812 found:
6813 /* check that no storage specifier except 'register' was given */
6814 if (type.t & VT_STORAGE)
6815 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6816 convert_parameter_type(&type);
6817 /* we can add the type (NOTE: it could be local to the function) */
6818 s->type = type;
6819 /* accept other parameters */
6820 if (tok == ',')
6821 next();
6822 else
6823 break;
6824 }
6825 }
6826 skip(';');
6827 }
6828 }
6830 /* parse a function defined by symbol 'sym' and generate its code in
6831 'cur_text_section' */
6832 static void gen_function(Sym *sym)
6833 {
6834 nocode_wanted = 0;
6835 ind = cur_text_section->data_offset;
6836 /* NOTE: we patch the symbol size later */
6837 put_extern_sym(sym, cur_text_section, ind, 0);
6838 funcname = get_tok_str(sym->v, NULL);
6839 func_ind = ind;
6840 /* Initialize VLA state */
6841 vla_sp_loc = -1;
6842 vla_sp_root_loc = -1;
6843 /* put debug symbol */
6844 if (tcc_state->do_debug)
6845 put_func_debug(sym);
6847 /* push a dummy symbol to enable local sym storage */
6848 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6849 local_scope = 1; /* for function parameters */
6850 gfunc_prolog(&sym->type);
6851 local_scope = 0;
6853 rsym = 0;
6854 block(NULL, NULL, 0);
6855 nocode_wanted = 0;
6856 gsym(rsym);
6857 gfunc_epilog();
6858 cur_text_section->data_offset = ind;
6859 label_pop(&global_label_stack, NULL);
6860 /* reset local stack */
6861 local_scope = 0;
6862 sym_pop(&local_stack, NULL, 0);
6863 /* end of function */
6864 /* patch symbol size */
6865 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6866 ind - func_ind;
6867 /* patch symbol weakness (this definition overrules any prototype) */
6868 if (sym->type.t & VT_WEAK)
6869 weaken_symbol(sym);
6870 apply_visibility(sym, &sym->type);
6871 if (tcc_state->do_debug) {
6872 put_stabn(N_FUN, 0, 0, ind - func_ind);
6874 /* It's better to crash than to generate wrong code */
6875 cur_text_section = NULL;
6876 funcname = ""; /* for safety */
6877 func_vt.t = VT_VOID; /* for safety */
6878 func_var = 0; /* for safety */
6879 ind = 0; /* for safety */
6880 nocode_wanted = 1;
6881 check_vstack();
6882 }
6884 static void gen_inline_functions(TCCState *s)
6885 {
6886 Sym *sym;
6887 int inline_generated, i, ln;
6888 struct InlineFunc *fn;
6890 ln = file->line_num;
6891 /* iterate while inline functions are referenced */
6892 for(;;) {
6893 inline_generated = 0;
6894 for (i = 0; i < s->nb_inline_fns; ++i) {
6895 fn = s->inline_fns[i];
6896 sym = fn->sym;
6897 if (sym && sym->c) {
6898 /* the function was used: generate its code and
6899 convert it to a normal function */
6900 fn->sym = NULL;
6901 if (file)
6902 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6903 sym->r = VT_SYM | VT_CONST;
6904 sym->type.t &= ~VT_INLINE;
6906 begin_macro(fn->func_str, 1);
6907 next();
6908 cur_text_section = text_section;
6909 gen_function(sym);
6910 end_macro();
6912 inline_generated = 1;
6915 if (!inline_generated)
6916 break;
6917 }
6918 file->line_num = ln;
6919 }
6921 ST_FUNC void free_inline_functions(TCCState *s)
6922 {
6923 int i;
6924 /* free tokens of unused inline functions */
6925 for (i = 0; i < s->nb_inline_fns; ++i) {
6926 struct InlineFunc *fn = s->inline_fns[i];
6927 if (fn->sym)
6928 tok_str_free(fn->func_str);
6929 }
6930 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6931 }
6933 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6934 static int decl0(int l, int is_for_loop_init)
6935 {
6936 int v, has_init, r;
6937 CType type, btype;
6938 Sym *sym;
6939 AttributeDef ad;
6941 while (1) {
6942 if (!parse_btype(&btype, &ad)) {
6943 if (is_for_loop_init)
6944 return 0;
6945 /* skip redundant ';' */
6946 /* XXX: find more elegant solution */
6947 if (tok == ';') {
6948 next();
6949 continue;
6951 if (l == VT_CONST &&
6952 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6953 /* global asm block */
6954 asm_global_instr();
6955 continue;
6957 /* special test for old K&R protos without explicit int
6958 type. Only accepted when defining global data */
6959 if (l == VT_LOCAL || tok < TOK_UIDENT)
6960 break;
6961 btype.t = VT_INT;
6963 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6964 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6965 tok == ';') {
6966 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6967 int v = btype.ref->v;
6968 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6969 tcc_warning("unnamed struct/union that defines no instances");
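/* e.g.  struct { int x; };  at file scope declares no object and no
   usable tag, hence the warning; a tagged  struct s { int x; };  or a
   declaration with an instance does not trigger it. */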
6971 next();
6972 continue;
6974 while (1) { /* iterate thru each declaration */
6975 type = btype;
6976 /* If the base type itself was an array type of unspecified
6977 size (like in 'typedef int arr[]; arr x = {1};') then
6978 we will overwrite the unknown size by the real one for
6979 this decl. We need to unshare the ref symbol holding
6980 that size. */
6981 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6982 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6984 type_decl(&type, &ad, &v, TYPE_DIRECT);
6985 #if 0
6987 char buf[500];
6988 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
6989 printf("type = '%s'\n", buf);
6991 #endif
6992 if ((type.t & VT_BTYPE) == VT_FUNC) {
6993 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6994 tcc_error("function without file scope cannot be static");
6996 /* if old style function prototype, we accept a
6997 declaration list */
6998 sym = type.ref;
6999 if (sym->c == FUNC_OLD)
7000 func_decl_list(sym);
7003 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7004 ad.asm_label = asm_label_instr();
7005 /* parse one last attribute list, after asm label */
7006 parse_attribute(&ad);
7007 if (tok == '{')
7008 expect(";");
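/* GNU extension: an asm label renames the generated symbol, e.g.
       int myfunc(void) asm("real_impl");
   a '{' right after it is rejected here (';' expected), so a
   definition cannot carry an asm label on this path. */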
7011 if (ad.a.weak)
7012 type.t |= VT_WEAK;
7013 #ifdef TCC_TARGET_PE
7014 if (ad.a.func_import)
7015 type.t |= VT_IMPORT;
7016 if (ad.a.func_export)
7017 type.t |= VT_EXPORT;
7018 #endif
7019 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7021 if (tok == '{') {
7022 if (l == VT_LOCAL)
7023 tcc_error("cannot use local functions");
7024 if ((type.t & VT_BTYPE) != VT_FUNC)
7025 expect("function definition");
7027 /* reject abstract declarators in function definition */
7028 sym = type.ref;
7029 while ((sym = sym->next) != NULL)
7030 if (!(sym->v & ~SYM_FIELD))
7031 expect("identifier");
7033 /* XXX: cannot do better now: convert extern inline to static inline */
7034 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7035 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
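/* e.g.  extern inline int sq(int x) { return x * x; }
   is downgraded to a static inline definition, since the real
   'extern inline' semantics are not implemented (see the XXX above). */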
7037 sym = sym_find(v);
7038 if (sym) {
7039 Sym *ref;
7040 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7041 goto func_error1;
7043 ref = sym->type.ref;
7044 if (0 == ref->a.func_proto)
7045 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7047 /* use func_call from prototype if not defined */
7048 if (ref->a.func_call != FUNC_CDECL
7049 && type.ref->a.func_call == FUNC_CDECL)
7050 type.ref->a.func_call = ref->a.func_call;
7052 /* use export from prototype */
7053 if (ref->a.func_export)
7054 type.ref->a.func_export = 1;
7056 /* use static from prototype */
7057 if (sym->type.t & VT_STATIC)
7058 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7060 /* If the definition has no visibility use the
7061 one from prototype. */
7062 if (! (type.t & VT_VIS_MASK))
7063 type.t |= sym->type.t & VT_VIS_MASK;
7065 if (!is_compatible_types(&sym->type, &type)) {
7066 func_error1:
7067 tcc_error("incompatible types for redefinition of '%s'",
7068 get_tok_str(v, NULL));
7070 type.ref->a.func_proto = 0;
7071 /* if symbol is already defined, then put complete type */
7072 sym->type = type;
7073 } else {
7074 /* put function symbol */
7075 sym = global_identifier_push(v, type.t, 0);
7076 sym->type.ref = type.ref;
7079 /* static inline functions are just recorded as a kind
7080 of macro. Their code will be emitted at the end of
7081 the compilation unit only if they are used */
7082 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7083 (VT_INLINE | VT_STATIC)) {
7084 int block_level;
7085 struct InlineFunc *fn;
7086 const char *filename;
7088 filename = file ? file->filename : "";
7089 fn = tcc_malloc(sizeof *fn + strlen(filename));
7090 strcpy(fn->filename, filename);
7091 fn->sym = sym;
7092 fn->func_str = tok_str_alloc();
7094 block_level = 0;
7095 for(;;) {
7096 int t;
7097 if (tok == TOK_EOF)
7098 tcc_error("unexpected end of file");
7099 tok_str_add_tok(fn->func_str);
7100 t = tok;
7101 next();
7102 if (t == '{') {
7103 block_level++;
7104 } else if (t == '}') {
7105 block_level--;
7106 if (block_level == 0)
7107 break;
7110 tok_str_add(fn->func_str, -1);
7111 tok_str_add(fn->func_str, 0);
7112 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
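/* e.g.  static inline int sq(int x) { return x * x; }
   only records its tokens here; gen_inline_functions() emits the code
   later, and only if the function was actually referenced. */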
7114 } else {
7115 /* compute text section */
7116 cur_text_section = ad.section;
7117 if (!cur_text_section)
7118 cur_text_section = text_section;
7119 sym->r = VT_SYM | VT_CONST;
7120 gen_function(sym);
7122 break;
7123 } else {
7124 if (btype.t & VT_TYPEDEF) {
7125 /* save typedefed type */
7126 /* XXX: test storage specifiers ? */
7127 sym = sym_find(v);
7128 if (sym && sym->scope == local_scope) {
7129 if (!is_compatible_types(&sym->type, &type)
7130 || !(sym->type.t & VT_TYPEDEF))
7131 tcc_error("incompatible redefinition of '%s'",
7132 get_tok_str(v, NULL));
7133 sym->type = type;
7134 } else {
7135 sym = sym_push(v, &type, 0, 0);
7137 sym->a = ad.a;
7138 sym->type.t |= VT_TYPEDEF;
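/* e.g.  typedef int T;  repeated at the same scope is accepted only
   when the previous name was a typedef of a compatible type;
   otherwise the 'incompatible redefinition' error above is raised. */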
7139 } else {
7140 r = 0;
7141 if ((type.t & VT_BTYPE) == VT_FUNC) {
7142 /* external function definition */
7143 /* specific case for func_call attribute */
7144 ad.a.func_proto = 1;
7145 type.ref->a = ad.a;
7146 } else if (!(type.t & VT_ARRAY)) {
7147 /* not lvalue if array */
7148 r |= lvalue_type(type.t);
7150 has_init = (tok == '=');
7151 if (has_init && (type.t & VT_VLA))
7152 tcc_error("variable length array cannot be initialized");
7153 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7154 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7155 !has_init && l == VT_CONST && type.ref->c < 0)) {
7156 /* external variable or function */
7157 /* NOTE: like GCC, we treat uninitialized global static
7158 arrays of unspecified size as
7159 extern */
7160 sym = external_sym(v, &type, r);
7161 sym->asm_label = ad.asm_label;
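/* e.g.  extern int v;  or  static int tab[];  (unknown size, no
   initializer) only create or refresh a symbol via external_sym();
   no storage is allocated in this branch. */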
7163 if (ad.alias_target) {
7164 Section tsec;
7165 ElfW(Sym) *esym;
7166 Sym *alias_target;
7168 alias_target = sym_find(ad.alias_target);
7169 if (!alias_target || !alias_target->c)
7170 tcc_error("unsupported forward __alias__ attribute");
7171 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7172 tsec.sh_num = esym->st_shndx;
7173 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
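/* alias example:
       int impl(void) { return 1; }
       int shim(void) __attribute__((alias("impl")));
   'shim' reuses impl's section, value and size; aliasing a symbol that
   has not been defined yet is rejected above
   ("unsupported forward __alias__ attribute"). */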
7175 } else {
7176 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7177 if (type.t & VT_STATIC)
7178 r |= VT_CONST;
7179 else
7180 r |= l;
7181 if (has_init)
7182 next();
7183 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7186 if (tok != ',') {
7187 if (is_for_loop_init)
7188 return 1;
7189 skip(';');
7190 break;
7192 next();
7194 ad.a.aligned = 0;
7195 }
7196 }
7197 return 0;
7198 }
7200 ST_FUNC void decl(int l)
7201 {
7202 decl0(l, 0);
7203 }
7205 /* ------------------------------------------------------------------------- */