1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* frame offset of the variable that holds the saved stack pointer whenever the stack pointer is modified for a VLA */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int v1, v2, sym;
66 } **p; int n; /* list of case ranges */
67 int def_sym; /* default symbol */
68 } *cur_switch; /* current switch */
70 /* ------------------------------------------------------------------------- */
71 static void gen_cast(CType *type);
72 static inline CType *pointed_type(CType *type);
73 static int is_compatible_types(CType *type1, CType *type2);
74 static int parse_btype(CType *type, AttributeDef *ad);
75 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
76 static void parse_expr_type(CType *type);
77 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
78 static void block(int *bsym, int *csym, int is_expr);
79 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
80 static int decl0(int l, int is_for_loop_init);
81 static void expr_eq(void);
82 static void expr_lor_const(void);
83 static void unary_type(CType *type);
84 static void vla_runtime_type_size(CType *type, int *a);
85 static void vla_sp_restore(void);
86 static void vla_sp_restore_root(void);
87 static int is_compatible_parameter_types(CType *type1, CType *type2);
88 static void expr_type(CType *type);
89 ST_FUNC void vpush64(int ty, unsigned long long v);
90 ST_FUNC void vpush(CType *type);
91 ST_FUNC int gvtst(int inv, int t);
92 ST_FUNC int is_btype_size(int bt);
93 static void gen_inline_functions(TCCState *s);
95 ST_INLN int is_float(int t)
97 int bt;
98 bt = t & VT_BTYPE;
99 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
102 /* we use our own 'finite' function to avoid potential problems with
103 non-standard math libs */
104 /* XXX: endianness dependent */
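/* p[1] is the word holding the sign and the 11 exponent bits (the high
   word on little-endian targets).  OR-ing with 0x800fffff sets every
   other bit, so adding 1 clears bit 31 exactly when the exponent is all
   ones (Inf/NaN); the final shift therefore yields 1 for finite values
   and 0 otherwise. */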
105 ST_FUNC int ieee_finite(double d)
107 int p[4];
108 memcpy(p, &d, sizeof(double));
109 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
112 ST_FUNC void test_lvalue(void)
114 if (!(vtop->r & VT_LVAL))
115 expect("lvalue");
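/* pvtop records the expected top of the value stack; if vtop differs
   here, SValues were pushed during code generation and never popped. */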
118 ST_FUNC void check_vstack(void)
120 if (pvtop != vtop)
121 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
124 /* ------------------------------------------------------------------------- */
125 /* vstack debugging aid */
127 #if 0
128 void pv (const char *lbl, int a, int b)
130 int i;
131 for (i = a; i < a + b; ++i) {
132 SValue *p = &vtop[-i];
133 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
134 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
137 #endif
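/* When enabled, pv("label", 0, 2) dumps the two topmost value-stack
   entries (vtop[0] and vtop[-1]), as in the commented-out calls in
   gen_opl() further below. */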
139 /* ------------------------------------------------------------------------- */
140 ST_FUNC void tccgen_start(TCCState *s1)
142 cur_text_section = NULL;
143 funcname = "";
144 anon_sym = SYM_FIRST_ANOM;
145 section_sym = 0;
146 nocode_wanted = 1;
148 /* define some often used types */
149 int_type.t = VT_INT;
150 char_pointer_type.t = VT_BYTE;
151 mk_pointer(&char_pointer_type);
152 #if PTR_SIZE == 4
153 size_type.t = VT_INT;
154 #else
155 size_type.t = VT_LLONG;
156 #endif
157 func_old_type.t = VT_FUNC;
158 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
160 if (s1->do_debug) {
161 char buf[512];
163 /* file info: full path + filename */
164 section_sym = put_elf_sym(symtab_section, 0, 0,
165 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
166 text_section->sh_num, NULL);
167 getcwd(buf, sizeof(buf));
168 #ifdef _WIN32
169 normalize_slashes(buf);
170 #endif
171 pstrcat(buf, sizeof(buf), "/");
172 put_stabs_r(buf, N_SO, 0, 0,
173 text_section->data_offset, text_section, section_sym);
174 put_stabs_r(file->filename, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
177 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
178 symbols can be safely used */
179 put_elf_sym(symtab_section, 0, 0,
180 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
181 SHN_ABS, file->filename);
183 #ifdef TCC_TARGET_ARM
184 arm_init(s1);
185 #endif
188 ST_FUNC void tccgen_end(TCCState *s1)
190 gen_inline_functions(s1);
191 check_vstack();
192 /* end of translation unit info */
193 if (s1->do_debug) {
194 put_stabs_r(NULL, N_SO, 0, 0,
195 text_section->data_offset, text_section, section_sym);
199 /* ------------------------------------------------------------------------- */
200 /* update sym->c so that it points to an external symbol in section
201 'section' with value 'value' */
203 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
204 addr_t value, unsigned long size,
205 int can_add_underscore)
207 int sym_type, sym_bind, sh_num, info, other;
208 ElfW(Sym) *esym;
209 const char *name;
210 char buf1[256];
212 #ifdef CONFIG_TCC_BCHECK
213 char buf[32];
214 #endif
216 if (section == NULL)
217 sh_num = SHN_UNDEF;
218 else if (section == SECTION_ABS)
219 sh_num = SHN_ABS;
220 else
221 sh_num = section->sh_num;
223 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
224 sym_type = STT_FUNC;
225 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
226 sym_type = STT_NOTYPE;
227 } else {
228 sym_type = STT_OBJECT;
231 if (sym->type.t & VT_STATIC)
232 sym_bind = STB_LOCAL;
233 else {
234 if (sym->type.t & VT_WEAK)
235 sym_bind = STB_WEAK;
236 else
237 sym_bind = STB_GLOBAL;
240 if (!sym->c) {
241 name = get_tok_str(sym->v, NULL);
242 #ifdef CONFIG_TCC_BCHECK
243 if (tcc_state->do_bounds_check) {
244 /* XXX: avoid doing that for statics ? */
245 /* if bound checking is activated, we change some function
246 names by adding the "__bound" prefix */
247 switch(sym->v) {
248 #ifdef TCC_TARGET_PE
249 /* XXX: we rely only on malloc hooks */
250 case TOK_malloc:
251 case TOK_free:
252 case TOK_realloc:
253 case TOK_memalign:
254 case TOK_calloc:
255 #endif
256 case TOK_memcpy:
257 case TOK_memmove:
258 case TOK_memset:
259 case TOK_strlen:
260 case TOK_strcpy:
261 case TOK_alloca:
262 strcpy(buf, "__bound_");
263 strcat(buf, name);
264 name = buf;
265 break;
268 #endif
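/* With bounds checking enabled, a call to e.g. memcpy is therefore
   emitted against __bound_memcpy so that the bounds-checking runtime
   can intercept it. */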
269 other = 0;
271 #ifdef TCC_TARGET_PE
272 if (sym->type.t & VT_EXPORT)
273 other |= ST_PE_EXPORT;
274 if (sym_type == STT_FUNC && sym->type.ref) {
275 Sym *ref = sym->type.ref;
276 if (ref->a.func_export)
277 other |= ST_PE_EXPORT;
278 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
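/* stdcall decoration: the name is prefixed with '_' and suffixed with
   '@' plus the size of the argument area, e.g. a two-int-argument
   function f becomes _f@8 on 32-bit PE targets */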
279 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
280 name = buf1;
281 other |= ST_PE_STDCALL;
282 can_add_underscore = 0;
284 } else {
285 if (find_elf_sym(tcc_state->dynsymtab_section, name))
286 other |= ST_PE_IMPORT;
287 if (sym->type.t & VT_IMPORT)
288 other |= ST_PE_IMPORT;
290 #else
291 if (! (sym->type.t & VT_STATIC))
292 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
293 #endif
294 if (tcc_state->leading_underscore && can_add_underscore) {
295 buf1[0] = '_';
296 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
297 name = buf1;
299 if (sym->asm_label) {
300 name = get_tok_str(sym->asm_label, NULL);
302 info = ELFW(ST_INFO)(sym_bind, sym_type);
303 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
304 } else {
305 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
306 esym->st_value = value;
307 esym->st_size = size;
308 esym->st_shndx = sh_num;
312 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
313 addr_t value, unsigned long size)
315 put_extern_sym2(sym, section, value, size, 1);
318 /* add a new relocation entry to symbol 'sym' in section 's' */
319 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
320 addr_t addend)
322 int c = 0;
323 if (sym) {
324 if (0 == sym->c)
325 put_extern_sym(sym, NULL, 0, 0);
326 c = sym->c;
328 /* now we can add ELF relocation info */
329 put_elf_reloca(symtab_section, s, offset, type, c, addend);
332 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
334 greloca(s, sym, offset, type, 0);
337 /* ------------------------------------------------------------------------- */
338 /* symbol allocator */
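/* Symbols are pool-allocated: __sym_malloc() grabs SYM_POOL_NB symbols
   at a time and threads them onto the sym_free_first free list, so
   sym_malloc()/sym_free() reduce to a free-list pop/push. */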
339 static Sym *__sym_malloc(void)
341 Sym *sym_pool, *sym, *last_sym;
342 int i;
344 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
345 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
347 last_sym = sym_free_first;
348 sym = sym_pool;
349 for(i = 0; i < SYM_POOL_NB; i++) {
350 sym->next = last_sym;
351 last_sym = sym;
352 sym++;
354 sym_free_first = last_sym;
355 return last_sym;
358 static inline Sym *sym_malloc(void)
360 Sym *sym;
361 sym = sym_free_first;
362 if (!sym)
363 sym = __sym_malloc();
364 sym_free_first = sym->next;
365 return sym;
368 ST_INLN void sym_free(Sym *sym)
370 sym->next = sym_free_first;
371 sym_free_first = sym;
374 /* push, without hashing */
375 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
377 Sym *s;
379 s = sym_malloc();
380 s->asm_label = 0;
381 s->v = v;
382 s->type.t = t;
383 s->type.ref = NULL;
384 #ifdef _WIN64
385 s->d = NULL;
386 #endif
387 s->c = c;
388 s->next = NULL;
389 /* add in stack */
390 s->prev = *ps;
391 *ps = s;
392 return s;
395 /* find a symbol and return its associated structure. 's' is the top
396 of the symbol stack */
397 ST_FUNC Sym *sym_find2(Sym *s, int v)
399 while (s) {
400 if (s->v == v)
401 return s;
402 else if (s->v == -1)
403 return NULL;
404 s = s->prev;
406 return NULL;
409 /* structure lookup */
410 ST_INLN Sym *struct_find(int v)
412 v -= TOK_IDENT;
413 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
414 return NULL;
415 return table_ident[v]->sym_struct;
418 /* find an identifier */
419 ST_INLN Sym *sym_find(int v)
421 v -= TOK_IDENT;
422 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
423 return NULL;
424 return table_ident[v]->sym_identifier;
427 /* push a given symbol on the symbol stack */
428 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
430 Sym *s, **ps;
431 TokenSym *ts;
433 if (local_stack)
434 ps = &local_stack;
435 else
436 ps = &global_stack;
437 s = sym_push2(ps, v, type->t, c);
438 s->type.ref = type->ref;
439 s->r = r;
440 /* don't record fields or anonymous symbols */
441 /* XXX: simplify */
442 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
443 /* record symbol in token array */
444 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
445 if (v & SYM_STRUCT)
446 ps = &ts->sym_struct;
447 else
448 ps = &ts->sym_identifier;
449 s->prev_tok = *ps;
450 *ps = s;
451 s->scope = local_scope;
452 if (s->prev_tok && s->prev_tok->scope == s->scope)
453 tcc_error("redeclaration of '%s'",
454 get_tok_str(v & ~SYM_STRUCT, NULL));
456 return s;
459 /* push a global identifier */
460 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
462 Sym *s, **ps;
463 s = sym_push2(&global_stack, v, t, c);
464 /* don't record anonymous symbol */
465 if (v < SYM_FIRST_ANOM) {
466 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
467 /* modify the topmost local identifier, so that
468 sym_identifier will point to 's' when popped */
469 while (*ps != NULL)
470 ps = &(*ps)->prev_tok;
471 s->prev_tok = NULL;
472 *ps = s;
474 return s;
477 /* pop symbols until top reaches 'b' */
478 ST_FUNC void sym_pop(Sym **ptop, Sym *b)
480 Sym *s, *ss, **ps;
481 TokenSym *ts;
482 int v;
484 s = *ptop;
485 while(s != b) {
486 ss = s->prev;
487 v = s->v;
488 /* remove symbol in token array */
489 /* XXX: simplify */
490 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
491 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
492 if (v & SYM_STRUCT)
493 ps = &ts->sym_struct;
494 else
495 ps = &ts->sym_identifier;
496 *ps = s->prev_tok;
498 sym_free(s);
499 s = ss;
501 *ptop = b;
504 static void weaken_symbol(Sym *sym)
506 sym->type.t |= VT_WEAK;
507 if (sym->c > 0) {
508 int esym_type;
509 ElfW(Sym) *esym;
511 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
512 esym_type = ELFW(ST_TYPE)(esym->st_info);
513 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
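/* apply_visibility() below merges the ELF visibility requested by a new
   declaration into the symbol: STV_DEFAULT is overridden by anything
   else, otherwise the numerically smaller (more restrictive) visibility
   wins; an already emitted ELF symbol is patched as well. */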
517 static void apply_visibility(Sym *sym, CType *type)
519 int vis = sym->type.t & VT_VIS_MASK;
520 int vis2 = type->t & VT_VIS_MASK;
521 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
522 vis = vis2;
523 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
525 else
526 vis = (vis < vis2) ? vis : vis2;
527 sym->type.t &= ~VT_VIS_MASK;
528 sym->type.t |= vis;
530 if (sym->c > 0) {
531 ElfW(Sym) *esym;
533 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
534 vis >>= VT_VIS_SHIFT;
535 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
539 /* ------------------------------------------------------------------------- */
541 ST_FUNC void swap(int *p, int *q)
543 int t;
544 t = *p;
545 *p = *q;
546 *q = t;
549 static void vsetc(CType *type, int r, CValue *vc)
551 int v;
553 if (vtop >= vstack + (VSTACK_SIZE - 1))
554 tcc_error("memory full (vstack)");
555 /* cannot leave the value in the cpu flags if other instructions are
556 generated. Also avoid leaving VT_JMP anywhere except on the top of
557 the stack because it would complicate the code generator. */
558 if (vtop >= vstack) {
559 v = vtop->r & VT_VALMASK;
560 if (v == VT_CMP || (v & ~1) == VT_JMP)
561 gv(RC_INT);
563 vtop++;
564 vtop->type = *type;
565 vtop->r = r;
566 vtop->r2 = VT_CONST;
567 vtop->c = *vc;
570 /* push constant of type "type" with an unspecified (don't-care) value */
571 ST_FUNC void vpush(CType *type)
573 CValue cval;
574 vsetc(type, VT_CONST, &cval);
577 /* push integer constant */
578 ST_FUNC void vpushi(int v)
580 CValue cval;
581 cval.i = v;
582 vsetc(&int_type, VT_CONST, &cval);
585 /* push a pointer sized constant */
586 static void vpushs(addr_t v)
588 CValue cval;
589 cval.i = v;
590 vsetc(&size_type, VT_CONST, &cval);
593 /* push arbitrary 64bit constant */
594 ST_FUNC void vpush64(int ty, unsigned long long v)
596 CValue cval;
597 CType ctype;
598 ctype.t = ty;
599 ctype.ref = NULL;
600 cval.i = v;
601 vsetc(&ctype, VT_CONST, &cval);
604 /* push long long constant */
605 static inline void vpushll(long long v)
607 vpush64(VT_LLONG, v);
610 /* push a symbol value of TYPE */
611 static inline void vpushsym(CType *type, Sym *sym)
613 CValue cval;
614 cval.i = 0;
615 vsetc(type, VT_CONST | VT_SYM, &cval);
616 vtop->sym = sym;
619 /* Return a static symbol pointing to a section */
620 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
622 int v;
623 Sym *sym;
625 v = anon_sym++;
626 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
627 sym->type.ref = type->ref;
628 sym->r = VT_CONST | VT_SYM;
629 put_extern_sym(sym, sec, offset, size);
630 return sym;
633 /* push a reference to a section offset by adding a dummy symbol */
634 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
636 vpushsym(type, get_sym_ref(type, sec, offset, size));
639 /* define a new external reference to a symbol 'v' of type 'u' */
640 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
642 Sym *s;
644 s = sym_find(v);
645 if (!s) {
646 /* push forward reference */
647 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
648 s->type.ref = type->ref;
649 s->r = r | VT_CONST | VT_SYM;
651 return s;
654 /* define a new external reference to a symbol 'v' */
655 static Sym *external_sym(int v, CType *type, int r)
657 Sym *s;
659 s = sym_find(v);
660 if (!s) {
661 /* push forward reference */
662 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
663 s->type.t |= VT_EXTERN;
664 } else if (s->type.ref == func_old_type.ref) {
665 s->type.ref = type->ref;
666 s->r = r | VT_CONST | VT_SYM;
667 s->type.t |= VT_EXTERN;
668 } else if (!is_compatible_types(&s->type, type)) {
669 tcc_error("incompatible types for redefinition of '%s'",
670 get_tok_str(v, NULL));
672 /* Merge some storage attributes. */
673 if (type->t & VT_WEAK)
674 weaken_symbol(s);
676 if (type->t & VT_VIS_MASK)
677 apply_visibility(s, type);
679 return s;
682 /* push a reference to global symbol v */
683 ST_FUNC void vpush_global_sym(CType *type, int v)
685 vpushsym(type, external_global_sym(v, type, 0));
688 ST_FUNC void vset(CType *type, int r, int v)
690 CValue cval;
692 cval.i = v;
693 vsetc(type, r, &cval);
696 static void vseti(int r, int v)
698 CType type;
699 type.t = VT_INT;
700 type.ref = 0;
701 vset(&type, r, v);
704 ST_FUNC void vswap(void)
706 SValue tmp;
707 /* cannot leave the value in the cpu flags if other instructions are
708 generated. Also avoid leaving VT_JMP anywhere except on the top of
709 the stack because it would complicate the code generator. */
710 if (vtop >= vstack) {
711 int v = vtop->r & VT_VALMASK;
712 if (v == VT_CMP || (v & ~1) == VT_JMP)
713 gv(RC_INT);
715 tmp = vtop[0];
716 vtop[0] = vtop[-1];
717 vtop[-1] = tmp;
719 /* XXX: +2% overall speed possible with optimized memswap
721 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
725 ST_FUNC void vpushv(SValue *v)
727 if (vtop >= vstack + (VSTACK_SIZE - 1))
728 tcc_error("memory full (vstack)");
729 vtop++;
730 *vtop = *v;
733 static void vdup(void)
735 vpushv(vtop);
738 /* save registers up to (vtop - n) stack entry */
739 ST_FUNC void save_regs(int n)
741 SValue *p, *p1;
742 for(p = vstack, p1 = vtop - n; p <= p1; p++)
743 save_reg(p->r);
746 /* save r to the memory stack, and mark it as being free */
747 ST_FUNC void save_reg(int r)
749 save_reg_upstack(r, 0);
752 /* save r to the memory stack, and mark it as being free,
753 if it is referenced by any stack entry up to (vtop - n) */
754 ST_FUNC void save_reg_upstack(int r, int n)
756 int l, saved, size, align;
757 SValue *p, *p1, sv;
758 CType *type;
760 if ((r &= VT_VALMASK) >= VT_CONST)
761 return;
763 /* modify all stack values */
764 saved = 0;
765 l = 0;
766 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
767 if ((p->r & VT_VALMASK) == r ||
768 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
769 /* must save value on stack if not already done */
770 if (!saved) {
771 /* NOTE: must reload 'r' because r might be equal to r2 */
772 r = p->r & VT_VALMASK;
773 /* store register in the stack */
774 type = &p->type;
775 if ((p->r & VT_LVAL) ||
776 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
777 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
778 type = &char_pointer_type;
779 #else
780 type = &int_type;
781 #endif
782 size = type_size(type, &align);
783 loc = (loc - size) & -align;
784 sv.type.t = type->t;
785 sv.r = VT_LOCAL | VT_LVAL;
786 sv.c.i = loc;
787 store(r, &sv);
788 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
789 /* x86 specific: need to pop fp register ST0 if saved */
790 if (r == TREG_ST0) {
791 o(0xd8dd); /* fstp %st(0) */
793 #endif
794 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
795 /* special long long case */
796 if ((type->t & VT_BTYPE) == VT_LLONG) {
797 sv.c.i += 4;
798 store(p->r2, &sv);
800 #endif
801 l = loc;
802 saved = 1;
804 /* mark that stack entry as being saved on the stack */
805 if (p->r & VT_LVAL) {
806 /* also clear the bounded flag because the
807 relocation address of the function was stored in
808 p->c.i */
809 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
810 } else {
811 p->r = lvalue_type(p->type.t) | VT_LOCAL;
813 p->r2 = VT_CONST;
814 p->c.i = l;
819 #ifdef TCC_TARGET_ARM
820 /* find a register of class 'rc2' with at most one reference on stack.
821 * If none, call get_reg(rc) */
822 ST_FUNC int get_reg_ex(int rc, int rc2)
824 int r;
825 SValue *p;
827 for(r=0;r<NB_REGS;r++) {
828 if (reg_classes[r] & rc2) {
829 int n;
830 n=0;
831 for(p = vstack; p <= vtop; p++) {
832 if ((p->r & VT_VALMASK) == r ||
833 (p->r2 & VT_VALMASK) == r)
834 n++;
836 if (n <= 1)
837 return r;
840 return get_reg(rc);
842 #endif
844 /* find a free register of class 'rc'. If none, save one register */
845 ST_FUNC int get_reg(int rc)
847 int r;
848 SValue *p;
850 /* find a free register */
851 for(r=0;r<NB_REGS;r++) {
852 if (reg_classes[r] & rc) {
853 for(p=vstack;p<=vtop;p++) {
854 if ((p->r & VT_VALMASK) == r ||
855 (p->r2 & VT_VALMASK) == r)
856 goto notfound;
858 return r;
860 notfound: ;
863 /* no register left : free the first one on the stack (VERY
864 IMPORTANT to start from the bottom to ensure that we don't
865 spill registers used in gen_opi()) */
866 for(p=vstack;p<=vtop;p++) {
867 /* look at second register (if long long) */
868 r = p->r2 & VT_VALMASK;
869 if (r < VT_CONST && (reg_classes[r] & rc))
870 goto save_found;
871 r = p->r & VT_VALMASK;
872 if (r < VT_CONST && (reg_classes[r] & rc)) {
873 save_found:
874 save_reg(r);
875 return r;
878 /* should never come here */
879 return -1;
882 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
883 if needed */
884 static void move_reg(int r, int s, int t)
886 SValue sv;
888 if (r != s) {
889 save_reg(r);
890 sv.type.t = t;
891 sv.type.ref = NULL;
892 sv.r = s;
893 sv.c.i = 0;
894 load(r, &sv);
898 /* get address of vtop (vtop MUST BE an lvalue) */
899 ST_FUNC void gaddrof(void)
901 if (vtop->r & VT_REF && !nocode_wanted)
902 gv(RC_INT);
903 vtop->r &= ~VT_LVAL;
904 /* tricky: if saved lvalue, then we can go back to lvalue */
905 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
906 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
911 #ifdef CONFIG_TCC_BCHECK
912 /* generate lvalue bound code */
913 static void gbound(void)
915 int lval_type;
916 CType type1;
918 vtop->r &= ~VT_MUSTBOUND;
919 /* if lvalue, then use checking code before dereferencing */
920 if (vtop->r & VT_LVAL) {
921 /* if not VT_BOUNDED value, then make one */
922 if (!(vtop->r & VT_BOUNDED)) {
923 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
924 /* must save type because we must set it to int to get pointer */
925 type1 = vtop->type;
926 vtop->type.t = VT_PTR;
927 gaddrof();
928 vpushi(0);
929 gen_bounded_ptr_add();
930 vtop->r |= lval_type;
931 vtop->type = type1;
933 /* then check for dereferencing */
934 gen_bounded_ptr_deref();
937 #endif
939 /* store vtop in a register belonging to class 'rc'. lvalues are
940 converted to values. Cannot be used if the value cannot be converted
941 to a register value (such as structures). */
942 ST_FUNC int gv(int rc)
944 int r, bit_pos, bit_size, size, align, i;
945 int rc2;
947 /* NOTE: get_reg can modify vstack[] */
948 if (vtop->type.t & VT_BITFIELD) {
949 CType type;
950 int bits = 32;
951 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
952 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
953 /* remove bit field info to avoid loops */
954 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
955 /* cast to int to propagate signedness in following ops */
956 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
957 type.t = VT_LLONG;
958 bits = 64;
959 } else
960 type.t = VT_INT;
961 if((vtop->type.t & VT_UNSIGNED) ||
962 (vtop->type.t & VT_BTYPE) == VT_BOOL)
963 type.t |= VT_UNSIGNED;
964 gen_cast(&type);
965 /* generate shifts */
966 vpushi(bits - (bit_pos + bit_size));
967 gen_op(TOK_SHL);
968 vpushi(bits - bit_size);
969 /* NOTE: transformed to SHR if unsigned */
970 gen_op(TOK_SAR);
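/* the SHL/SAR pair above (SAR becomes SHR for unsigned values)
   left-aligns the bit field and then shifts it back down, giving
   sign or zero extension of the extracted bits */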
971 r = gv(rc);
972 } else {
973 if (is_float(vtop->type.t) &&
974 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
975 Sym *sym;
976 int *ptr;
977 unsigned long offset;
978 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
979 CValue check;
980 #endif
982 /* XXX: unify with initializers handling ? */
983 /* CPUs usually cannot use float constants, so we store them
984 generically in data segment */
985 size = type_size(&vtop->type, &align);
986 offset = (data_section->data_offset + align - 1) & -align;
987 data_section->data_offset = offset;
988 /* XXX: not portable yet */
989 #if defined(__i386__) || defined(__x86_64__)
990 /* Zero pad x87 tenbyte long doubles */
991 if (size == LDOUBLE_SIZE) {
992 vtop->c.tab[2] &= 0xffff;
993 #if LDOUBLE_SIZE == 16
994 vtop->c.tab[3] = 0;
995 #endif
997 #endif
998 ptr = section_ptr_add(data_section, size);
999 size = size >> 2;
1000 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1001 check.d = 1;
1002 if(check.tab[0])
1003 for(i=0;i<size;i++)
1004 ptr[i] = vtop->c.tab[size-1-i];
1005 else
1006 #endif
1007 for(i=0;i<size;i++)
1008 ptr[i] = vtop->c.tab[i];
1009 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1010 vtop->r |= VT_LVAL | VT_SYM;
1011 vtop->sym = sym;
1012 vtop->c.i = 0;
1014 #ifdef CONFIG_TCC_BCHECK
1015 if (vtop->r & VT_MUSTBOUND)
1016 gbound();
1017 #endif
1019 r = vtop->r & VT_VALMASK;
1020 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1021 #ifndef TCC_TARGET_ARM64
1022 if (rc == RC_IRET)
1023 rc2 = RC_LRET;
1024 #ifdef TCC_TARGET_X86_64
1025 else if (rc == RC_FRET)
1026 rc2 = RC_QRET;
1027 #endif
1028 #endif
1030 /* need to reload if:
1031 - constant
1032 - lvalue (need to dereference pointer)
1033 - already a register, but not in the right class */
1034 if (r >= VT_CONST
1035 || (vtop->r & VT_LVAL)
1036 || !(reg_classes[r] & rc)
1037 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1038 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1039 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1040 #else
1041 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1042 #endif
1045 r = get_reg(rc);
1046 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1047 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1048 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1049 #else
1050 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1051 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1052 unsigned long long ll;
1053 #endif
1054 int r2, original_type;
1055 original_type = vtop->type.t;
1056 /* two register type load : expand to two words
1057 temporarily */
1058 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1059 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1060 /* load constant */
1061 ll = vtop->c.i;
1062 vtop->c.i = ll; /* first word */
1063 load(r, vtop);
1064 vtop->r = r; /* save register value */
1065 vpushi(ll >> 32); /* second word */
1066 } else
1067 #endif
1068 if (vtop->r & VT_LVAL) {
1069 /* We do not want to modify the long long
1070 pointer here, so the safest (and least
1071 efficient) approach is to save all the other
1072 registers on the stack. XXX: totally inefficient. */
1073 #if 0
1074 save_regs(1);
1075 #else
1076 /* lvalue_save: save only if used further down the stack */
1077 save_reg_upstack(vtop->r, 1);
1078 #endif
1079 /* load from memory */
1080 vtop->type.t = load_type;
1081 load(r, vtop);
1082 vdup();
1083 vtop[-1].r = r; /* save register value */
1084 /* increment pointer to get second word */
1085 vtop->type.t = addr_type;
1086 gaddrof();
1087 vpushi(load_size);
1088 gen_op('+');
1089 vtop->r |= VT_LVAL;
1090 vtop->type.t = load_type;
1091 } else {
1092 /* move registers */
1093 load(r, vtop);
1094 vdup();
1095 vtop[-1].r = r; /* save register value */
1096 vtop->r = vtop[-1].r2;
1098 /* Allocate second register. Here we rely on the fact that
1099 get_reg() tries first to free r2 of an SValue. */
1100 r2 = get_reg(rc2);
1101 load(r2, vtop);
1102 vpop();
1103 /* write second register */
1104 vtop->r2 = r2;
1105 vtop->type.t = original_type;
1106 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1107 int t1, t;
1108 /* lvalue of scalar type : need to use lvalue type
1109 because of possible cast */
1110 t = vtop->type.t;
1111 t1 = t;
1112 /* compute memory access type */
1113 if (vtop->r & VT_REF)
1114 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1115 t = VT_PTR;
1116 #else
1117 t = VT_INT;
1118 #endif
1119 else if (vtop->r & VT_LVAL_BYTE)
1120 t = VT_BYTE;
1121 else if (vtop->r & VT_LVAL_SHORT)
1122 t = VT_SHORT;
1123 if (vtop->r & VT_LVAL_UNSIGNED)
1124 t |= VT_UNSIGNED;
1125 vtop->type.t = t;
1126 load(r, vtop);
1127 /* restore wanted type */
1128 vtop->type.t = t1;
1129 } else {
1130 /* one register type load */
1131 load(r, vtop);
1134 vtop->r = r;
1135 #ifdef TCC_TARGET_C67
1136 /* uses register pairs for doubles */
1137 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1138 vtop->r2 = r+1;
1139 #endif
1141 return r;
1144 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1145 ST_FUNC void gv2(int rc1, int rc2)
1147 int v;
1149 /* generate more generic register first. But VT_JMP or VT_CMP
1150 values must be generated first in all cases to avoid possible
1151 reload errors */
1152 v = vtop[0].r & VT_VALMASK;
1153 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1154 vswap();
1155 gv(rc1);
1156 vswap();
1157 gv(rc2);
1158 /* test if reload is needed for first register */
1159 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1160 vswap();
1161 gv(rc1);
1162 vswap();
1164 } else {
1165 gv(rc2);
1166 vswap();
1167 gv(rc1);
1168 vswap();
1169 /* test if reload is needed for first register */
1170 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1171 gv(rc2);
1176 #ifndef TCC_TARGET_ARM64
1177 /* wrapper around RC_FRET to return a register by type */
1178 static int rc_fret(int t)
1180 #ifdef TCC_TARGET_X86_64
1181 if (t == VT_LDOUBLE) {
1182 return RC_ST0;
1184 #endif
1185 return RC_FRET;
1187 #endif
1189 /* wrapper around REG_FRET to return a register by type */
1190 static int reg_fret(int t)
1192 #ifdef TCC_TARGET_X86_64
1193 if (t == VT_LDOUBLE) {
1194 return TREG_ST0;
1196 #endif
1197 return REG_FRET;
1200 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1201 /* expand 64bit on stack in two ints */
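/* Three cases below: a constant is duplicated and its copy shifted
   right by 32, an lvalue in memory is duplicated with its address
   offset by 4 bytes, and anything else is loaded into a register pair
   whose halves are then split into two stack entries. */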
1202 static void lexpand(void)
1204 int u, v;
1205 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1206 v = vtop->r & (VT_VALMASK | VT_LVAL);
1207 if (v == VT_CONST) {
1208 vdup();
1209 vtop[0].c.i >>= 32;
1210 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1211 vdup();
1212 vtop[0].c.i += 4;
1213 } else {
1214 gv(RC_INT);
1215 vdup();
1216 vtop[0].r = vtop[-1].r2;
1217 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1219 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1221 #endif
1223 #ifdef TCC_TARGET_ARM
1224 /* expand long long on stack */
1225 ST_FUNC void lexpand_nr(void)
1227 int u,v;
1229 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1230 vdup();
1231 vtop->r2 = VT_CONST;
1232 vtop->type.t = VT_INT | u;
1233 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1234 if (v == VT_CONST) {
1235 vtop[-1].c.i = vtop->c.i;
1236 vtop->c.i = vtop->c.i >> 32;
1237 vtop->r = VT_CONST;
1238 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1239 vtop->c.i += 4;
1240 vtop->r = vtop[-1].r;
1241 } else if (v > VT_CONST) {
1242 vtop--;
1243 lexpand();
1244 } else
1245 vtop->r = vtop[-1].r2;
1246 vtop[-1].r2 = VT_CONST;
1247 vtop[-1].type.t = VT_INT | u;
1249 #endif
1251 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1252 /* build a long long from two ints */
1253 static void lbuild(int t)
1255 gv2(RC_INT, RC_INT);
1256 vtop[-1].r2 = vtop[0].r;
1257 vtop[-1].type.t = t;
1258 vpop();
1260 #endif
1262 /* rotate n first stack elements to the bottom
1263 I1 ... In -> I2 ... In I1 [top is right]
1265 ST_FUNC void vrotb(int n)
1267 int i;
1268 SValue tmp;
1270 tmp = vtop[-n + 1];
1271 for(i=-n+1;i!=0;i++)
1272 vtop[i] = vtop[i+1];
1273 vtop[0] = tmp;
1276 /* rotate the n elements before entry e towards the top
1277 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1279 ST_FUNC void vrote(SValue *e, int n)
1281 int i;
1282 SValue tmp;
1284 tmp = *e;
1285 for(i = 0;i < n - 1; i++)
1286 e[-i] = e[-i - 1];
1287 e[-n + 1] = tmp;
1290 /* rotate n first stack elements to the top
1291 I1 ... In -> In I1 ... I(n-1) [top is right]
1293 ST_FUNC void vrott(int n)
1295 vrote(vtop, n);
1298 /* pop stack value */
1299 ST_FUNC void vpop(void)
1301 int v;
1302 v = vtop->r & VT_VALMASK;
1303 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1304 /* for x86, we need to pop the FP stack */
1305 if (v == TREG_ST0 && !nocode_wanted) {
1306 o(0xd8dd); /* fstp %st(0) */
1307 } else
1308 #endif
1309 if (v == VT_JMP || v == VT_JMPI) {
1310 /* need to resolve the pending jump if && or || was used without a test */
1311 gsym(vtop->c.i);
1313 vtop--;
1316 /* convert stack entry to register and duplicate its value in another
1317 register */
1318 static void gv_dup(void)
1320 int rc, t, r, r1;
1321 SValue sv;
1323 t = vtop->type.t;
1324 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1325 if ((t & VT_BTYPE) == VT_LLONG) {
1326 lexpand();
1327 gv_dup();
1328 vswap();
1329 vrotb(3);
1330 gv_dup();
1331 vrotb(4);
1332 /* stack: H L L1 H1 */
1333 lbuild(t);
1334 vrotb(3);
1335 vrotb(3);
1336 vswap();
1337 lbuild(t);
1338 vswap();
1339 } else
1340 #endif
1342 /* duplicate value */
1343 rc = RC_INT;
1344 sv.type.t = VT_INT;
1345 if (is_float(t)) {
1346 rc = RC_FLOAT;
1347 #ifdef TCC_TARGET_X86_64
1348 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1349 rc = RC_ST0;
1351 #endif
1352 sv.type.t = t;
1354 r = gv(rc);
1355 r1 = get_reg(rc);
1356 sv.r = r;
1357 sv.c.i = 0;
1358 load(r1, &sv); /* move r to r1 */
1359 vdup();
1360 /* duplicates value */
1361 if (r != r1)
1362 vtop->r = r1;
1366 /* Generate a value test:
1368 * a test usable for any value (jump, comparison and integers) */
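/* 'inv' selects the sense of the jump: the returned jump chain is taken
   when the value is true for inv == 0 and when it is false for
   inv == 1.  't' is an existing jump chain to append to; the new chain
   head is returned. */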
1369 ST_FUNC int gvtst(int inv, int t)
1371 int v = vtop->r & VT_VALMASK;
1372 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1373 vpushi(0);
1374 gen_op(TOK_NE);
1376 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1377 /* constant jmp optimization */
1378 if ((vtop->c.i != 0) != inv)
1379 t = gjmp(t);
1380 vtop--;
1381 return t;
1383 return gtst(inv, t);
1386 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1387 /* generate CPU independent (unsigned) long long operations */
1388 static void gen_opl(int op)
1390 int t, a, b, op1, c, i;
1391 int func;
1392 unsigned short reg_iret = REG_IRET;
1393 unsigned short reg_lret = REG_LRET;
1394 SValue tmp;
1396 switch(op) {
1397 case '/':
1398 case TOK_PDIV:
1399 func = TOK___divdi3;
1400 goto gen_func;
1401 case TOK_UDIV:
1402 func = TOK___udivdi3;
1403 goto gen_func;
1404 case '%':
1405 func = TOK___moddi3;
1406 goto gen_mod_func;
1407 case TOK_UMOD:
1408 func = TOK___umoddi3;
1409 gen_mod_func:
1410 #ifdef TCC_ARM_EABI
1411 reg_iret = TREG_R2;
1412 reg_lret = TREG_R3;
1413 #endif
1414 gen_func:
1415 /* call generic long long function */
1416 vpush_global_sym(&func_old_type, func);
1417 vrott(3);
1418 gfunc_call(2);
1419 vpushi(0);
1420 vtop->r = reg_iret;
1421 vtop->r2 = reg_lret;
1422 break;
1423 case '^':
1424 case '&':
1425 case '|':
1426 case '*':
1427 case '+':
1428 case '-':
1429 //pv("gen_opl A",0,2);
1430 t = vtop->type.t;
1431 vswap();
1432 lexpand();
1433 vrotb(3);
1434 lexpand();
1435 /* stack: L1 H1 L2 H2 */
1436 tmp = vtop[0];
1437 vtop[0] = vtop[-3];
1438 vtop[-3] = tmp;
1439 tmp = vtop[-2];
1440 vtop[-2] = vtop[-3];
1441 vtop[-3] = tmp;
1442 vswap();
1443 /* stack: H1 H2 L1 L2 */
1444 //pv("gen_opl B",0,4);
1445 if (op == '*') {
1446 vpushv(vtop - 1);
1447 vpushv(vtop - 1);
1448 gen_op(TOK_UMULL);
1449 lexpand();
1450 /* stack: H1 H2 L1 L2 ML MH */
1451 for(i=0;i<4;i++)
1452 vrotb(6);
1453 /* stack: ML MH H1 H2 L1 L2 */
1454 tmp = vtop[0];
1455 vtop[0] = vtop[-2];
1456 vtop[-2] = tmp;
1457 /* stack: ML MH H1 L2 H2 L1 */
1458 gen_op('*');
1459 vrotb(3);
1460 vrotb(3);
1461 gen_op('*');
1462 /* stack: ML MH M1 M2 */
1463 gen_op('+');
1464 gen_op('+');
1465 } else if (op == '+' || op == '-') {
1466 /* XXX: add non carry method too (for MIPS or alpha) */
1467 if (op == '+')
1468 op1 = TOK_ADDC1;
1469 else
1470 op1 = TOK_SUBC1;
1471 gen_op(op1);
1472 /* stack: H1 H2 (L1 op L2) */
1473 vrotb(3);
1474 vrotb(3);
1475 gen_op(op1 + 1); /* TOK_xxxC2 */
1476 } else {
1477 gen_op(op);
1478 /* stack: H1 H2 (L1 op L2) */
1479 vrotb(3);
1480 vrotb(3);
1481 /* stack: (L1 op L2) H1 H2 */
1482 gen_op(op);
1483 /* stack: (L1 op L2) (H1 op H2) */
1485 /* stack: L H */
1486 lbuild(t);
1487 break;
1488 case TOK_SAR:
1489 case TOK_SHR:
1490 case TOK_SHL:
1491 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1492 t = vtop[-1].type.t;
1493 vswap();
1494 lexpand();
1495 vrotb(3);
1496 /* stack: L H shift */
1497 c = (int)vtop->c.i;
1498 /* constant: simpler */
1499 /* NOTE: all comments are for SHL. the other cases are
1500 done by swapping words */
1501 vpop();
1502 if (op != TOK_SHL)
1503 vswap();
1504 if (c >= 32) {
1505 /* stack: L H */
1506 vpop();
1507 if (c > 32) {
1508 vpushi(c - 32);
1509 gen_op(op);
1511 if (op != TOK_SAR) {
1512 vpushi(0);
1513 } else {
1514 gv_dup();
1515 vpushi(31);
1516 gen_op(TOK_SAR);
1518 vswap();
1519 } else {
1520 vswap();
1521 gv_dup();
1522 /* stack: H L L */
1523 vpushi(c);
1524 gen_op(op);
1525 vswap();
1526 vpushi(32 - c);
1527 if (op == TOK_SHL)
1528 gen_op(TOK_SHR);
1529 else
1530 gen_op(TOK_SHL);
1531 vrotb(3);
1532 /* stack: L L H */
1533 vpushi(c);
1534 if (op == TOK_SHL)
1535 gen_op(TOK_SHL);
1536 else
1537 gen_op(TOK_SHR);
1538 gen_op('|');
1540 if (op != TOK_SHL)
1541 vswap();
1542 lbuild(t);
1543 } else {
1544 /* XXX: should provide a faster fallback on x86 ? */
1545 switch(op) {
1546 case TOK_SAR:
1547 func = TOK___ashrdi3;
1548 goto gen_func;
1549 case TOK_SHR:
1550 func = TOK___lshrdi3;
1551 goto gen_func;
1552 case TOK_SHL:
1553 func = TOK___ashldi3;
1554 goto gen_func;
1557 break;
1558 default:
1559 /* compare operations */
1560 t = vtop->type.t;
1561 vswap();
1562 lexpand();
1563 vrotb(3);
1564 lexpand();
1565 /* stack: L1 H1 L2 H2 */
1566 tmp = vtop[-1];
1567 vtop[-1] = vtop[-2];
1568 vtop[-2] = tmp;
1569 /* stack: L1 L2 H1 H2 */
1570 /* compare high */
1571 op1 = op;
1572 /* when values are equal, we need to compare low words. since
1573 the jump is inverted, we invert the test too. */
1574 if (op1 == TOK_LT)
1575 op1 = TOK_LE;
1576 else if (op1 == TOK_GT)
1577 op1 = TOK_GE;
1578 else if (op1 == TOK_ULT)
1579 op1 = TOK_ULE;
1580 else if (op1 == TOK_UGT)
1581 op1 = TOK_UGE;
1582 a = 0;
1583 b = 0;
1584 gen_op(op1);
1585 if (op1 != TOK_NE) {
1586 a = gvtst(1, 0);
1588 if (op != TOK_EQ) {
1589 /* generate non equal test */
1590 /* XXX: NOT PORTABLE yet */
1591 if (a == 0) {
1592 b = gvtst(0, 0);
1593 } else {
1594 #if defined(TCC_TARGET_I386)
1595 b = psym(0x850f, 0);
1596 #elif defined(TCC_TARGET_ARM)
1597 b = ind;
1598 o(0x1A000000 | encbranch(ind, 0, 1));
1599 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1600 tcc_error("not implemented");
1601 #else
1602 #error not supported
1603 #endif
1606 /* compare low. Always unsigned */
1607 op1 = op;
1608 if (op1 == TOK_LT)
1609 op1 = TOK_ULT;
1610 else if (op1 == TOK_LE)
1611 op1 = TOK_ULE;
1612 else if (op1 == TOK_GT)
1613 op1 = TOK_UGT;
1614 else if (op1 == TOK_GE)
1615 op1 = TOK_UGE;
1616 gen_op(op1);
1617 a = gvtst(1, a);
1618 gsym(b);
1619 vseti(VT_JMPI, a);
1620 break;
1623 #endif
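/* The two helpers below implement signed 64-bit division and comparison
   on unsigned values: gen_opic_sdiv() divides the magnitudes and
   restores the sign from a^b, and gen_opic_lt() flips the sign bits so
   that signed order maps onto unsigned order. */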
1625 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1627 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1628 return (a ^ b) >> 63 ? -x : x;
1631 static int gen_opic_lt(uint64_t a, uint64_t b)
1633 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1636 /* handle integer constant optimizations and various
1637 machine-independent optimizations */
1638 static void gen_opic(int op)
1640 SValue *v1 = vtop - 1;
1641 SValue *v2 = vtop;
1642 int t1 = v1->type.t & VT_BTYPE;
1643 int t2 = v2->type.t & VT_BTYPE;
1644 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1645 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1646 uint64_t l1 = c1 ? v1->c.i : 0;
1647 uint64_t l2 = c2 ? v2->c.i : 0;
1648 int shm = (t1 == VT_LLONG) ? 63 : 31;
1650 if (t1 != VT_LLONG)
1651 l1 = ((uint32_t)l1 |
1652 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1653 if (t2 != VT_LLONG)
1654 l2 = ((uint32_t)l2 |
1655 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1657 if (c1 && c2) {
1658 switch(op) {
1659 case '+': l1 += l2; break;
1660 case '-': l1 -= l2; break;
1661 case '&': l1 &= l2; break;
1662 case '^': l1 ^= l2; break;
1663 case '|': l1 |= l2; break;
1664 case '*': l1 *= l2; break;
1666 case TOK_PDIV:
1667 case '/':
1668 case '%':
1669 case TOK_UDIV:
1670 case TOK_UMOD:
1671 /* if division by zero, generate explicit division */
1672 if (l2 == 0) {
1673 if (const_wanted)
1674 tcc_error("division by zero in constant");
1675 goto general_case;
1677 switch(op) {
1678 default: l1 = gen_opic_sdiv(l1, l2); break;
1679 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1680 case TOK_UDIV: l1 = l1 / l2; break;
1681 case TOK_UMOD: l1 = l1 % l2; break;
1683 break;
1684 case TOK_SHL: l1 <<= (l2 & shm); break;
1685 case TOK_SHR: l1 >>= (l2 & shm); break;
1686 case TOK_SAR:
1687 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1688 break;
1689 /* tests */
1690 case TOK_ULT: l1 = l1 < l2; break;
1691 case TOK_UGE: l1 = l1 >= l2; break;
1692 case TOK_EQ: l1 = l1 == l2; break;
1693 case TOK_NE: l1 = l1 != l2; break;
1694 case TOK_ULE: l1 = l1 <= l2; break;
1695 case TOK_UGT: l1 = l1 > l2; break;
1696 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1697 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1698 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1699 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1700 /* logical */
1701 case TOK_LAND: l1 = l1 && l2; break;
1702 case TOK_LOR: l1 = l1 || l2; break;
1703 default:
1704 goto general_case;
1706 v1->c.i = l1;
1707 vtop--;
1708 } else {
1709 /* for commutative ops, make sure the constant ends up in c2 */
1710 if (c1 && (op == '+' || op == '&' || op == '^' ||
1711 op == '|' || op == '*')) {
1712 vswap();
1713 c2 = c1; //c = c1, c1 = c2, c2 = c;
1714 l2 = l1; //l = l1, l1 = l2, l2 = l;
1716 if (!const_wanted &&
1717 c1 && ((l1 == 0 &&
1718 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1719 (l1 == -1 && op == TOK_SAR))) {
1720 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1721 vtop--;
1722 } else if (!const_wanted &&
1723 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1724 (l2 == -1 && op == '|') ||
1725 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1726 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1727 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1728 if (l2 == 1)
1729 vtop->c.i = 0;
1730 vswap();
1731 vtop--;
1732 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1733 op == TOK_PDIV) &&
1734 l2 == 1) ||
1735 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1736 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1737 l2 == 0) ||
1738 (op == '&' &&
1739 l2 == -1))) {
1740 /* filter out NOP operations like x*1, x-0, x&-1... */
1741 vtop--;
1742 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1743 /* try to use shifts instead of muls or divs */
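/* e.g. x * 8 becomes x << 3, and an exact (TOK_PDIV) or unsigned
   division by 8 becomes an arithmetic or logical right shift by 3 */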
1744 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1745 int n = -1;
1746 while (l2) {
1747 l2 >>= 1;
1748 n++;
1750 vtop->c.i = n;
1751 if (op == '*')
1752 op = TOK_SHL;
1753 else if (op == TOK_PDIV)
1754 op = TOK_SAR;
1755 else
1756 op = TOK_SHR;
1758 goto general_case;
1759 } else if (c2 && (op == '+' || op == '-') &&
1760 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1761 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1762 /* symbol + constant case */
1763 if (op == '-')
1764 l2 = -l2;
1765 vtop--;
1766 vtop->c.i += l2;
1767 } else {
1768 general_case:
1769 if (!nocode_wanted) {
1770 /* call low level op generator */
1771 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1772 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1773 gen_opl(op);
1774 else
1775 gen_opi(op);
1776 } else {
1777 vtop--;
1783 /* generate a floating point operation with constant propagation */
1784 static void gen_opif(int op)
1786 int c1, c2;
1787 SValue *v1, *v2;
1788 long double f1, f2;
1790 v1 = vtop - 1;
1791 v2 = vtop;
1792 /* currently, we cannot do computations with forward symbols */
1793 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1794 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1795 if (c1 && c2) {
1796 if (v1->type.t == VT_FLOAT) {
1797 f1 = v1->c.f;
1798 f2 = v2->c.f;
1799 } else if (v1->type.t == VT_DOUBLE) {
1800 f1 = v1->c.d;
1801 f2 = v2->c.d;
1802 } else {
1803 f1 = v1->c.ld;
1804 f2 = v2->c.ld;
1807 /* NOTE: we only do constant propagation for finite numbers (not
1808 NaN or infinity), as per the ANSI spec */
1809 if (!ieee_finite(f1) || !ieee_finite(f2))
1810 goto general_case;
1812 switch(op) {
1813 case '+': f1 += f2; break;
1814 case '-': f1 -= f2; break;
1815 case '*': f1 *= f2; break;
1816 case '/':
1817 if (f2 == 0.0) {
1818 if (const_wanted)
1819 tcc_error("division by zero in constant");
1820 goto general_case;
1822 f1 /= f2;
1823 break;
1824 /* XXX: also handles tests ? */
1825 default:
1826 goto general_case;
1828 /* XXX: overflow test ? */
1829 if (v1->type.t == VT_FLOAT) {
1830 v1->c.f = f1;
1831 } else if (v1->type.t == VT_DOUBLE) {
1832 v1->c.d = f1;
1833 } else {
1834 v1->c.ld = f1;
1836 vtop--;
1837 } else {
1838 general_case:
1839 if (!nocode_wanted) {
1840 gen_opf(op);
1841 } else {
1842 vtop--;
1847 static int pointed_size(CType *type)
1849 int align;
1850 return type_size(pointed_type(type), &align);
1853 static void vla_runtime_pointed_size(CType *type)
1855 int align;
1856 vla_runtime_type_size(pointed_type(type), &align);
1859 static inline int is_null_pointer(SValue *p)
1861 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1862 return 0;
1863 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1864 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1865 ((p->type.t & VT_BTYPE) == VT_PTR &&
1866 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1869 static inline int is_integer_btype(int bt)
1871 return (bt == VT_BYTE || bt == VT_SHORT ||
1872 bt == VT_INT || bt == VT_LLONG);
1875 /* check types for comparison or subtraction of pointers */
1876 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1878 CType *type1, *type2, tmp_type1, tmp_type2;
1879 int bt1, bt2;
1881 /* null pointers are accepted for all comparisons, as in gcc */
1882 if (is_null_pointer(p1) || is_null_pointer(p2))
1883 return;
1884 type1 = &p1->type;
1885 type2 = &p2->type;
1886 bt1 = type1->t & VT_BTYPE;
1887 bt2 = type2->t & VT_BTYPE;
1888 /* accept comparison between pointer and integer with a warning */
1889 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1890 if (op != TOK_LOR && op != TOK_LAND )
1891 tcc_warning("comparison between pointer and integer");
1892 return;
1895 /* both must be pointers or implicit function pointers */
1896 if (bt1 == VT_PTR) {
1897 type1 = pointed_type(type1);
1898 } else if (bt1 != VT_FUNC)
1899 goto invalid_operands;
1901 if (bt2 == VT_PTR) {
1902 type2 = pointed_type(type2);
1903 } else if (bt2 != VT_FUNC) {
1904 invalid_operands:
1905 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1907 if ((type1->t & VT_BTYPE) == VT_VOID ||
1908 (type2->t & VT_BTYPE) == VT_VOID)
1909 return;
1910 tmp_type1 = *type1;
1911 tmp_type2 = *type2;
1912 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1913 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1914 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1915 /* gcc-like error if '-' is used */
1916 if (op == '-')
1917 goto invalid_operands;
1918 else
1919 tcc_warning("comparison of distinct pointer types lacks a cast");
1923 /* generic gen_op: handles types problems */
1924 ST_FUNC void gen_op(int op)
1926 int u, t1, t2, bt1, bt2, t;
1927 CType type1;
1929 redo:
1930 t1 = vtop[-1].type.t;
1931 t2 = vtop[0].type.t;
1932 bt1 = t1 & VT_BTYPE;
1933 bt2 = t2 & VT_BTYPE;
1935 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1936 tcc_error("operation on a struct");
1937 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1938 if (bt2 == VT_FUNC) {
1939 mk_pointer(&vtop->type);
1940 gaddrof();
1942 if (bt1 == VT_FUNC) {
1943 vswap();
1944 mk_pointer(&vtop->type);
1945 gaddrof();
1946 vswap();
1948 goto redo;
1949 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1950 /* at least one operand is a pointer */
1951 /* relational op: both operands must be pointers */
1952 if (op >= TOK_ULT && op <= TOK_LOR) {
1953 check_comparison_pointer_types(vtop - 1, vtop, op);
1954 /* pointers are handled as unsigned */
1955 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1956 t = VT_LLONG | VT_UNSIGNED;
1957 #else
1958 t = VT_INT | VT_UNSIGNED;
1959 #endif
1960 goto std_op;
1962 /* if both pointers, then it must be the '-' op */
1963 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1964 if (op != '-')
1965 tcc_error("cannot use pointers here");
1966 check_comparison_pointer_types(vtop - 1, vtop, op);
1967 /* XXX: check that types are compatible */
1968 if (vtop[-1].type.t & VT_VLA) {
1969 vla_runtime_pointed_size(&vtop[-1].type);
1970 } else {
1971 vpushi(pointed_size(&vtop[-1].type));
1973 vrott(3);
1974 gen_opic(op);
1975 /* set to integer type */
1976 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1977 vtop->type.t = VT_LLONG;
1978 #else
1979 vtop->type.t = VT_INT;
1980 #endif
1981 vswap();
1982 gen_op(TOK_PDIV);
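/* i.e. the byte difference of the two pointers is divided by the
   element size; TOK_PDIV is used because the division is known to be
   exact (and can thus be strength-reduced to a shift when the element
   size is a power of two) */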
1983 } else {
1984 /* exactly one pointer : must be '+' or '-'. */
1985 if (op != '-' && op != '+')
1986 tcc_error("cannot use pointers here");
1987 /* Put pointer as first operand */
1988 if (bt2 == VT_PTR) {
1989 vswap();
1990 swap(&t1, &t2);
1992 #if PTR_SIZE == 4
1993 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
1994 /* XXX: truncate here because gen_opl can't handle ptr + long long */
1995 gen_cast(&int_type);
1996 #endif
1997 type1 = vtop[-1].type;
1998 type1.t &= ~VT_ARRAY;
1999 if (vtop[-1].type.t & VT_VLA)
2000 vla_runtime_pointed_size(&vtop[-1].type);
2001 else {
2002 u = pointed_size(&vtop[-1].type);
2003 if (u < 0)
2004 tcc_error("unknown array element size");
2005 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2006 vpushll(u);
2007 #else
2008 /* XXX: cast to int ? (long long case) */
2009 vpushi(u);
2010 #endif
2012 gen_op('*');
2013 #if 0
2014 /* #ifdef CONFIG_TCC_BCHECK
2015 The main reason for removing this code:
2016 #include <stdio.h>
2017 int main ()
2019 int v[10];
2020 int i = 10;
2021 int j = 9;
2022 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2023 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2025 When this code is enabled, the output looks like:
2026 v+i-j = 0xfffffffe
2027 v+(i-j) = 0xbff84000
2029 /* if evaluating constant expression, no code should be
2030 generated, so no bound check */
2031 if (tcc_state->do_bounds_check && !const_wanted) {
2032 /* if bounded pointers, we generate a special code to
2033 test bounds */
2034 if (op == '-') {
2035 vpushi(0);
2036 vswap();
2037 gen_op('-');
2039 gen_bounded_ptr_add();
2040 } else
2041 #endif
2043 gen_opic(op);
2045 /* restore the type in case gen_opic() swapped the operands */
2046 vtop->type = type1;
2048 } else if (is_float(bt1) || is_float(bt2)) {
2049 /* compute bigger type and do implicit casts */
2050 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2051 t = VT_LDOUBLE;
2052 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2053 t = VT_DOUBLE;
2054 } else {
2055 t = VT_FLOAT;
2057 /* floats can only be used for a few operations */
2058 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2059 (op < TOK_ULT || op > TOK_GT))
2060 tcc_error("invalid operands for binary operation");
2061 goto std_op;
2062 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2063 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2064 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2065 t |= VT_UNSIGNED;
2066 goto std_op;
2067 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2068 /* cast to biggest op */
2069 t = VT_LLONG;
2070 /* convert to unsigned if it does not fit in a long long */
2071 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2072 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2073 t |= VT_UNSIGNED;
2074 goto std_op;
2075 } else {
2076 /* integer operations */
2077 t = VT_INT;
2078 /* convert to unsigned if it does not fit in an integer */
2079 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2080 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2081 t |= VT_UNSIGNED;
2082 std_op:
2083 /* XXX: currently, some unsigned operations are explicit, so
2084 we modify them here */
2085 if (t & VT_UNSIGNED) {
2086 if (op == TOK_SAR)
2087 op = TOK_SHR;
2088 else if (op == '/')
2089 op = TOK_UDIV;
2090 else if (op == '%')
2091 op = TOK_UMOD;
2092 else if (op == TOK_LT)
2093 op = TOK_ULT;
2094 else if (op == TOK_GT)
2095 op = TOK_UGT;
2096 else if (op == TOK_LE)
2097 op = TOK_ULE;
2098 else if (op == TOK_GE)
2099 op = TOK_UGE;
2101 vswap();
2102 type1.t = t;
2103 gen_cast(&type1);
2104 vswap();
2105 /* special case for shifts and long long: we keep the shift as
2106 an integer */
2107 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2108 type1.t = VT_INT;
2109 gen_cast(&type1);
2110 if (is_float(t))
2111 gen_opif(op);
2112 else
2113 gen_opic(op);
2114 if (op >= TOK_ULT && op <= TOK_GT) {
2115 /* relational op: the result is an int */
2116 vtop->type.t = VT_INT;
2117 } else {
2118 vtop->type.t = t;
2121 // Make sure that we have converted to an rvalue:
2122 if (vtop->r & VT_LVAL && !nocode_wanted)
2123 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2126 #ifndef TCC_TARGET_ARM
2127 /* generic itof for unsigned long long case */
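/* When the source is an unsigned long long, the conversion is not done
   natively here: the value is passed to a runtime helper
   (__floatundisf, __floatundidf or __floatundixf) and the result is
   taken from the floating point return register. */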
2128 static void gen_cvt_itof1(int t)
2130 #ifdef TCC_TARGET_ARM64
2131 gen_cvt_itof(t);
2132 #else
2133 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2134 (VT_LLONG | VT_UNSIGNED)) {
2136 if (t == VT_FLOAT)
2137 vpush_global_sym(&func_old_type, TOK___floatundisf);
2138 #if LDOUBLE_SIZE != 8
2139 else if (t == VT_LDOUBLE)
2140 vpush_global_sym(&func_old_type, TOK___floatundixf);
2141 #endif
2142 else
2143 vpush_global_sym(&func_old_type, TOK___floatundidf);
2144 vrott(2);
2145 gfunc_call(1);
2146 vpushi(0);
2147 vtop->r = reg_fret(t);
2148 } else {
2149 gen_cvt_itof(t);
2151 #endif
2153 #endif
2155 /* generic ftoi for unsigned long long case */
2156 static void gen_cvt_ftoi1(int t)
2158 #ifdef TCC_TARGET_ARM64
2159 gen_cvt_ftoi(t);
2160 #else
2161 int st;
2163 if (t == (VT_LLONG | VT_UNSIGNED)) {
2164 /* not handled natively */
2165 st = vtop->type.t & VT_BTYPE;
2166 if (st == VT_FLOAT)
2167 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2168 #if LDOUBLE_SIZE != 8
2169 else if (st == VT_LDOUBLE)
2170 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2171 #endif
2172 else
2173 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2174 vrott(2);
2175 gfunc_call(1);
2176 vpushi(0);
2177 vtop->r = REG_IRET;
2178 vtop->r2 = REG_LRET;
2179 } else {
2180 gen_cvt_ftoi(t);
2182 #endif
2185 /* force char or short cast */
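/* e.g. a cast to unsigned char is implemented as x & 0xff, while a cast
   to signed char becomes (x << 24) >> 24 with an arithmetic right shift
   (the shift count is 64 - 8 instead of 32 - 8 for long long sources) */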
2186 static void force_charshort_cast(int t)
2188 int bits, dbt;
2189 dbt = t & VT_BTYPE;
2190 /* XXX: add optimization if lvalue : just change type and offset */
2191 if (dbt == VT_BYTE)
2192 bits = 8;
2193 else
2194 bits = 16;
2195 if (t & VT_UNSIGNED) {
2196 vpushi((1 << bits) - 1);
2197 gen_op('&');
2198 } else {
2199 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2200 bits = 64 - bits;
2201 else
2202 bits = 32 - bits;
2203 vpushi(bits);
2204 gen_op(TOK_SHL);
2205 /* result must be signed or the SAR would be converted to an SHR.
2206 This was not the case when "t" was a signed short
2207 and the last value on the stack was an unsigned int */
2208 vtop->type.t &= ~VT_UNSIGNED;
2209 vpushi(bits);
2210 gen_op(TOK_SAR);
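/* What the delayed narrowing above computes for a 32-bit value x
   (a sketch; the 64-bit case uses 56/48-bit shifts instead):

     (unsigned char)x   ->  x & 0xff
     (signed char)x     ->  (x << 24) >> 24    with an arithmetic shift
     (unsigned short)x  ->  x & 0xffff
     (short)x           ->  (x << 16) >> 16
*/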
2214 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2215 static void gen_cast(CType *type)
2217 int sbt, dbt, sf, df, c, p;
2219 /* special delayed cast for char/short */
2220 /* XXX: in some cases (multiple cascaded casts), it may still
2221 be incorrect */
2222 if (vtop->r & VT_MUSTCAST) {
2223 vtop->r &= ~VT_MUSTCAST;
2224 force_charshort_cast(vtop->type.t);
2227 /* bitfields first get cast to ints */
2228 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2229 gv(RC_INT);
2232 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2233 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2235 if (sbt != dbt) {
2236 sf = is_float(sbt);
2237 df = is_float(dbt);
2238 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2239 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2240 if (c) {
2241 /* constant case: we can do it now */
2242 /* XXX: in ISOC, cannot do it if error in convert */
2243 if (sbt == VT_FLOAT)
2244 vtop->c.ld = vtop->c.f;
2245 else if (sbt == VT_DOUBLE)
2246 vtop->c.ld = vtop->c.d;
2248 if (df) {
2249 if ((sbt & VT_BTYPE) == VT_LLONG) {
2250 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2251 vtop->c.ld = vtop->c.i;
2252 else
2253 vtop->c.ld = -(long double)-vtop->c.i;
2254 } else if(!sf) {
2255 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2256 vtop->c.ld = (uint32_t)vtop->c.i;
2257 else
2258 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2261 if (dbt == VT_FLOAT)
2262 vtop->c.f = (float)vtop->c.ld;
2263 else if (dbt == VT_DOUBLE)
2264 vtop->c.d = (double)vtop->c.ld;
2265 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2266 vtop->c.i = vtop->c.ld;
2267 } else if (sf && dbt == VT_BOOL) {
2268 vtop->c.i = (vtop->c.ld != 0);
2269 } else {
2270 if(sf)
2271 vtop->c.i = vtop->c.ld;
2272 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2274 else if (sbt & VT_UNSIGNED)
2275 vtop->c.i = (uint32_t)vtop->c.i;
2276 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2277 else if (sbt == VT_PTR)
2279 #endif
2280 else if (sbt != VT_LLONG)
2281 vtop->c.i = ((uint32_t)vtop->c.i |
2282 -(vtop->c.i & 0x80000000));
2284 if (dbt == (VT_LLONG|VT_UNSIGNED))
2286 else if (dbt == VT_BOOL)
2287 vtop->c.i = (vtop->c.i != 0);
2288 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2289 else if (dbt == VT_PTR)
2291 #endif
2292 else if (dbt != VT_LLONG) {
2293 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2294 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2295 0xffffffff);
2296 vtop->c.i &= m;
2297 if (!(dbt & VT_UNSIGNED))
2298 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2301 } else if (p && dbt == VT_BOOL) {
2302 vtop->r = VT_CONST;
2303 vtop->c.i = 1;
2304 } else if (!nocode_wanted) {
2305 /* non constant case: generate code */
2306 if (sf && df) {
2307 /* convert from fp to fp */
2308 gen_cvt_ftof(dbt);
2309 } else if (df) {
2310 /* convert int to fp */
2311 gen_cvt_itof1(dbt);
2312 } else if (sf) {
2313 /* convert fp to int */
2314 if (dbt == VT_BOOL) {
2315 vpushi(0);
2316 gen_op(TOK_NE);
2317 } else {
2318 /* we handle char/short/etc... with generic code */
2319 if (dbt != (VT_INT | VT_UNSIGNED) &&
2320 dbt != (VT_LLONG | VT_UNSIGNED) &&
2321 dbt != VT_LLONG)
2322 dbt = VT_INT;
2323 gen_cvt_ftoi1(dbt);
2324 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2325 /* additional cast for char/short... */
2326 vtop->type.t = dbt;
2327 gen_cast(type);
2330 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2331 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2332 if ((sbt & VT_BTYPE) != VT_LLONG) {
2333 /* scalar to long long */
2334 /* machine independent conversion */
2335 gv(RC_INT);
2336 /* generate high word */
2337 if (sbt == (VT_INT | VT_UNSIGNED)) {
2338 vpushi(0);
2339 gv(RC_INT);
2340 } else {
2341 if (sbt == VT_PTR) {
2342 /* cast from pointer to int before we apply
2343 shift operation, which pointers don't support*/
2344 gen_cast(&int_type);
2346 gv_dup();
2347 vpushi(31);
2348 gen_op(TOK_SAR);
2350 /* patch second register */
2351 vtop[-1].r2 = vtop->r;
2352 vpop();
2354 #else
2355 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2356 (dbt & VT_BTYPE) == VT_PTR ||
2357 (dbt & VT_BTYPE) == VT_FUNC) {
2358 if ((sbt & VT_BTYPE) != VT_LLONG &&
2359 (sbt & VT_BTYPE) != VT_PTR &&
2360 (sbt & VT_BTYPE) != VT_FUNC) {
2361 /* need to convert from 32bit to 64bit */
2362 gv(RC_INT);
2363 if (sbt != (VT_INT | VT_UNSIGNED)) {
2364 #if defined(TCC_TARGET_ARM64)
2365 gen_cvt_sxtw();
2366 #elif defined(TCC_TARGET_X86_64)
2367 int r = gv(RC_INT);
2368 /* x86_64 specific: movslq */
2369 o(0x6348);
2370 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2371 #else
2372 #error
2373 #endif
2376 #endif
2377 } else if (dbt == VT_BOOL) {
2378 /* scalar to bool */
2379 vpushi(0);
2380 gen_op(TOK_NE);
2381 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2382 (dbt & VT_BTYPE) == VT_SHORT) {
2383 if (sbt == VT_PTR) {
2384 vtop->type.t = VT_INT;
2385 tcc_warning("nonportable conversion from pointer to char/short");
2387 force_charshort_cast(dbt);
2388 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2389 } else if ((dbt & VT_BTYPE) == VT_INT) {
2390 /* scalar to int */
2391 if ((sbt & VT_BTYPE) == VT_LLONG) {
2392 /* from long long: just take low order word */
2393 lexpand();
2394 vpop();
2396 /* if lvalue and single word type, nothing to do because
2397 the lvalue already contains the real type size (see
2398 VT_LVAL_xxx constants) */
2399 #endif
2402 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2403 /* if we are casting between pointer types,
2404 we must update the VT_LVAL_xxx size */
2405 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2406 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2408 vtop->type = *type;
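/* Example of the constant branch of gen_cast() (a sketch): narrowing an
   integer constant masks and sign-extends at compile time, without
   emitting any code:

     (signed char)0x1234    folds to 0x34  ( 52)
     (signed char)0x12f4    folds to -12          0xf4 sign-extended
     (unsigned char)0x12f4  folds to 0xf4 (244)
*/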
2411 /* return type size as known at compile time. Put alignment at 'a' */
2412 ST_FUNC int type_size(CType *type, int *a)
2414 Sym *s;
2415 int bt;
2417 bt = type->t & VT_BTYPE;
2418 if (bt == VT_STRUCT) {
2419 /* struct/union */
2420 s = type->ref;
2421 *a = s->r;
2422 return s->c;
2423 } else if (bt == VT_PTR) {
2424 if (type->t & VT_ARRAY) {
2425 int ts;
2427 s = type->ref;
2428 ts = type_size(&s->type, a);
2430 if (ts < 0 && s->c < 0)
2431 ts = -ts;
2433 return ts * s->c;
2434 } else {
2435 *a = PTR_SIZE;
2436 return PTR_SIZE;
2438 } else if (bt == VT_LDOUBLE) {
2439 *a = LDOUBLE_ALIGN;
2440 return LDOUBLE_SIZE;
2441 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2442 #ifdef TCC_TARGET_I386
2443 #ifdef TCC_TARGET_PE
2444 *a = 8;
2445 #else
2446 *a = 4;
2447 #endif
2448 #elif defined(TCC_TARGET_ARM)
2449 #ifdef TCC_ARM_EABI
2450 *a = 8;
2451 #else
2452 *a = 4;
2453 #endif
2454 #else
2455 *a = 8;
2456 #endif
2457 return 8;
2458 } else if (bt == VT_INT || bt == VT_FLOAT) {
2459 *a = 4;
2460 return 4;
2461 } else if (bt == VT_SHORT) {
2462 *a = 2;
2463 return 2;
2464 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2465 *a = 8;
2466 return 16;
2467 } else if (bt == VT_ENUM) {
2468 *a = 4;
2469 /* Enums might be incomplete, so don't just return '4' here. */
2470 return type->ref->c;
2471 } else {
2472 /* char, void, function, _Bool */
2473 *a = 1;
2474 return 1;
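/* Worked example for type_size() (an assumption: a typical x86-64 ELF
   configuration; the #ifdefs above change some of these values):

     short              size 2,  align 2
     int, float         size 4,  align 4
     long long, double  size 8,  align 8   (align 4 on i386 non-PE)
     long double        LDOUBLE_SIZE, aligned to LDOUBLE_ALIGN
     struct/union       the values stored by struct_decl() in
                        ref->c / ref->r
*/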
2478 /* push type size as known at run time on top of the value stack. Put
2479 alignment at 'a' */
2480 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2482 if (type->t & VT_VLA) {
2483 type_size(&type->ref->type, a);
2484 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2485 } else {
2486 vpushi(type_size(type, a));
2490 static void vla_sp_restore(void) {
2491 if (vlas_in_scope) {
2492 gen_vla_sp_restore(vla_sp_loc);
2496 static void vla_sp_restore_root(void) {
2497 if (vlas_in_scope) {
2498 gen_vla_sp_restore(vla_sp_root_loc);
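/* Example of code that exercises the helpers above (a sketch of the
   source only):

     void f(int n)
     {
         int a[n];
         a[0] = 0;
     }

   the size n * sizeof(int) is evaluated once at the declaration and
   stored in a stack slot; vla_runtime_type_size() re-reads that stored
   value whenever the size is needed, and the saved stack pointer is
   restored with vla_sp_restore()/vla_sp_restore_root() when the VLA's
   scope is left. */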
2502 /* return the pointed type of t */
2503 static inline CType *pointed_type(CType *type)
2505 return &type->ref->type;
2508 /* modify 'type' so that it is a pointer to the original type. */
2509 ST_FUNC void mk_pointer(CType *type)
2511 Sym *s;
2512 s = sym_push(SYM_FIELD, type, 0, -1);
2513 type->t = VT_PTR | (type->t & ~VT_TYPE);
2514 type->ref = s;
2517 /* compare function types. OLD functions match any new functions */
2518 static int is_compatible_func(CType *type1, CType *type2)
2520 Sym *s1, *s2;
2522 s1 = type1->ref;
2523 s2 = type2->ref;
2524 if (!is_compatible_types(&s1->type, &s2->type))
2525 return 0;
2526 /* check func_call */
2527 if (s1->a.func_call != s2->a.func_call)
2528 return 0;
2529 /* XXX: not complete */
2530 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2531 return 1;
2532 if (s1->c != s2->c)
2533 return 0;
2534 while (s1 != NULL) {
2535 if (s2 == NULL)
2536 return 0;
2537 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2538 return 0;
2539 s1 = s1->next;
2540 s2 = s2->next;
2542 if (s2)
2543 return 0;
2544 return 1;
2547 /* return true if type1 and type2 are the same. If unqualified is
2548 true, qualifiers on the types are ignored.
2550 - enums are not checked, as with gcc's __builtin_types_compatible_p()
2552 static int compare_types(CType *type1, CType *type2, int unqualified)
2554 int bt1, t1, t2;
2556 t1 = type1->t & VT_TYPE;
2557 t2 = type2->t & VT_TYPE;
2558 if (unqualified) {
2559 /* strip qualifiers before comparing */
2560 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2561 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2563 /* Default vs. explicit signedness only matters for char */
2564 if ((t1 & VT_BTYPE) != VT_BYTE) {
2565 t1 &= ~VT_DEFSIGN;
2566 t2 &= ~VT_DEFSIGN;
2568 /* An enum is compatible with (unsigned) int. Ideally we would
2569 store the enums signedness in type->ref.a.<some_bit> and
2570 only accept unsigned enums with unsigned int and vice versa.
2571 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2572 from pointer target types, so we can't add it here either. */
2573 if ((t1 & VT_BTYPE) == VT_ENUM) {
2574 t1 = VT_INT;
2575 if (type1->ref->a.unsigned_enum)
2576 t1 |= VT_UNSIGNED;
2578 if ((t2 & VT_BTYPE) == VT_ENUM) {
2579 t2 = VT_INT;
2580 if (type2->ref->a.unsigned_enum)
2581 t2 |= VT_UNSIGNED;
2583 /* XXX: bitfields ? */
2584 if (t1 != t2)
2585 return 0;
2586 /* test more complicated cases */
2587 bt1 = t1 & VT_BTYPE;
2588 if (bt1 == VT_PTR) {
2589 type1 = pointed_type(type1);
2590 type2 = pointed_type(type2);
2591 return is_compatible_types(type1, type2);
2592 } else if (bt1 == VT_STRUCT) {
2593 return (type1->ref == type2->ref);
2594 } else if (bt1 == VT_FUNC) {
2595 return is_compatible_func(type1, type2);
2596 } else {
2597 return 1;
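/* Examples of the comparison above (a sketch):

     enum E { A = -1, B };    compared as plain int
     enum F { X, Y };         compared as unsigned int, because
                              struct_decl() set unsigned_enum when no
                              enumerator was negative
     struct S *p, *q;         struct types match only when both sides
                              refer to the same definition (same ->ref)
*/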
2601 /* return true if type1 and type2 are exactly the same (including
2602 qualifiers).
2604 static int is_compatible_types(CType *type1, CType *type2)
2606 return compare_types(type1,type2,0);
2609 /* return true if type1 and type2 are the same (ignoring qualifiers).
2611 static int is_compatible_parameter_types(CType *type1, CType *type2)
2613 return compare_types(type1,type2,1);
2616 /* print a type. If 'varstr' is not NULL, then the variable is also
2617 printed in the type */
2618 /* XXX: union */
2619 /* XXX: add array and function pointers */
2620 static void type_to_str(char *buf, int buf_size,
2621 CType *type, const char *varstr)
2623 int bt, v, t;
2624 Sym *s, *sa;
2625 char buf1[256];
2626 const char *tstr;
2628 t = type->t & VT_TYPE;
2629 bt = t & VT_BTYPE;
2630 buf[0] = '\0';
2631 if (t & VT_CONSTANT)
2632 pstrcat(buf, buf_size, "const ");
2633 if (t & VT_VOLATILE)
2634 pstrcat(buf, buf_size, "volatile ");
2635 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2636 pstrcat(buf, buf_size, "unsigned ");
2637 else if (t & VT_DEFSIGN)
2638 pstrcat(buf, buf_size, "signed ");
2639 switch(bt) {
2640 case VT_VOID:
2641 tstr = "void";
2642 goto add_tstr;
2643 case VT_BOOL:
2644 tstr = "_Bool";
2645 goto add_tstr;
2646 case VT_BYTE:
2647 tstr = "char";
2648 goto add_tstr;
2649 case VT_SHORT:
2650 tstr = "short";
2651 goto add_tstr;
2652 case VT_INT:
2653 tstr = "int";
2654 goto add_tstr;
2655 case VT_LONG:
2656 tstr = "long";
2657 goto add_tstr;
2658 case VT_LLONG:
2659 tstr = "long long";
2660 goto add_tstr;
2661 case VT_FLOAT:
2662 tstr = "float";
2663 goto add_tstr;
2664 case VT_DOUBLE:
2665 tstr = "double";
2666 goto add_tstr;
2667 case VT_LDOUBLE:
2668 tstr = "long double";
2669 add_tstr:
2670 pstrcat(buf, buf_size, tstr);
2671 break;
2672 case VT_ENUM:
2673 case VT_STRUCT:
2674 if (bt == VT_STRUCT)
2675 tstr = "struct ";
2676 else
2677 tstr = "enum ";
2678 pstrcat(buf, buf_size, tstr);
2679 v = type->ref->v & ~SYM_STRUCT;
2680 if (v >= SYM_FIRST_ANOM)
2681 pstrcat(buf, buf_size, "<anonymous>");
2682 else
2683 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2684 break;
2685 case VT_FUNC:
2686 s = type->ref;
2687 type_to_str(buf, buf_size, &s->type, varstr);
2688 pstrcat(buf, buf_size, "(");
2689 sa = s->next;
2690 while (sa != NULL) {
2691 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2692 pstrcat(buf, buf_size, buf1);
2693 sa = sa->next;
2694 if (sa)
2695 pstrcat(buf, buf_size, ", ");
2697 pstrcat(buf, buf_size, ")");
2698 goto no_var;
2699 case VT_PTR:
2700 s = type->ref;
2701 if (t & VT_ARRAY) {
2702 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2703 type_to_str(buf, buf_size, &s->type, buf1);
2704 goto no_var;
2706 pstrcpy(buf1, sizeof(buf1), "*");
2707 if (t & VT_CONSTANT)
2708 pstrcat(buf1, buf_size, "const ");
2709 if (t & VT_VOLATILE)
2710 pstrcat(buf1, buf_size, "volatile ");
2711 if (varstr)
2712 pstrcat(buf1, sizeof(buf1), varstr);
2713 type_to_str(buf, buf_size, &s->type, buf1);
2714 goto no_var;
2716 if (varstr) {
2717 pstrcat(buf, buf_size, " ");
2718 pstrcat(buf, buf_size, varstr);
2720 no_var: ;
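/* Worked examples of the recursion above:

     int *p       is rendered as "int *p"     (the VT_PTR branch builds
                                               "*p", then recurses)
     char a[10]   is rendered as "char a[10]" (the VT_ARRAY branch)
*/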
2723 /* verify type compatibility to store vtop in 'dt' type, and generate
2724 casts if needed. */
2725 static void gen_assign_cast(CType *dt)
2727 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2728 char buf1[256], buf2[256];
2729 int dbt, sbt;
2731 st = &vtop->type; /* source type */
2732 dbt = dt->t & VT_BTYPE;
2733 sbt = st->t & VT_BTYPE;
2734 if (sbt == VT_VOID || dbt == VT_VOID) {
2735 if (sbt == VT_VOID && dbt == VT_VOID)
2736 ; /*
2737 It is Ok if both are void
2738 A test program:
2739 void func1() {}
2740 void func2() {
2741 return func1();
2743 gcc accepts this program
2745 else
2746 tcc_error("cannot cast from/to void");
2748 if (dt->t & VT_CONSTANT)
2749 tcc_warning("assignment of read-only location");
2750 switch(dbt) {
2751 case VT_PTR:
2752 /* special cases for pointers */
2753 /* '0' can also be a pointer */
2754 if (is_null_pointer(vtop))
2755 goto type_ok;
2756 /* accept implicit pointer to integer cast with warning */
2757 if (is_integer_btype(sbt)) {
2758 tcc_warning("assignment makes pointer from integer without a cast");
2759 goto type_ok;
2761 type1 = pointed_type(dt);
2762 /* a function is implicitly a function pointer */
2763 if (sbt == VT_FUNC) {
2764 if ((type1->t & VT_BTYPE) != VT_VOID &&
2765 !is_compatible_types(pointed_type(dt), st))
2766 tcc_warning("assignment from incompatible pointer type");
2767 goto type_ok;
2769 if (sbt != VT_PTR)
2770 goto error;
2771 type2 = pointed_type(st);
2772 if ((type1->t & VT_BTYPE) == VT_VOID ||
2773 (type2->t & VT_BTYPE) == VT_VOID) {
2774 /* void * can match anything */
2775 } else {
2776 /* exact type match, except for qualifiers */
2777 tmp_type1 = *type1;
2778 tmp_type2 = *type2;
2779 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2780 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2781 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2782 /* Like GCC, don't warn by default for mere changes
2783 in pointer target signedness. Do warn for different
2784 base types, though, in particular for unsigned enums
2785 and signed int targets. */
2786 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2787 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2788 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2790 else
2791 tcc_warning("assignment from incompatible pointer type");
2794 /* check const and volatile */
2795 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2796 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2797 tcc_warning("assignment discards qualifiers from pointer target type");
2798 break;
2799 case VT_BYTE:
2800 case VT_SHORT:
2801 case VT_INT:
2802 case VT_LLONG:
2803 if (sbt == VT_PTR || sbt == VT_FUNC) {
2804 tcc_warning("assignment makes integer from pointer without a cast");
2805 } else if (sbt == VT_STRUCT) {
2806 goto case_VT_STRUCT;
2808 /* XXX: more tests */
2809 break;
2810 case VT_STRUCT:
2811 case_VT_STRUCT:
2812 tmp_type1 = *dt;
2813 tmp_type2 = *st;
2814 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2815 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2816 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2817 error:
2818 type_to_str(buf1, sizeof(buf1), st, NULL);
2819 type_to_str(buf2, sizeof(buf2), dt, NULL);
2820 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2822 break;
2824 type_ok:
2825 gen_cast(dt);
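/* Assignments reaching the branches above, and the diagnostics they
   trigger (a sketch):

     int *p = 3;          warning: assignment makes pointer from
                          integer without a cast
     int *q = (void *)0;  accepted (null pointer / void * case)
     const int *cp; int *r;
     r = cp;              warning: assignment discards qualifiers from
                          pointer target type
*/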
2828 /* store vtop in lvalue pushed on stack */
2829 ST_FUNC void vstore(void)
2831 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2833 ft = vtop[-1].type.t;
2834 sbt = vtop->type.t & VT_BTYPE;
2835 dbt = ft & VT_BTYPE;
2836 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2837 (sbt == VT_INT && dbt == VT_SHORT))
2838 && !(vtop->type.t & VT_BITFIELD)) {
2839 /* optimize char/short casts */
2840 delayed_cast = VT_MUSTCAST;
2841 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2842 ((1 << VT_STRUCT_SHIFT) - 1));
2843 /* XXX: factorize */
2844 if (ft & VT_CONSTANT)
2845 tcc_warning("assignment of read-only location");
2846 } else {
2847 delayed_cast = 0;
2848 if (!(ft & VT_BITFIELD))
2849 gen_assign_cast(&vtop[-1].type);
2852 if (sbt == VT_STRUCT) {
2853 /* if structure, only generate pointer */
2854 /* structure assignment : generate memcpy */
2855 /* XXX: optimize if small size */
2856 if (!nocode_wanted) {
2857 size = type_size(&vtop->type, &align);
2859 /* destination */
2860 vswap();
2861 vtop->type.t = VT_PTR;
2862 gaddrof();
2864 /* address of memcpy() */
2865 #ifdef TCC_ARM_EABI
2866 if(!(align & 7))
2867 vpush_global_sym(&func_old_type, TOK_memcpy8);
2868 else if(!(align & 3))
2869 vpush_global_sym(&func_old_type, TOK_memcpy4);
2870 else
2871 #endif
2872 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2873 vpush_global_sym(&func_old_type, TOK_memmove);
2875 vswap();
2876 /* source */
2877 vpushv(vtop - 2);
2878 vtop->type.t = VT_PTR;
2879 gaddrof();
2880 /* type size */
2881 vpushi(size);
2882 gfunc_call(3);
2883 } else {
2884 vswap();
2885 vpop();
2887 /* leave source on stack */
2888 } else if (ft & VT_BITFIELD) {
2889 /* bitfield store handling */
2891 /* save lvalue as expression result (example: s.b = s.a = n;) */
2892 vdup(), vtop[-1] = vtop[-2];
2894 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2895 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2896 /* remove bit field info to avoid loops */
2897 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2899 if((ft & VT_BTYPE) == VT_BOOL) {
2900 gen_cast(&vtop[-1].type);
2901 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2904 /* duplicate destination */
2905 vdup();
2906 vtop[-1] = vtop[-2];
2908 /* mask and shift source */
2909 if((ft & VT_BTYPE) != VT_BOOL) {
2910 if((ft & VT_BTYPE) == VT_LLONG) {
2911 vpushll((1ULL << bit_size) - 1ULL);
2912 } else {
2913 vpushi((1 << bit_size) - 1);
2915 gen_op('&');
2917 vpushi(bit_pos);
2918 gen_op(TOK_SHL);
2919 /* load destination, mask and or with source */
2920 vswap();
2921 if((ft & VT_BTYPE) == VT_LLONG) {
2922 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2923 } else {
2924 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2926 gen_op('&');
2927 gen_op('|');
2928 /* store result */
2929 vstore();
2930 /* ... and discard */
2931 vpop();
2933 } else {
2934 if (!nocode_wanted) {
2935 #ifdef CONFIG_TCC_BCHECK
2936 /* bound check case */
2937 if (vtop[-1].r & VT_MUSTBOUND) {
2938 vswap();
2939 gbound();
2940 vswap();
2942 #endif
2943 rc = RC_INT;
2944 if (is_float(ft)) {
2945 rc = RC_FLOAT;
2946 #ifdef TCC_TARGET_X86_64
2947 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2948 rc = RC_ST0;
2949 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2950 rc = RC_FRET;
2952 #endif
2954 r = gv(rc); /* generate value */
2955 /* if lvalue was saved on stack, must read it */
2956 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2957 SValue sv;
2958 t = get_reg(RC_INT);
2959 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2960 sv.type.t = VT_PTR;
2961 #else
2962 sv.type.t = VT_INT;
2963 #endif
2964 sv.r = VT_LOCAL | VT_LVAL;
2965 sv.c.i = vtop[-1].c.i;
2966 load(t, &sv);
2967 vtop[-1].r = t | VT_LVAL;
2969 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2970 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2971 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
2972 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
2973 #else
2974 if ((ft & VT_BTYPE) == VT_LLONG) {
2975 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
2976 #endif
2977 vtop[-1].type.t = load_type;
2978 store(r, vtop - 1);
2979 vswap();
2980 /* convert to int to increment easily */
2981 vtop->type.t = addr_type;
2982 gaddrof();
2983 vpushi(load_size);
2984 gen_op('+');
2985 vtop->r |= VT_LVAL;
2986 vswap();
2987 vtop[-1].type.t = load_type;
2988 /* XXX: it works because r2 is spilled last ! */
2989 store(vtop->r2, vtop - 1);
2990 } else {
2991 store(r, vtop - 1);
2994 vswap();
2995 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
2996 vtop->r |= delayed_cast;
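/* The bitfield branch above performs a read-modify-write; roughly the
   same computation as this C sketch (pos, size, x illustrative only):

     unsigned mask = (1u << size) - 1;
     word = (word & ~(mask << pos)) | ((x & mask) << pos);
*/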
3000 /* 'post' selects post- vs pre-increment/decrement. c is the token ++ or -- */
3001 ST_FUNC void inc(int post, int c)
3003 test_lvalue();
3004 vdup(); /* save lvalue */
3005 if (post) {
3006 if (!nocode_wanted)
3007 gv_dup(); /* duplicate value */
3008 else
3009 vdup(); /* duplicate value */
3010 vrotb(3);
3011 vrotb(3);
3013 /* add constant */
3014 vpushi(c - TOK_MID);
3015 gen_op('+');
3016 vstore(); /* store value */
3017 if (post)
3018 vpop(); /* if post op, return saved value */
3021 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3023 /* read the string */
3024 if (tok != TOK_STR)
3025 expect(msg);
3026 cstr_new(astr);
3027 while (tok == TOK_STR) {
3028 /* XXX: add \0 handling too ? */
3029 cstr_cat(astr, tokc.str.data, -1);
3030 next();
3032 cstr_ccat(astr, '\0');
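/* parse_mult_str() concatenates adjacent string literal tokens, so for
   example (a sketch)

     __attribute__((section(".rodata" ".msgs")))

   produces the single section name ".rodata.msgs". */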
3035 /* Parse GNUC __attribute__ extension. Currently, the following
3036 extensions are recognized:
3037 - aligned(n) : set data/function alignment.
3038 - packed : force data alignment to 1
3039 - section(x) : generate data/code in this section.
3040 - unused : currently ignored, but may be used someday.
3041 - regparm(n) : pass function parameters in registers (i386 only)
3043 static void parse_attribute(AttributeDef *ad)
3045 int t, n;
3046 CString astr;
3048 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3049 next();
3050 skip('(');
3051 skip('(');
3052 while (tok != ')') {
3053 if (tok < TOK_IDENT)
3054 expect("attribute name");
3055 t = tok;
3056 next();
3057 switch(t) {
3058 case TOK_SECTION1:
3059 case TOK_SECTION2:
3060 skip('(');
3061 parse_mult_str(&astr, "section name");
3062 ad->section = find_section(tcc_state, (char *)astr.data);
3063 skip(')');
3064 cstr_free(&astr);
3065 break;
3066 case TOK_ALIAS1:
3067 case TOK_ALIAS2:
3068 skip('(');
3069 parse_mult_str(&astr, "alias(\"target\")");
3070 ad->alias_target = /* save string as token, for later */
3071 tok_alloc((char*)astr.data, astr.size-1)->tok;
3072 skip(')');
3073 cstr_free(&astr);
3074 break;
3075 case TOK_VISIBILITY1:
3076 case TOK_VISIBILITY2:
3077 skip('(');
3078 parse_mult_str(&astr,
3079 "visibility(\"default|hidden|internal|protected\")");
3080 if (!strcmp (astr.data, "default"))
3081 ad->a.visibility = STV_DEFAULT;
3082 else if (!strcmp (astr.data, "hidden"))
3083 ad->a.visibility = STV_HIDDEN;
3084 else if (!strcmp (astr.data, "internal"))
3085 ad->a.visibility = STV_INTERNAL;
3086 else if (!strcmp (astr.data, "protected"))
3087 ad->a.visibility = STV_PROTECTED;
3088 else
3089 expect("visibility(\"default|hidden|internal|protected\")");
3090 skip(')');
3091 cstr_free(&astr);
3092 break;
3093 case TOK_ALIGNED1:
3094 case TOK_ALIGNED2:
3095 if (tok == '(') {
3096 next();
3097 n = expr_const();
3098 if (n <= 0 || (n & (n - 1)) != 0)
3099 tcc_error("alignment must be a positive power of two");
3100 skip(')');
3101 } else {
3102 n = MAX_ALIGN;
3104 ad->a.aligned = n;
3105 break;
3106 case TOK_PACKED1:
3107 case TOK_PACKED2:
3108 ad->a.packed = 1;
3109 break;
3110 case TOK_WEAK1:
3111 case TOK_WEAK2:
3112 ad->a.weak = 1;
3113 break;
3114 case TOK_UNUSED1:
3115 case TOK_UNUSED2:
3116 /* currently, no need to handle it because tcc does not
3117 track unused objects */
3118 break;
3119 case TOK_NORETURN1:
3120 case TOK_NORETURN2:
3121 /* currently ignored: tcc makes no use of the
3122 noreturn information */
3123 break;
3124 case TOK_CDECL1:
3125 case TOK_CDECL2:
3126 case TOK_CDECL3:
3127 ad->a.func_call = FUNC_CDECL;
3128 break;
3129 case TOK_STDCALL1:
3130 case TOK_STDCALL2:
3131 case TOK_STDCALL3:
3132 ad->a.func_call = FUNC_STDCALL;
3133 break;
3134 #ifdef TCC_TARGET_I386
3135 case TOK_REGPARM1:
3136 case TOK_REGPARM2:
3137 skip('(');
3138 n = expr_const();
3139 if (n > 3)
3140 n = 3;
3141 else if (n < 0)
3142 n = 0;
3143 if (n > 0)
3144 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3145 skip(')');
3146 break;
3147 case TOK_FASTCALL1:
3148 case TOK_FASTCALL2:
3149 case TOK_FASTCALL3:
3150 ad->a.func_call = FUNC_FASTCALLW;
3151 break;
3152 #endif
3153 case TOK_MODE:
3154 skip('(');
3155 switch(tok) {
3156 case TOK_MODE_DI:
3157 ad->a.mode = VT_LLONG + 1;
3158 break;
3159 case TOK_MODE_QI:
3160 ad->a.mode = VT_BYTE + 1;
3161 break;
3162 case TOK_MODE_HI:
3163 ad->a.mode = VT_SHORT + 1;
3164 break;
3165 case TOK_MODE_SI:
3166 case TOK_MODE_word:
3167 ad->a.mode = VT_INT + 1;
3168 break;
3169 default:
3170 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3171 break;
3173 next();
3174 skip(')');
3175 break;
3176 case TOK_DLLEXPORT:
3177 ad->a.func_export = 1;
3178 break;
3179 case TOK_DLLIMPORT:
3180 ad->a.func_import = 1;
3181 break;
3182 default:
3183 if (tcc_state->warn_unsupported)
3184 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3185 /* skip parameters */
3186 if (tok == '(') {
3187 int parenthesis = 0;
3188 do {
3189 if (tok == '(')
3190 parenthesis++;
3191 else if (tok == ')')
3192 parenthesis--;
3193 next();
3194 } while (parenthesis && tok != -1);
3196 break;
3198 if (tok != ',')
3199 break;
3200 next();
3202 skip(')');
3203 skip(')');
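/* Usage examples for some of the attributes recognized above (a
   sketch; placement before the struct body matters for 'packed',
   because the layout is computed while the body is parsed):

     static int buf[256] __attribute__((aligned(16)));
     struct __attribute__((packed)) pkt { char type; int len; };
     int init(void) __attribute__((section(".init.text")));
     void fatal(const char *msg) __attribute__((noreturn));
*/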
3207 static Sym * find_field (CType *type, int v)
3209 Sym *s = type->ref;
3210 v |= SYM_FIELD;
3211 while ((s = s->next) != NULL) {
3212 if ((s->v & SYM_FIELD) && (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3213 Sym *ret = find_field (&s->type, v);
3214 if (ret)
3215 return ret;
3217 if (s->v == v)
3218 break;
3220 return s;
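/* find_field() recurses into anonymous members, which is what makes
   direct access to the fields of an anonymous struct/union work:

     struct value {
         int tag;
         union { int i; float f; };    anonymous union
     } x;

     x.i = 1;     the member 'i' is found by recursing into the
                  anonymous union member
*/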
3223 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3224 static void struct_decl(CType *type, AttributeDef *ad, int u)
3226 int a, v, size, align, maxalign, c, offset, flexible, extra_bytes;
3227 int bit_size, bit_pos, bsize, bt, lbit_pos, prevbt;
3228 Sym *s, *ss, *ass, **ps;
3229 AttributeDef ad1;
3230 CType type1, btype;
3232 a = tok; /* save decl type */
3233 next();
3234 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3235 parse_attribute(ad);
3236 if (tok != '{') {
3237 v = tok;
3238 next();
3239 /* struct already defined ? return it */
3240 if (v < TOK_IDENT)
3241 expect("struct/union/enum name");
3242 s = struct_find(v);
3243 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3244 if (s->type.t != a)
3245 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3246 goto do_decl;
3248 } else {
3249 v = anon_sym++;
3251 /* Record the original enum/struct/union token. */
3252 type1.t = a;
3253 type1.ref = NULL;
3254 /* we put an undefined size for struct/union */
3255 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3256 s->r = 0; /* default alignment is zero as gcc */
3257 /* put struct/union/enum name in type */
3258 do_decl:
3259 type->t = u;
3260 type->ref = s;
3262 if (tok == '{') {
3263 next();
3264 if (s->c != -1)
3265 tcc_error("struct/union/enum already defined");
3266 /* cannot be empty */
3267 c = 0;
3268 /* empty enums are not allowed */
3269 if (a == TOK_ENUM) {
3270 int seen_neg = 0;
3271 for(;;) {
3272 v = tok;
3273 if (v < TOK_UIDENT)
3274 expect("identifier");
3275 ss = sym_find(v);
3276 if (ss && !local_stack)
3277 tcc_error("redefinition of enumerator '%s'",
3278 get_tok_str(v, NULL));
3279 next();
3280 if (tok == '=') {
3281 next();
3282 c = expr_const();
3284 if (c < 0)
3285 seen_neg = 1;
3286 /* enum symbols have static storage */
3287 ss = sym_push(v, &int_type, VT_CONST, c);
3288 ss->type.t |= VT_STATIC;
3289 if (tok != ',')
3290 break;
3291 next();
3292 c++;
3293 /* NOTE: we accept a trailing comma */
3294 if (tok == '}')
3295 break;
3297 if (!seen_neg)
3298 s->a.unsigned_enum = 1;
3299 s->c = type_size(&int_type, &align);
3300 skip('}');
3301 } else {
3302 maxalign = 1;
3303 ps = &s->next;
3304 prevbt = VT_INT;
3305 bit_pos = 0;
3306 offset = 0;
3307 flexible = 0;
3308 while (tok != '}') {
3309 if (!parse_btype(&btype, &ad1)) {
3310 skip(';');
3311 continue;
3313 while (1) {
3314 extra_bytes = 0;
3315 if (flexible)
3316 tcc_error("flexible array member '%s' not at the end of struct",
3317 get_tok_str(v, NULL));
3318 bit_size = -1;
3319 v = 0;
3320 type1 = btype;
3321 if (tok != ':') {
3322 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3323 if (v == 0) {
3324 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3325 expect("identifier");
3326 else {
3327 int v = btype.ref->v;
3328 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3329 if (tcc_state->ms_extensions == 0)
3330 expect("identifier");
3334 if (type_size(&type1, &align) < 0) {
3335 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3336 flexible = 1;
3337 else
3338 tcc_error("field '%s' has incomplete type",
3339 get_tok_str(v, NULL));
3341 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3342 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3343 tcc_error("invalid type for '%s'",
3344 get_tok_str(v, NULL));
3346 if (tok == ':') {
3347 next();
3348 bit_size = expr_const();
3349 /* XXX: handle v = 0 case for messages */
3350 if (bit_size < 0)
3351 tcc_error("negative width in bit-field '%s'",
3352 get_tok_str(v, NULL));
3353 if (v && bit_size == 0)
3354 tcc_error("zero width for bit-field '%s'",
3355 get_tok_str(v, NULL));
3357 size = type_size(&type1, &align);
3358 if (ad1.a.aligned) {
3359 if (align < ad1.a.aligned)
3360 align = ad1.a.aligned;
3361 } else if (ad1.a.packed || ad->a.packed) {
3362 align = 1;
3363 } else if (*tcc_state->pack_stack_ptr) {
3364 if (align > *tcc_state->pack_stack_ptr)
3365 align = *tcc_state->pack_stack_ptr;
3367 lbit_pos = 0;
3368 if (bit_size >= 0) {
3369 bt = type1.t & VT_BTYPE;
3370 if (bt != VT_INT &&
3371 bt != VT_BYTE &&
3372 bt != VT_SHORT &&
3373 bt != VT_BOOL &&
3374 bt != VT_ENUM &&
3375 bt != VT_LLONG)
3376 tcc_error("bitfields must have scalar type");
3377 bsize = size * 8;
3378 if (bit_size > bsize) {
3379 tcc_error("width of '%s' exceeds its type",
3380 get_tok_str(v, NULL));
3381 } else if (bit_size == bsize) {
3382 /* no need for bit fields */
3383 bit_pos = 0;
3384 } else if (bit_size == 0) {
3385 /* XXX: what to do if only padding in a
3386 structure ? */
3387 /* zero size: means to pad */
3388 bit_pos = 0;
3389 } else {
3390 /* if the type changes, in a union, or if the field would
3391 * overrun the alignment slot, start at a newly
3392 * aligned slot */
3393 if ((bit_pos + bit_size) > bsize ||
3394 bt != prevbt || a == TOK_UNION)
3395 bit_pos = 0;
3396 lbit_pos = bit_pos;
3397 /* XXX: handle LSB first */
3398 type1.t |= VT_BITFIELD |
3399 (bit_pos << VT_STRUCT_SHIFT) |
3400 (bit_size << (VT_STRUCT_SHIFT + 6));
3401 bit_pos += bit_size;
3402 /* without ms-bitfields, allocate the
3403 * minimum number of bytes necessary,
3404 * adding single bytes as needed */
3405 if (!tcc_state->ms_bitfields) {
3406 if (lbit_pos == 0)
3407 /* minimum bytes for new bitfield */
3408 size = (bit_size + 7) / 8;
3409 else {
3410 /* enough spare bits already allocated? */
3411 bit_size = (lbit_pos - 1) % 8 + 1 + bit_size;
3412 if (bit_size > 8) /* doesn't fit */
3413 extra_bytes = (bit_size - 1) / 8;
3417 prevbt = bt;
3418 } else {
3419 bit_pos = 0;
3421 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3422 /* add new memory data only if starting bit
3423 field or adding bytes to existing bit field */
3424 if (extra_bytes) c += extra_bytes;
3425 else if (lbit_pos == 0) {
3426 if (a == TOK_STRUCT) {
3427 c = (c + align - 1) & -align;
3428 offset = c;
3429 if (size > 0)
3430 c += size;
3431 } else {
3432 offset = 0;
3433 if (size > c)
3434 c = size;
3436 if (align > maxalign)
3437 maxalign = align;
3439 #if 0
3440 printf("add field %s offset=%d",
3441 get_tok_str(v, NULL), offset);
3442 if (type1.t & VT_BITFIELD) {
3443 printf(" pos=%d size=%d",
3444 (type1.t >> VT_STRUCT_SHIFT) & 0x3f,
3445 (type1.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3447 printf("\n");
3448 #endif
3450 if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
3451 /* An anonymous struct/union. Adjust member offsets
3452 to reflect the real offset of our containing struct.
3453 Also set the offset of this anon member inside
3454 the outer struct to be zero. This way it
3455 works both when accessing the field offset directly
3456 (from the base object) and when recursing over
3457 members in initializer handling. */
3458 int v2 = btype.ref->v;
3459 if (!(v2 & SYM_FIELD) &&
3460 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3461 Sym **pps;
3462 /* This happens only with MS extensions. The
3463 anon member has a named struct type, so it
3464 potentially is shared with other references.
3465 We need to unshare members so we can modify
3466 them. */
3467 ass = type1.ref;
3468 type1.ref = sym_push(anon_sym++ | SYM_FIELD,
3469 &type1.ref->type, 0,
3470 type1.ref->c);
3471 pps = &type1.ref->next;
3472 while ((ass = ass->next) != NULL) {
3473 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3474 pps = &((*pps)->next);
3476 *pps = NULL;
3478 ass = type1.ref;
3479 while ((ass = ass->next) != NULL)
3480 ass->c += offset;
3481 offset = 0;
3482 v = anon_sym++;
3484 if (v) {
3485 ss = sym_push(v | SYM_FIELD, &type1, 0, offset);
3486 *ps = ss;
3487 ps = &ss->next;
3489 if (tok == ';' || tok == TOK_EOF)
3490 break;
3491 skip(',');
3493 skip(';');
3495 skip('}');
3496 /* store size and alignment */
3497 s->c = (c + maxalign - 1) & -maxalign;
3498 s->r = maxalign;
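/* Worked layout example for the struct branch above (assuming default
   alignments, no 'packed' attribute and no #pragma pack):

     struct s { char c; short h; char d; };

   places the members at offsets 0, 2 and 4, sets maxalign to 2, and
   the final size is (5 + 2 - 1) & -2 = 6 bytes. */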
3503 /* return 1 if the basic type is a size specifier (short, long, long long) */
3504 ST_FUNC int is_btype_size(int bt)
3506 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3509 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3510 are added to the element type, copied because it could be a typedef. */
3511 static void parse_btype_qualify(CType *type, int qualifiers)
3513 while (type->t & VT_ARRAY) {
3514 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3515 type = &type->ref->type;
3517 type->t |= qualifiers;
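/* Example of why the element type is copied above: qualifying a
   typedef'd array type must not modify the typedef itself.

     typedef int A[4];
     const A a;     element type of 'a' becomes 'const int'
     A b;           'b' keeps plain 'int' elements
*/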
3520 /* return 0 if no type declaration. otherwise, return the basic type
3521 and skip it.
3523 static int parse_btype(CType *type, AttributeDef *ad)
3525 int t, u, bt_size, complete, type_found, typespec_found;
3526 Sym *s;
3527 CType type1;
3529 memset(ad, 0, sizeof(AttributeDef));
3530 complete = 0;
3531 type_found = 0;
3532 typespec_found = 0;
3533 t = 0;
3534 while(1) {
3535 switch(tok) {
3536 case TOK_EXTENSION:
3537 /* currently, we really ignore extension */
3538 next();
3539 continue;
3541 /* basic types */
3542 case TOK_CHAR:
3543 u = VT_BYTE;
3544 basic_type:
3545 next();
3546 basic_type1:
3547 if (complete)
3548 tcc_error("too many basic types");
3549 t |= u;
3550 bt_size = is_btype_size (u & VT_BTYPE);
3551 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3552 complete = 1;
3553 typespec_found = 1;
3554 break;
3555 case TOK_VOID:
3556 u = VT_VOID;
3557 goto basic_type;
3558 case TOK_SHORT:
3559 u = VT_SHORT;
3560 goto basic_type;
3561 case TOK_INT:
3562 u = VT_INT;
3563 goto basic_type;
3564 case TOK_LONG:
3565 next();
3566 if ((t & VT_BTYPE) == VT_DOUBLE) {
3567 #ifndef TCC_TARGET_PE
3568 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3569 #endif
3570 } else if ((t & VT_BTYPE) == VT_LONG) {
3571 t = (t & ~VT_BTYPE) | VT_LLONG;
3572 } else {
3573 u = VT_LONG;
3574 goto basic_type1;
3576 break;
3577 #ifdef TCC_TARGET_ARM64
3578 case TOK_UINT128:
3579 /* GCC's __uint128_t appears in some Linux header files. Make it a
3580 synonym for long double to get the size and alignment right. */
3581 u = VT_LDOUBLE;
3582 goto basic_type;
3583 #endif
3584 case TOK_BOOL:
3585 u = VT_BOOL;
3586 goto basic_type;
3587 case TOK_FLOAT:
3588 u = VT_FLOAT;
3589 goto basic_type;
3590 case TOK_DOUBLE:
3591 next();
3592 if ((t & VT_BTYPE) == VT_LONG) {
3593 #ifdef TCC_TARGET_PE
3594 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3595 #else
3596 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3597 #endif
3598 } else {
3599 u = VT_DOUBLE;
3600 goto basic_type1;
3602 break;
3603 case TOK_ENUM:
3604 struct_decl(&type1, ad, VT_ENUM);
3605 basic_type2:
3606 u = type1.t;
3607 type->ref = type1.ref;
3608 goto basic_type1;
3609 case TOK_STRUCT:
3610 case TOK_UNION:
3611 struct_decl(&type1, ad, VT_STRUCT);
3612 goto basic_type2;
3614 /* type modifiers */
3615 case TOK_CONST1:
3616 case TOK_CONST2:
3617 case TOK_CONST3:
3618 type->t = t;
3619 parse_btype_qualify(type, VT_CONSTANT);
3620 t = type->t;
3621 next();
3622 break;
3623 case TOK_VOLATILE1:
3624 case TOK_VOLATILE2:
3625 case TOK_VOLATILE3:
3626 type->t = t;
3627 parse_btype_qualify(type, VT_VOLATILE);
3628 t = type->t;
3629 next();
3630 break;
3631 case TOK_SIGNED1:
3632 case TOK_SIGNED2:
3633 case TOK_SIGNED3:
3634 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3635 tcc_error("signed and unsigned modifier");
3636 typespec_found = 1;
3637 t |= VT_DEFSIGN;
3638 next();
3639 break;
3640 case TOK_REGISTER:
3641 case TOK_AUTO:
3642 case TOK_RESTRICT1:
3643 case TOK_RESTRICT2:
3644 case TOK_RESTRICT3:
3645 next();
3646 break;
3647 case TOK_UNSIGNED:
3648 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3649 tcc_error("signed and unsigned modifier");
3650 t |= VT_DEFSIGN | VT_UNSIGNED;
3651 next();
3652 typespec_found = 1;
3653 break;
3655 /* storage */
3656 case TOK_EXTERN:
3657 t |= VT_EXTERN;
3658 next();
3659 break;
3660 case TOK_STATIC:
3661 t |= VT_STATIC;
3662 next();
3663 break;
3664 case TOK_TYPEDEF:
3665 t |= VT_TYPEDEF;
3666 next();
3667 break;
3668 case TOK_INLINE1:
3669 case TOK_INLINE2:
3670 case TOK_INLINE3:
3671 t |= VT_INLINE;
3672 next();
3673 break;
3675 /* GNUC attribute */
3676 case TOK_ATTRIBUTE1:
3677 case TOK_ATTRIBUTE2:
3678 parse_attribute(ad);
3679 if (ad->a.mode) {
3680 u = ad->a.mode -1;
3681 t = (t & ~VT_BTYPE) | u;
3683 break;
3684 /* GNUC typeof */
3685 case TOK_TYPEOF1:
3686 case TOK_TYPEOF2:
3687 case TOK_TYPEOF3:
3688 next();
3689 parse_expr_type(&type1);
3690 /* remove all storage modifiers except typedef */
3691 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3692 goto basic_type2;
3693 default:
3694 if (typespec_found)
3695 goto the_end;
3696 s = sym_find(tok);
3697 if (!s || !(s->type.t & VT_TYPEDEF))
3698 goto the_end;
3700 type->t = ((s->type.t & ~VT_TYPEDEF) |
3701 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3702 type->ref = s->type.ref;
3703 if (t & (VT_CONSTANT | VT_VOLATILE))
3704 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3705 t = type->t;
3707 if (s->r) {
3708 /* get attributes from typedef */
3709 if (0 == ad->a.aligned)
3710 ad->a.aligned = s->a.aligned;
3711 if (0 == ad->a.func_call)
3712 ad->a.func_call = s->a.func_call;
3713 ad->a.packed |= s->a.packed;
3715 next();
3716 typespec_found = 1;
3717 break;
3719 type_found = 1;
3721 the_end:
3722 if (tcc_state->char_is_unsigned) {
3723 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3724 t |= VT_UNSIGNED;
3727 /* long is never used as type */
3728 if ((t & VT_BTYPE) == VT_LONG)
3729 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3730 defined TCC_TARGET_PE
3731 t = (t & ~VT_BTYPE) | VT_INT;
3732 #else
3733 t = (t & ~VT_BTYPE) | VT_LLONG;
3734 #endif
3735 type->t = t;
3736 return type_found;
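/* Examples of specifier combinations folded by the loop above (a
   sketch; the 'long' cases are target dependent, see the #ifs):

     unsigned long long int x;   accepted, same as unsigned long long
     long double d;              long double (plain double on PE)
     long l;                     int-sized on 32-bit and PE targets,
                                 64-bit on x86-64/arm64 ELF
*/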
3739 /* convert a function parameter type (array to pointer and function to
3740 function pointer) */
3741 static inline void convert_parameter_type(CType *pt)
3743 /* remove const and volatile qualifiers (XXX: const could be used
3744 to indicate a const function parameter) */
3745 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3746 /* array must be transformed to pointer according to ANSI C */
3747 pt->t &= ~VT_ARRAY;
3748 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3749 mk_pointer(pt);
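/* Example of the adjustments above: the two declarations

     void f(int a[10], int cb(void));
     void f(int *a, int (*cb)(void));

   declare the same function, since array parameters decay to pointers
   and function parameters decay to function pointers. */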
3753 ST_FUNC void parse_asm_str(CString *astr)
3755 skip('(');
3756 parse_mult_str(astr, "string constant");
3759 /* Parse an asm label and return the token */
3760 static int asm_label_instr(void)
3762 int v;
3763 CString astr;
3765 next();
3766 parse_asm_str(&astr);
3767 skip(')');
3768 #ifdef ASM_DEBUG
3769 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3770 #endif
3771 v = tok_alloc(astr.data, astr.size - 1)->tok;
3772 cstr_free(&astr);
3773 return v;
3776 static void post_type(CType *type, AttributeDef *ad)
3778 int n, l, t1, arg_size, align;
3779 Sym **plast, *s, *first;
3780 AttributeDef ad1;
3781 CType pt;
3783 if (tok == '(') {
3784 /* function declaration */
3785 next();
3786 l = 0;
3787 first = NULL;
3788 plast = &first;
3789 arg_size = 0;
3790 if (tok != ')') {
3791 for(;;) {
3792 /* read param name and compute offset */
3793 if (l != FUNC_OLD) {
3794 if (!parse_btype(&pt, &ad1)) {
3795 if (l) {
3796 tcc_error("invalid type");
3797 } else {
3798 l = FUNC_OLD;
3799 goto old_proto;
3802 l = FUNC_NEW;
3803 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3804 break;
3805 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3806 if ((pt.t & VT_BTYPE) == VT_VOID)
3807 tcc_error("parameter declared as void");
3808 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3809 } else {
3810 old_proto:
3811 n = tok;
3812 if (n < TOK_UIDENT)
3813 expect("identifier");
3814 pt.t = VT_INT;
3815 next();
3817 convert_parameter_type(&pt);
3818 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3819 *plast = s;
3820 plast = &s->next;
3821 if (tok == ')')
3822 break;
3823 skip(',');
3824 if (l == FUNC_NEW && tok == TOK_DOTS) {
3825 l = FUNC_ELLIPSIS;
3826 next();
3827 break;
3831 /* if no parameters, then old type prototype */
3832 if (l == 0)
3833 l = FUNC_OLD;
3834 skip(')');
3835 /* NOTE: const is ignored in returned type as it has a special
3836 meaning in gcc / C++ */
3837 type->t &= ~VT_CONSTANT;
3838 /* some ancient pre-K&R C allows a function to return an array
3839 and the array brackets to be put after the arguments, such
3840 that "int c()[]" means something like "int[] c()" */
3841 if (tok == '[') {
3842 next();
3843 skip(']'); /* only handle simple "[]" */
3844 type->t |= VT_PTR;
3846 /* we push an anonymous symbol which will contain the function prototype */
3847 ad->a.func_args = arg_size;
3848 s = sym_push(SYM_FIELD, type, 0, l);
3849 s->a = ad->a;
3850 s->next = first;
3851 type->t = VT_FUNC;
3852 type->ref = s;
3853 } else if (tok == '[') {
3854 /* array definition */
3855 next();
3856 if (tok == TOK_RESTRICT1)
3857 next();
3858 n = -1;
3859 t1 = 0;
3860 if (tok != ']') {
3861 if (!local_stack || nocode_wanted)
3862 vpushi(expr_const());
3863 else gexpr();
3864 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
3865 n = vtop->c.i;
3866 if (n < 0)
3867 tcc_error("invalid array size");
3868 } else {
3869 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
3870 tcc_error("size of variable length array should be an integer");
3871 t1 = VT_VLA;
3874 skip(']');
3875 /* parse next post type */
3876 post_type(type, ad);
3877 if (type->t == VT_FUNC)
3878 tcc_error("declaration of an array of functions");
3879 t1 |= type->t & VT_VLA;
3881 if (t1 & VT_VLA) {
3882 loc -= type_size(&int_type, &align);
3883 loc &= -align;
3884 n = loc;
3886 vla_runtime_type_size(type, &align);
3887 gen_op('*');
3888 vset(&int_type, VT_LOCAL|VT_LVAL, n);
3889 vswap();
3890 vstore();
3892 if (n != -1)
3893 vpop();
3895 /* we push an anonymous symbol which will contain the array
3896 element type */
3897 s = sym_push(SYM_FIELD, type, 0, n);
3898 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
3899 type->ref = s;
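/* Prototype kinds produced by the '(' branch above and stored in s->c
   (a sketch):

     int f();           FUNC_OLD       matches any argument list
     int g(void);       FUNC_NEW       no parameters
     int h(int, ...);   FUNC_ELLIPSIS  variadic
*/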
3903 /* Parse a type declaration (except basic type), and return the type
3904 in 'type'. 'td' is a bitmask indicating which kind of type decl is
3905 expected. 'type' should contain the basic type. 'ad' is the
3906 attribute definition of the basic type. It can be modified by
3907 type_decl().
3909 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
3911 Sym *s;
3912 CType type1, *type2;
3913 int qualifiers, storage;
3915 while (tok == '*') {
3916 qualifiers = 0;
3917 redo:
3918 next();
3919 switch(tok) {
3920 case TOK_CONST1:
3921 case TOK_CONST2:
3922 case TOK_CONST3:
3923 qualifiers |= VT_CONSTANT;
3924 goto redo;
3925 case TOK_VOLATILE1:
3926 case TOK_VOLATILE2:
3927 case TOK_VOLATILE3:
3928 qualifiers |= VT_VOLATILE;
3929 goto redo;
3930 case TOK_RESTRICT1:
3931 case TOK_RESTRICT2:
3932 case TOK_RESTRICT3:
3933 goto redo;
3934 /* XXX: clarify attribute handling */
3935 case TOK_ATTRIBUTE1:
3936 case TOK_ATTRIBUTE2:
3937 parse_attribute(ad);
3938 break;
3940 mk_pointer(type);
3941 type->t |= qualifiers;
3944 /* recursive type */
3945 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
3946 type1.t = 0; /* XXX: same as int */
3947 if (tok == '(') {
3948 next();
3949 /* XXX: it is not correct to modify 'ad' at this point, but
3950 the syntax is not clear */
3951 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3952 parse_attribute(ad);
3953 type_decl(&type1, ad, v, td);
3954 skip(')');
3955 } else {
3956 /* type identifier */
3957 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
3958 *v = tok;
3959 next();
3960 } else {
3961 if (!(td & TYPE_ABSTRACT))
3962 expect("identifier");
3963 *v = 0;
3966 storage = type->t & VT_STORAGE;
3967 type->t &= ~VT_STORAGE;
3968 if (storage & VT_STATIC) {
3969 int saved_nocode_wanted = nocode_wanted;
3970 nocode_wanted = 1;
3971 post_type(type, ad);
3972 nocode_wanted = saved_nocode_wanted;
3973 } else
3974 post_type(type, ad);
3975 type->t |= storage;
3976 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3977 parse_attribute(ad);
3979 if (!type1.t)
3980 return;
3981 /* append type at the end of type1 */
3982 type2 = &type1;
3983 for(;;) {
3984 s = type2->ref;
3985 type2 = &s->type;
3986 if (!type2->t) {
3987 *type2 = *type;
3988 break;
3991 *type = type1;
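/* Example of the declarator recursion above: for

     char (*p)[10];

   the parenthesized inner declarator "*p" is parsed first into type1,
   the outer "[10]" suffix is then applied to 'type', and the loop at
   the end grafts it on, yielding "pointer to array of 10 char". */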
3994 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
3995 ST_FUNC int lvalue_type(int t)
3997 int bt, r;
3998 r = VT_LVAL;
3999 bt = t & VT_BTYPE;
4000 if (bt == VT_BYTE || bt == VT_BOOL)
4001 r |= VT_LVAL_BYTE;
4002 else if (bt == VT_SHORT)
4003 r |= VT_LVAL_SHORT;
4004 else
4005 return r;
4006 if (t & VT_UNSIGNED)
4007 r |= VT_LVAL_UNSIGNED;
4008 return r;
4011 /* indirection with full error checking and bound check */
4012 ST_FUNC void indir(void)
4014 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4015 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4016 return;
4017 expect("pointer");
4019 if ((vtop->r & VT_LVAL) && !nocode_wanted)
4020 gv(RC_INT);
4021 vtop->type = *pointed_type(&vtop->type);
4022 /* Arrays and functions are never lvalues */
4023 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4024 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4025 vtop->r |= lvalue_type(vtop->type.t);
4026 /* if bound checking, the referenced pointer must be checked */
4027 #ifdef CONFIG_TCC_BCHECK
4028 if (tcc_state->do_bounds_check)
4029 vtop->r |= VT_MUSTBOUND;
4030 #endif
4034 /* pass a parameter to a function and do type checking and casting */
4035 static void gfunc_param_typed(Sym *func, Sym *arg)
4037 int func_type;
4038 CType type;
4040 func_type = func->c;
4041 if (func_type == FUNC_OLD ||
4042 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4043 /* default casting : only need to convert float to double */
4044 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4045 type.t = VT_DOUBLE;
4046 gen_cast(&type);
4047 } else if (vtop->type.t & VT_BITFIELD) {
4048 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4049 gen_cast(&type);
4051 } else if (arg == NULL) {
4052 tcc_error("too many arguments to function");
4053 } else {
4054 type = arg->type;
4055 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4056 gen_assign_cast(&type);
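/* Default argument promotion as performed above (a sketch): when the
   callee is old-style or the argument falls in the '...' part of a
   variadic prototype, a float argument is converted to double, e.g.

     printf("%f\n", 1.0f);    1.0f is passed as a double

   (assuming the usual <stdio.h> declaration of printf). */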
4060 /* parse an expression of the form '(type)' or '(expr)' and return its
4061 type */
4062 static void parse_expr_type(CType *type)
4064 int n;
4065 AttributeDef ad;
4067 skip('(');
4068 if (parse_btype(type, &ad)) {
4069 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4070 } else {
4071 expr_type(type);
4073 skip(')');
4076 static void parse_type(CType *type)
4078 AttributeDef ad;
4079 int n;
4081 if (!parse_btype(type, &ad)) {
4082 expect("type");
4084 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4087 static void vpush_tokc(int t)
4089 CType type;
4090 type.t = t;
4091 type.ref = 0;
4092 vsetc(&type, VT_CONST, &tokc);
4095 ST_FUNC void unary(void)
4097 int n, t, align, size, r, sizeof_caller;
4098 CType type;
4099 Sym *s;
4100 AttributeDef ad;
4102 sizeof_caller = in_sizeof;
4103 in_sizeof = 0;
4104 /* XXX: GCC 2.95.3 does not generate a table although it would be
4105 better here */
4106 tok_next:
4107 switch(tok) {
4108 case TOK_EXTENSION:
4109 next();
4110 goto tok_next;
4111 case TOK_CINT:
4112 case TOK_CCHAR:
4113 case TOK_LCHAR:
4114 vpushi(tokc.i);
4115 next();
4116 break;
4117 case TOK_CUINT:
4118 vpush_tokc(VT_INT | VT_UNSIGNED);
4119 next();
4120 break;
4121 case TOK_CLLONG:
4122 vpush_tokc(VT_LLONG);
4123 next();
4124 break;
4125 case TOK_CULLONG:
4126 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4127 next();
4128 break;
4129 case TOK_CFLOAT:
4130 vpush_tokc(VT_FLOAT);
4131 next();
4132 break;
4133 case TOK_CDOUBLE:
4134 vpush_tokc(VT_DOUBLE);
4135 next();
4136 break;
4137 case TOK_CLDOUBLE:
4138 vpush_tokc(VT_LDOUBLE);
4139 next();
4140 break;
4141 case TOK___FUNCTION__:
4142 if (!gnu_ext)
4143 goto tok_identifier;
4144 /* fall thru */
4145 case TOK___FUNC__:
4147 void *ptr;
4148 int len;
4149 /* special function name identifier */
4150 len = strlen(funcname) + 1;
4151 /* generate char[len] type */
4152 type.t = VT_BYTE;
4153 mk_pointer(&type);
4154 type.t |= VT_ARRAY;
4155 type.ref->c = len;
4156 vpush_ref(&type, data_section, data_section->data_offset, len);
4157 ptr = section_ptr_add(data_section, len);
4158 memcpy(ptr, funcname, len);
4159 next();
4161 break;
4162 case TOK_LSTR:
4163 #ifdef TCC_TARGET_PE
4164 t = VT_SHORT | VT_UNSIGNED;
4165 #else
4166 t = VT_INT;
4167 #endif
4168 goto str_init;
4169 case TOK_STR:
4170 /* string parsing */
4171 t = VT_BYTE;
4172 str_init:
4173 if (tcc_state->warn_write_strings)
4174 t |= VT_CONSTANT;
4175 type.t = t;
4176 mk_pointer(&type);
4177 type.t |= VT_ARRAY;
4178 memset(&ad, 0, sizeof(AttributeDef));
4179 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4180 break;
4181 case '(':
4182 next();
4183 /* cast ? */
4184 if (parse_btype(&type, &ad)) {
4185 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4186 skip(')');
4187 /* check ISOC99 compound literal */
4188 if (tok == '{') {
4189 /* data is allocated locally by default */
4190 if (global_expr)
4191 r = VT_CONST;
4192 else
4193 r = VT_LOCAL;
4194 /* all except arrays are lvalues */
4195 if (!(type.t & VT_ARRAY))
4196 r |= lvalue_type(type.t);
4197 memset(&ad, 0, sizeof(AttributeDef));
4198 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4199 } else {
4200 if (sizeof_caller) {
4201 vpush(&type);
4202 return;
4204 unary();
4205 gen_cast(&type);
4207 } else if (tok == '{') {
4208 if (const_wanted)
4209 tcc_error("expected constant");
4210 /* save all registers */
4211 if (!nocode_wanted)
4212 save_regs(0);
4213 /* statement expression: unlike GCC, we do not accept
4214 break/continue inside it */
4215 block(NULL, NULL, 1);
4216 skip(')');
4217 } else {
4218 gexpr();
4219 skip(')');
4221 break;
4222 case '*':
4223 next();
4224 unary();
4225 indir();
4226 break;
4227 case '&':
4228 next();
4229 unary();
4230 /* function names must be treated as function pointers,
4231 except for unary '&' and sizeof. Since we consider that
4232 functions are not lvalues, we only have to handle it
4233 there and in function calls. */
4234 /* arrays can also be used although they are not lvalues */
4235 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4236 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4237 test_lvalue();
4238 mk_pointer(&vtop->type);
4239 gaddrof();
4240 break;
4241 case '!':
4242 next();
4243 unary();
4244 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4245 CType boolean;
4246 boolean.t = VT_BOOL;
4247 gen_cast(&boolean);
4248 vtop->c.i = !vtop->c.i;
4249 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4250 vtop->c.i ^= 1;
4251 else {
4252 save_regs(1);
4253 vseti(VT_JMP, gvtst(1, 0));
4255 break;
4256 case '~':
4257 next();
4258 unary();
4259 vpushi(-1);
4260 gen_op('^');
4261 break;
4262 case '+':
4263 next();
4264 unary();
4265 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4266 tcc_error("pointer not accepted for unary plus");
4267 /* In order to force the cast, we add zero, except for floating point
4268 where we really need a no-op (otherwise -0.0 would be transformed
4269 into +0.0). */
4270 if (!is_float(vtop->type.t)) {
4271 vpushi(0);
4272 gen_op('+');
4274 break;
4275 case TOK_SIZEOF:
4276 case TOK_ALIGNOF1:
4277 case TOK_ALIGNOF2:
4278 t = tok;
4279 next();
4280 in_sizeof++;
4281 unary_type(&type); // resets in_sizeof to 0
4282 size = type_size(&type, &align);
4283 if (t == TOK_SIZEOF) {
4284 if (!(type.t & VT_VLA)) {
4285 if (size < 0)
4286 tcc_error("sizeof applied to an incomplete type");
4287 vpushs(size);
4288 } else {
4289 vla_runtime_type_size(&type, &align);
4291 } else {
4292 vpushs(align);
4294 vtop->type.t |= VT_UNSIGNED;
4295 break;
4297 case TOK_builtin_expect:
4299 /* __builtin_expect is a no-op for now */
4300 int saved_nocode_wanted;
4301 next();
4302 skip('(');
4303 expr_eq();
4304 skip(',');
4305 saved_nocode_wanted = nocode_wanted;
4306 nocode_wanted = 1;
4307 expr_lor_const();
4308 vpop();
4309 nocode_wanted = saved_nocode_wanted;
4310 skip(')');
4312 break;
4313 case TOK_builtin_types_compatible_p:
4315 CType type1, type2;
4316 next();
4317 skip('(');
4318 parse_type(&type1);
4319 skip(',');
4320 parse_type(&type2);
4321 skip(')');
4322 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4323 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4324 vpushi(is_compatible_types(&type1, &type2));
4326 break;
4327 case TOK_builtin_choose_expr:
4329 int saved_nocode_wanted, c;
4330 next();
4331 skip('(');
4332 c = expr_const();
4333 skip(',');
4334 if (!c) {
4335 saved_nocode_wanted = nocode_wanted;
4336 nocode_wanted = 1;
4338 expr_eq();
4339 if (!c) {
4340 vpop();
4341 nocode_wanted = saved_nocode_wanted;
4343 skip(',');
4344 if (c) {
4345 saved_nocode_wanted = nocode_wanted;
4346 nocode_wanted = 1;
4348 expr_eq();
4349 if (c) {
4350 vpop();
4351 nocode_wanted = saved_nocode_wanted;
4353 skip(')');
4355 break;
4356 case TOK_builtin_constant_p:
4358 int saved_nocode_wanted, res;
4359 next();
4360 skip('(');
4361 saved_nocode_wanted = nocode_wanted;
4362 nocode_wanted = 1;
4363 gexpr();
4364 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4365 vpop();
4366 nocode_wanted = saved_nocode_wanted;
4367 skip(')');
4368 vpushi(res);
4370 break;
4371 case TOK_builtin_frame_address:
4372 case TOK_builtin_return_address:
4374 int tok1 = tok;
4375 int level;
4376 CType type;
4377 next();
4378 skip('(');
4379 if (tok != TOK_CINT) {
4380 tcc_error("%s only takes positive integers",
4381 tok1 == TOK_builtin_return_address ?
4382 "__builtin_return_address" :
4383 "__builtin_frame_address");
4385 level = (uint32_t)tokc.i;
4386 next();
4387 skip(')');
4388 type.t = VT_VOID;
4389 mk_pointer(&type);
4390 vset(&type, VT_LOCAL, 0); /* local frame */
4391 while (level--) {
4392 mk_pointer(&vtop->type);
4393 indir(); /* -> parent frame */
4395 if (tok1 == TOK_builtin_return_address) {
4396 // assume return address is just above frame pointer on stack
4397 vpushi(PTR_SIZE);
4398 gen_op('+');
4399 mk_pointer(&vtop->type);
4400 indir();
4403 break;
4404 #ifdef TCC_TARGET_X86_64
4405 #ifdef TCC_TARGET_PE
4406 case TOK_builtin_va_start:
4408 next();
4409 skip('(');
4410 expr_eq();
4411 skip(',');
4412 expr_eq();
4413 skip(')');
4414 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4415 tcc_error("__builtin_va_start expects a local variable");
4416 vtop->r &= ~(VT_LVAL | VT_REF);
4417 vtop->type = char_pointer_type;
4418 vtop->c.i += 8;
4419 vstore();
4421 break;
4422 #else
4423 case TOK_builtin_va_arg_types:
4425 CType type;
4426 next();
4427 skip('(');
4428 parse_type(&type);
4429 skip(')');
4430 vpushi(classify_x86_64_va_arg(&type));
4432 break;
4433 #endif
4434 #endif
4436 #ifdef TCC_TARGET_ARM64
4437 case TOK___va_start: {
4438 if (nocode_wanted)
4439 tcc_error("statement in global scope");
4440 next();
4441 skip('(');
4442 expr_eq();
4443 skip(',');
4444 expr_eq();
4445 skip(')');
4446 //xx check types
4447 gen_va_start();
4448 vpushi(0);
4449 vtop->type.t = VT_VOID;
4450 break;
4452 case TOK___va_arg: {
4453 CType type;
4454 if (nocode_wanted)
4455 tcc_error("statement in global scope");
4456 next();
4457 skip('(');
4458 expr_eq();
4459 skip(',');
4460 parse_type(&type);
4461 skip(')');
4462 //xx check types
4463 gen_va_arg(&type);
4464 vtop->type = type;
4465 break;
4467 case TOK___arm64_clear_cache: {
4468 next();
4469 skip('(');
4470 expr_eq();
4471 skip(',');
4472 expr_eq();
4473 skip(')');
4474 gen_clear_cache();
4475 vpushi(0);
4476 vtop->type.t = VT_VOID;
4477 break;
4479 #endif
4480 /* pre operations */
4481 case TOK_INC:
4482 case TOK_DEC:
4483 t = tok;
4484 next();
4485 unary();
4486 inc(0, t);
4487 break;
4488 case '-':
4489 next();
4490 unary();
4491 t = vtop->type.t & VT_BTYPE;
4492 if (is_float(t)) {
4493 /* In IEEE negate(x) isn't subtract(0,x), but rather
4494 subtract(-0, x). */
4495 vpush(&vtop->type);
4496 if (t == VT_FLOAT)
4497 vtop->c.f = -0.0f;
4498 else if (t == VT_DOUBLE)
4499 vtop->c.d = -0.0;
4500 else
4501 vtop->c.ld = -0.0;
4502 } else
4503 vpushi(0);
4504 vswap();
4505 gen_op('-');
4506 break;
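/* Illustrative note (annotation): pushing -0.0 instead of 0 keeps IEEE
   signed zeros correct:

       double a = 0.0;
       double b = -a;      // generated as (-0.0) - a, so b is -0.0

   With 0 - a the result would be +0.0, which is observably different
   (e.g. through 1.0 / b). */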
4507 case TOK_LAND:
4508 if (!gnu_ext)
4509 goto tok_identifier;
4510 next();
4511 /* allow taking the address of a label */
4512 if (tok < TOK_UIDENT)
4513 expect("label identifier");
4514 s = label_find(tok);
4515 if (!s) {
4516 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4517 } else {
4518 if (s->r == LABEL_DECLARED)
4519 s->r = LABEL_FORWARD;
4521 if (!s->type.t) {
4522 s->type.t = VT_VOID;
4523 mk_pointer(&s->type);
4524 s->type.t |= VT_STATIC;
4526 vpushsym(&s->type, s);
4527 next();
4528 break;
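/* Illustrative note (annotation): GNU "labels as values".  The label gets a
   static 'void *' type, and the address can later be used by the computed
   goto handled in block():

       int f(int n) {
           void *jump = &&done;       // address of a label (GNU extension)
           if (n > 0)
               goto *jump;            // computed goto
       done:
           return n;
       }
*/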
4530 // special qnan, snan and infinity values
4531 case TOK___NAN__:
4532 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4533 next();
4534 break;
4535 case TOK___SNAN__:
4536 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4537 next();
4538 break;
4539 case TOK___INF__:
4540 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4541 next();
4542 break;
4544 default:
4545 tok_identifier:
4546 t = tok;
4547 next();
4548 if (t < TOK_UIDENT)
4549 expect("identifier");
4550 s = sym_find(t);
4551 if (!s) {
4552 const char *name = get_tok_str(t, NULL);
4553 if (tok != '(')
4554 tcc_error("'%s' undeclared", name);
4555 /* for simple function calls, we tolerate undeclared
4556 external reference to an int() function */
4557 if (tcc_state->warn_implicit_function_declaration
4558 #ifdef TCC_TARGET_PE
4559 /* people must be warned about using undeclared WINAPI functions
4560 (which usually start with an uppercase letter) */
4561 || (name[0] >= 'A' && name[0] <= 'Z')
4562 #endif
4564 tcc_warning("implicit declaration of function '%s'", name);
4565 s = external_global_sym(t, &func_old_type, 0);
4567 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4568 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4569 /* if referencing an inline function, then we generate a
4570 symbol for it if not already done. This has the
4571 effect of generating code for it at the end of the
4572 compilation unit. Inline functions are always
4573 generated in the text section. */
4574 if (!s->c)
4575 put_extern_sym(s, text_section, 0, 0);
4576 r = VT_SYM | VT_CONST;
4577 } else {
4578 r = s->r;
4580 vset(&s->type, r, s->c);
4581 /* if forward reference, we must point to s */
4582 if (vtop->r & VT_SYM) {
4583 vtop->sym = s;
4584 vtop->c.i = 0;
4586 break;
4589 /* post operations */
4590 while (1) {
4591 if (tok == TOK_INC || tok == TOK_DEC) {
4592 inc(1, tok);
4593 next();
4594 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4595 int qualifiers;
4596 /* field */
4597 if (tok == TOK_ARROW)
4598 indir();
4599 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4600 test_lvalue();
4601 gaddrof();
4602 /* expect pointer on structure */
4603 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4604 expect("struct or union");
4605 if (tok == TOK_CDOUBLE)
4606 expect("field name");
4607 next();
4608 if (tok == TOK_CINT || tok == TOK_CUINT)
4609 expect("field name");
4610 s = find_field(&vtop->type, tok);
4611 if (!s)
4612 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4613 /* add field offset to pointer */
4614 vtop->type = char_pointer_type; /* change type to 'char *' */
4615 vpushi(s->c);
4616 gen_op('+');
4617 /* change type to field type, and set to lvalue */
4618 vtop->type = s->type;
4619 vtop->type.t |= qualifiers;
4620 /* an array is never an lvalue */
4621 if (!(vtop->type.t & VT_ARRAY)) {
4622 vtop->r |= lvalue_type(vtop->type.t);
4623 #ifdef CONFIG_TCC_BCHECK
4624 /* if bound checking, the referenced pointer must be checked */
4625 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4626 vtop->r |= VT_MUSTBOUND;
4627 #endif
4629 next();
4630 } else if (tok == '[') {
4631 next();
4632 gexpr();
4633 gen_op('+');
4634 indir();
4635 skip(']');
4636 } else if (tok == '(') {
4637 SValue ret;
4638 Sym *sa;
4639 int nb_args, ret_nregs, ret_align, regsize, variadic;
4641 /* function call */
4642 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4643 /* pointer test (no array accepted) */
4644 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4645 vtop->type = *pointed_type(&vtop->type);
4646 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4647 goto error_func;
4648 } else {
4649 error_func:
4650 expect("function pointer");
4652 } else {
4653 vtop->r &= ~VT_LVAL; /* no lvalue */
4655 /* get return type */
4656 s = vtop->type.ref;
4657 next();
4658 sa = s->next; /* first parameter */
4659 nb_args = 0;
4660 ret.r2 = VT_CONST;
4661 /* compute first implicit argument if a structure is returned */
4662 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4663 variadic = (s->c == FUNC_ELLIPSIS);
4664 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4665 &ret_align, &regsize);
4666 if (!ret_nregs) {
4667 /* get some space for the returned structure */
4668 size = type_size(&s->type, &align);
4669 #ifdef TCC_TARGET_ARM64
4670 /* On arm64, a small struct is returned in registers.
4671 It is much easier to write it to memory if we know
4672 that we are allowed to write some extra bytes, so
4673 round the allocated space up to a power of 2: */
4674 if (size < 16)
4675 while (size & (size - 1))
4676 size = (size | (size - 1)) + 1;
4677 #endif
4678 loc = (loc - size) & -align;
4679 ret.type = s->type;
4680 ret.r = VT_LOCAL | VT_LVAL;
4681 /* pass it as 'int' to avoid structure arg passing
4682 problems */
4683 vseti(VT_LOCAL, loc);
4684 ret.c = vtop->c;
4685 nb_args++;
4687 } else {
4688 ret_nregs = 1;
4689 ret.type = s->type;
4692 if (ret_nregs) {
4693 /* return in register */
4694 if (is_float(ret.type.t)) {
4695 ret.r = reg_fret(ret.type.t);
4696 #ifdef TCC_TARGET_X86_64
4697 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4698 ret.r2 = REG_QRET;
4699 #endif
4700 } else {
4701 #ifndef TCC_TARGET_ARM64
4702 #ifdef TCC_TARGET_X86_64
4703 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4704 #else
4705 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4706 #endif
4707 ret.r2 = REG_LRET;
4708 #endif
4709 ret.r = REG_IRET;
4711 ret.c.i = 0;
4713 if (tok != ')') {
4714 for(;;) {
4715 expr_eq();
4716 gfunc_param_typed(s, sa);
4717 nb_args++;
4718 if (sa)
4719 sa = sa->next;
4720 if (tok == ')')
4721 break;
4722 skip(',');
4725 if (sa)
4726 tcc_error("too few arguments to function");
4727 skip(')');
4728 if (!nocode_wanted) {
4729 gfunc_call(nb_args);
4730 } else {
4731 vtop -= (nb_args + 1);
4734 /* return value */
4735 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4736 vsetc(&ret.type, r, &ret.c);
4737 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4740 /* handle packed struct return */
4741 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4742 int addr, offset;
4744 size = type_size(&s->type, &align);
4745 /* We're writing whole regs often, make sure there's enough
4746 space. Assume the register size is a power of 2. */
4747 if (regsize > align)
4748 align = regsize;
4749 loc = (loc - size) & -align;
4750 addr = loc;
4751 offset = 0;
4752 for (;;) {
4753 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4754 vswap();
4755 vstore();
4756 vtop--;
4757 if (--ret_nregs == 0)
4758 break;
4759 offset += regsize;
4761 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4763 } else {
4764 break;
4769 ST_FUNC void expr_prod(void)
4771 int t;
4773 unary();
4774 while (tok == '*' || tok == '/' || tok == '%') {
4775 t = tok;
4776 next();
4777 unary();
4778 gen_op(t);
4782 ST_FUNC void expr_sum(void)
4784 int t;
4786 expr_prod();
4787 while (tok == '+' || tok == '-') {
4788 t = tok;
4789 next();
4790 expr_prod();
4791 gen_op(t);
4795 static void expr_shift(void)
4797 int t;
4799 expr_sum();
4800 while (tok == TOK_SHL || tok == TOK_SAR) {
4801 t = tok;
4802 next();
4803 expr_sum();
4804 gen_op(t);
4808 static void expr_cmp(void)
4810 int t;
4812 expr_shift();
4813 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4814 tok == TOK_ULT || tok == TOK_UGE) {
4815 t = tok;
4816 next();
4817 expr_shift();
4818 gen_op(t);
4822 static void expr_cmpeq(void)
4824 int t;
4826 expr_cmp();
4827 while (tok == TOK_EQ || tok == TOK_NE) {
4828 t = tok;
4829 next();
4830 expr_cmp();
4831 gen_op(t);
4835 static void expr_and(void)
4837 expr_cmpeq();
4838 while (tok == '&') {
4839 next();
4840 expr_cmpeq();
4841 gen_op('&');
4845 static void expr_xor(void)
4847 expr_and();
4848 while (tok == '^') {
4849 next();
4850 expr_and();
4851 gen_op('^');
4855 static void expr_or(void)
4857 expr_xor();
4858 while (tok == '|') {
4859 next();
4860 expr_xor();
4861 gen_op('|');
4865 /* XXX: fix this mess */
4866 static void expr_land_const(void)
4868 expr_or();
4869 while (tok == TOK_LAND) {
4870 next();
4871 expr_or();
4872 gen_op(TOK_LAND);
4875 static void expr_lor_const(void)
4877 expr_land_const();
4878 while (tok == TOK_LOR) {
4879 next();
4880 expr_land_const();
4881 gen_op(TOK_LOR);
4885 static void expr_land(void)
4887 expr_or();
4888 if (tok == TOK_LAND) {
4889 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4890 CType ctb, cti;
4891 ctb.t = VT_BOOL;
4892 cti.t = VT_INT;
4893 next();
4894 gen_cast(&ctb);
4895 if (vtop->c.i) {
4896 vpop();
4897 expr_land();
4898 gen_cast(&ctb);
4899 } else {
4900 int saved_nocode_wanted = nocode_wanted;
4901 nocode_wanted = 1;
4902 expr_land();
4903 vpop();
4904 nocode_wanted = saved_nocode_wanted;
4906 gen_cast(&cti);
4907 } else {
4908 int t = 0;
4909 save_regs(1);
4910 for(;;) {
4911 t = gvtst(1, t);
4912 if (tok != TOK_LAND) {
4913 vseti(VT_JMPI, t);
4914 break;
4916 next();
4917 expr_or();
4923 static void expr_lor(void)
4925 expr_land();
4926 if (tok == TOK_LOR) {
4927 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4928 CType ctb, cti;
4929 ctb.t = VT_BOOL;
4930 cti.t = VT_INT;
4931 next();
4932 gen_cast(&ctb);
4933 if (vtop->c.i) {
4934 int saved_nocode_wanted = nocode_wanted;
4935 nocode_wanted = 1;
4936 expr_lor();
4937 vpop();
4938 nocode_wanted = saved_nocode_wanted;
4939 } else {
4940 vpop();
4941 expr_lor();
4942 gen_cast(&ctb);
4944 gen_cast(&cti);
4945 } else {
4946 int t = 0;
4947 save_regs(1);
4948 for(;;) {
4949 t = gvtst(0, t);
4950 if (tok != TOK_LOR) {
4951 vseti(VT_JMP, t);
4952 break;
4954 next();
4955 expr_land();
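/* Illustrative note (annotation): when the left operand of && or || is a
   compile-time constant, the remaining operands are parsed with
   nocode_wanted set and only the known result survives, e.g.:

       if (0 && expensive_check())   // expensive_check() is a hypothetical
           do_work();                // function; no call is emitted here

   Otherwise gvtst() chains conditional jumps for short-circuit evaluation,
   accumulating a single jump target across the whole chain. */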
4961 static void expr_cond(void)
4963 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv;
4964 SValue sv;
4965 CType type, type1, type2;
4967 expr_lor();
4968 if (tok == '?') {
4969 next();
4970 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4971 int saved_nocode_wanted = nocode_wanted;
4972 CType boolean;
4973 int c;
4974 boolean.t = VT_BOOL;
4975 vdup();
4976 gen_cast(&boolean);
4977 c = vtop->c.i;
4978 vpop();
4979 if (c) {
4980 if (tok != ':' || !gnu_ext) {
4981 vpop();
4982 gexpr();
4984 skip(':');
4985 nocode_wanted = 1;
4986 expr_cond();
4987 vpop();
4988 nocode_wanted = saved_nocode_wanted;
4989 } else {
4990 vpop();
4991 if (tok != ':' || !gnu_ext) {
4992 nocode_wanted = 1;
4993 gexpr();
4994 vpop();
4995 nocode_wanted = saved_nocode_wanted;
4997 skip(':');
4998 expr_cond();
5001 else {
5002 if (vtop != vstack) {
5003 /* needed to avoid having different registers saved in
5004 each branch */
5005 if (is_float(vtop->type.t)) {
5006 rc = RC_FLOAT;
5007 #ifdef TCC_TARGET_X86_64
5008 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5009 rc = RC_ST0;
5011 #endif
5013 else
5014 rc = RC_INT;
5015 gv(rc);
5016 save_regs(1);
5018 if (tok == ':' && gnu_ext) {
5019 gv_dup();
5020 tt = gvtst(1, 0);
5021 } else {
5022 tt = gvtst(1, 0);
5023 gexpr();
5025 type1 = vtop->type;
5026 sv = *vtop; /* save value to handle it later */
5027 vtop--; /* no vpop so that FP stack is not flushed */
5028 skip(':');
5029 u = gjmp(0);
5030 gsym(tt);
5031 expr_cond();
5032 type2 = vtop->type;
5034 t1 = type1.t;
5035 bt1 = t1 & VT_BTYPE;
5036 t2 = type2.t;
5037 bt2 = t2 & VT_BTYPE;
5038 /* cast operands to correct type according to ISOC rules */
5039 if (is_float(bt1) || is_float(bt2)) {
5040 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5041 type.t = VT_LDOUBLE;
5042 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5043 type.t = VT_DOUBLE;
5044 } else {
5045 type.t = VT_FLOAT;
5047 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5048 /* cast to biggest op */
5049 type.t = VT_LLONG;
5050 /* convert to unsigned if it does not fit in a long long */
5051 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5052 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5053 type.t |= VT_UNSIGNED;
5054 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5055 /* If one is a null ptr constant the result type
5056 is the other. */
5057 if (is_null_pointer (vtop))
5058 type = type1;
5059 else if (is_null_pointer (&sv))
5060 type = type2;
5061 /* XXX: test pointer compatibility, C99 has more elaborate
5062 rules here. */
5063 else
5064 type = type1;
5065 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5066 /* XXX: test function pointer compatibility */
5067 type = bt1 == VT_FUNC ? type1 : type2;
5068 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5069 /* XXX: test structure compatibility */
5070 type = bt1 == VT_STRUCT ? type1 : type2;
5071 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5072 /* NOTE: as an extension, we accept void on only one side */
5073 type.t = VT_VOID;
5074 } else {
5075 /* integer operations */
5076 type.t = VT_INT;
5077 /* convert to unsigned if it does not fit in an integer */
5078 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5079 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5080 type.t |= VT_UNSIGNED;
5082 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5083 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5084 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5086 /* now we convert second operand */
5087 gen_cast(&type);
5088 if (islv) {
5089 mk_pointer(&vtop->type);
5090 gaddrof();
5092 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5093 gaddrof();
5094 rc = RC_INT;
5095 if (is_float(type.t)) {
5096 rc = RC_FLOAT;
5097 #ifdef TCC_TARGET_X86_64
5098 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5099 rc = RC_ST0;
5101 #endif
5102 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5103 /* for long longs, we use fixed registers to avoid having
5104 to handle a complicated move */
5105 rc = RC_IRET;
5108 r2 = gv(rc);
5109 /* this is horrible, but we must also convert first
5110 operand */
5111 tt = gjmp(0);
5112 gsym(u);
5113 /* put again first value and cast it */
5114 *vtop = sv;
5115 gen_cast(&type);
5116 if (islv) {
5117 mk_pointer(&vtop->type);
5118 gaddrof();
5120 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5121 gaddrof();
5122 r1 = gv(rc);
5123 move_reg(r2, r1, type.t);
5124 vtop->r = r2;
5125 gsym(tt);
5126 if (islv)
5127 indir();
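/* Illustrative note (annotation): as described above, struct-valued operands
   of ?: are rewritten as *(cond ? &a : &b) so the result stays an lvalue:

       struct S { int m; } s1, s2;
       int pick(int cond) {
           return (cond ? s1 : s2).m;   // member access still works
       }
*/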
5132 static void expr_eq(void)
5134 int t;
5136 expr_cond();
5137 if (tok == '=' ||
5138 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5139 tok == TOK_A_XOR || tok == TOK_A_OR ||
5140 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5141 test_lvalue();
5142 t = tok;
5143 next();
5144 if (t == '=') {
5145 expr_eq();
5146 } else {
5147 vdup();
5148 expr_eq();
5149 gen_op(t & 0x7f);
5151 vstore();
5155 ST_FUNC void gexpr(void)
5157 while (1) {
5158 expr_eq();
5159 if (tok != ',')
5160 break;
5161 vpop();
5162 next();
5166 /* parse an expression and return its type without any side effect. */
5167 static void expr_type(CType *type)
5169 int saved_nocode_wanted;
5171 saved_nocode_wanted = nocode_wanted;
5172 nocode_wanted = 1;
5173 gexpr();
5174 *type = vtop->type;
5175 vpop();
5176 nocode_wanted = saved_nocode_wanted;
5179 /* parse a unary expression and return its type without any side
5180 effect. */
5181 static void unary_type(CType *type)
5183 int a;
5185 a = nocode_wanted;
5186 nocode_wanted = 1;
5187 unary();
5188 *type = vtop->type;
5189 vpop();
5190 nocode_wanted = a;
5193 /* parse a constant expression and return value in vtop. */
5194 static void expr_const1(void)
5196 int a;
5197 a = const_wanted;
5198 const_wanted = 1;
5199 expr_cond();
5200 const_wanted = a;
5203 /* parse an integer constant and return its value. */
5204 ST_FUNC int expr_const(void)
5206 int c;
5207 expr_const1();
5208 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5209 expect("constant expression");
5210 c = vtop->c.i;
5211 vpop();
5212 return c;
5215 /* return the label token if current token is a label, otherwise
5216 return zero */
5217 static int is_label(void)
5219 int last_tok;
5221 /* fast test first */
5222 if (tok < TOK_UIDENT)
5223 return 0;
5224 /* no need to save tokc because tok is an identifier */
5225 last_tok = tok;
5226 next();
5227 if (tok == ':') {
5228 next();
5229 return last_tok;
5230 } else {
5231 unget_tok(last_tok);
5232 return 0;
5236 static void label_or_decl(int l)
5238 int last_tok;
5240 /* fast test first */
5241 if (tok >= TOK_UIDENT)
5243 /* no need to save tokc because tok is an identifier */
5244 last_tok = tok;
5245 next();
5246 if (tok == ':') {
5247 unget_tok(last_tok);
5248 return;
5250 unget_tok(last_tok);
5252 decl(l);
5255 static int case_cmp(const void *pa, const void *pb)
5257 int a = (*(struct case_t**) pa)->v1;
5258 int b = (*(struct case_t**) pb)->v1;
5259 return a < b ? -1 : a > b;
5262 static int gcase(struct case_t **base, int len, int case_reg, int *bsym)
5264 struct case_t *p;
5265 int e;
5266 while (len > 4) {
5267 /* binary search */
5268 p = base[len/2];
5269 vseti(case_reg, 0);
5270 vdup();
5271 vpushi(p->v2);
5272 gen_op(TOK_LE);
5273 e = gtst(1, 0);
5274 case_reg = gv(RC_INT);
5275 vpop();
5276 vseti(case_reg, 0);
5277 vdup();
5278 vpushi(p->v1);
5279 gen_op(TOK_GE);
5280 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5281 case_reg = gv(RC_INT);
5282 vpop();
5283 /* x < v1 */
5284 case_reg = gcase(base, len/2, case_reg, bsym);
5285 if (cur_switch->def_sym)
5286 gjmp_addr(cur_switch->def_sym);
5287 else
5288 *bsym = gjmp(*bsym);
5289 /* x > v2 */
5290 gsym(e);
5291 e = len/2 + 1;
5292 base += e; len -= e;
5294 /* linear scan */
5295 while (len--) {
5296 p = *base++;
5297 vseti(case_reg, 0);
5298 vdup();
5299 vpushi(p->v2);
5300 if (p->v1 == p->v2) {
5301 gen_op(TOK_EQ);
5302 gtst_addr(0, p->sym);
5303 } else {
5304 gen_op(TOK_LE);
5305 e = gtst(1, 0);
5306 case_reg = gv(RC_INT);
5307 vpop();
5308 vseti(case_reg, 0);
5309 vdup();
5310 vpushi(p->v1);
5311 gen_op(TOK_GE);
5312 gtst_addr(0, p->sym);
5313 gsym(e);
5315 case_reg = gv(RC_INT);
5316 vpop();
5318 return case_reg;
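/* Illustrative note (annotation): case values are kept as [v1,v2] ranges
   (v1 == v2 for a plain case), sorted with case_cmp(), and gcase() emits a
   binary search while more than 4 ranges remain, then a linear scan:

       switch (c) {
       case 'a':           break;   // v1 == v2 == 'a'
       case '0' ... '9':   break;   // GNU range: v1 = '0', v2 = '9'
       default:            break;
       }
*/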
5321 static void block(int *bsym, int *csym, int is_expr)
5323 int a, b, c, d;
5324 Sym *s;
5326 /* generate line number info */
5327 if (tcc_state->do_debug &&
5328 (last_line_num != file->line_num || last_ind != ind)) {
5329 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5330 last_ind = ind;
5331 last_line_num = file->line_num;
5334 if (is_expr) {
5335 /* default return value is (void) */
5336 vpushi(0);
5337 vtop->type.t = VT_VOID;
5340 if (tok == TOK_IF) {
5341 /* if test */
5342 next();
5343 skip('(');
5344 gexpr();
5345 skip(')');
5346 a = gvtst(1, 0);
5347 block(bsym, csym, 0);
5348 c = tok;
5349 if (c == TOK_ELSE) {
5350 next();
5351 d = gjmp(0);
5352 gsym(a);
5353 block(bsym, csym, 0);
5354 gsym(d); /* patch else jmp */
5355 } else
5356 gsym(a);
5357 } else if (tok == TOK_WHILE) {
5358 next();
5359 d = ind;
5360 vla_sp_restore();
5361 skip('(');
5362 gexpr();
5363 skip(')');
5364 a = gvtst(1, 0);
5365 b = 0;
5366 ++local_scope;
5367 block(&a, &b, 0);
5368 --local_scope;
5369 if(!nocode_wanted)
5370 gjmp_addr(d);
5371 gsym(a);
5372 gsym_addr(b, d);
5373 } else if (tok == '{') {
5374 Sym *llabel;
5375 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5377 next();
5378 /* record local declaration stack position */
5379 s = local_stack;
5380 llabel = local_label_stack;
5381 ++local_scope;
5383 /* handle local labels declarations */
5384 if (tok == TOK_LABEL) {
5385 next();
5386 for(;;) {
5387 if (tok < TOK_UIDENT)
5388 expect("label identifier");
5389 label_push(&local_label_stack, tok, LABEL_DECLARED);
5390 next();
5391 if (tok == ',') {
5392 next();
5393 } else {
5394 skip(';');
5395 break;
5399 while (tok != '}') {
5400 label_or_decl(VT_LOCAL);
5401 if (tok != '}') {
5402 if (is_expr)
5403 vpop();
5404 block(bsym, csym, is_expr);
5407 /* pop locally defined labels */
5408 label_pop(&local_label_stack, llabel);
5409 if(is_expr) {
5410 /* XXX: this solution makes only valgrind happy...
5411 triggered by gcc.c-torture/execute/20000917-1.c */
5412 Sym *p;
5413 switch(vtop->type.t & VT_BTYPE) {
5414 /* case VT_PTR: */
5415 /* this breaks a compilation of the linux kernel v2.4.26 */
5416 /* pmd_t *new = ({ __asm__ __volatile__("ud2\n") ; ((pmd_t *)1); }); */
5417 /* Look at commit a80acab: Display error on statement expressions with complex return type */
5418 /* A pointer is not a complex return type */
5419 case VT_STRUCT:
5420 case VT_ENUM:
5421 case VT_FUNC:
5422 for(p=vtop->type.ref;p;p=p->prev)
5423 if(p->prev==s)
5424 tcc_error("unsupported expression type");
5427 /* pop locally defined symbols */
5428 --local_scope;
5429 sym_pop(&local_stack, s);
5431 /* Pop VLA frames and restore stack pointer if required */
5432 if (vlas_in_scope > saved_vlas_in_scope) {
5433 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5434 vla_sp_restore();
5436 vlas_in_scope = saved_vlas_in_scope;
5438 next();
5439 } else if (tok == TOK_RETURN) {
5440 next();
5441 if (tok != ';') {
5442 gexpr();
5443 gen_assign_cast(&func_vt);
5444 #ifdef TCC_TARGET_ARM64
5445 // Perhaps it would be better to use this for all backends:
5446 greturn();
5447 #else
5448 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5449 CType type, ret_type;
5450 int ret_align, ret_nregs, regsize;
5451 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5452 &ret_align, &regsize);
5453 if (0 == ret_nregs) {
5454 /* if returning structure, must copy it to implicit
5455 first pointer arg location */
5456 type = func_vt;
5457 mk_pointer(&type);
5458 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5459 indir();
5460 vswap();
5461 /* copy structure value to pointer */
5462 vstore();
5463 } else {
5464 /* returning structure packed into registers */
5465 int r, size, addr, align;
5466 size = type_size(&func_vt,&align);
5467 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5468 (vtop->c.i & (ret_align-1)))
5469 && (align & (ret_align-1))) {
5470 loc = (loc - size) & -ret_align;
5471 addr = loc;
5472 type = func_vt;
5473 vset(&type, VT_LOCAL | VT_LVAL, addr);
5474 vswap();
5475 vstore();
5476 vpop();
5477 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5479 vtop->type = ret_type;
5480 if (is_float(ret_type.t))
5481 r = rc_fret(ret_type.t);
5482 else
5483 r = RC_IRET;
5485 if (ret_nregs == 1)
5486 gv(r);
5487 else {
5488 for (;;) {
5489 vdup();
5490 gv(r);
5491 vpop();
5492 if (--ret_nregs == 0)
5493 break;
5494 /* We assume that when a structure is returned in multiple
5495 registers, their classes are consecutive values of the
5496 sequence s(n) = 2^n */
5497 r <<= 1;
5498 vtop->c.i += regsize;
5502 } else if (is_float(func_vt.t)) {
5503 gv(rc_fret(func_vt.t));
5504 } else {
5505 gv(RC_IRET);
5507 #endif
5508 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5510 skip(';');
5511 /* jump unless last stmt in top-level block */
5512 if (tok != '}' || local_scope != 1)
5513 rsym = gjmp(rsym);
5514 } else if (tok == TOK_BREAK) {
5515 /* compute jump */
5516 if (!bsym)
5517 tcc_error("cannot break");
5518 *bsym = gjmp(*bsym);
5519 next();
5520 skip(';');
5521 } else if (tok == TOK_CONTINUE) {
5522 /* compute jump */
5523 if (!csym)
5524 tcc_error("cannot continue");
5525 vla_sp_restore_root();
5526 *csym = gjmp(*csym);
5527 next();
5528 skip(';');
5529 } else if (tok == TOK_FOR) {
5530 int e;
5531 next();
5532 skip('(');
5533 s = local_stack;
5534 ++local_scope;
5535 if (tok != ';') {
5536 /* c99 for-loop init decl? */
5537 if (!decl0(VT_LOCAL, 1)) {
5538 /* no, regular for-loop init expr */
5539 gexpr();
5540 vpop();
5543 skip(';');
5544 d = ind;
5545 c = ind;
5546 vla_sp_restore();
5547 a = 0;
5548 b = 0;
5549 if (tok != ';') {
5550 gexpr();
5551 a = gvtst(1, 0);
5553 skip(';');
5554 if (tok != ')') {
5555 e = gjmp(0);
5556 c = ind;
5557 vla_sp_restore();
5558 gexpr();
5559 vpop();
5560 gjmp_addr(d);
5561 gsym(e);
5563 skip(')');
5564 block(&a, &b, 0);
5565 if(!nocode_wanted)
5566 gjmp_addr(c);
5567 gsym(a);
5568 gsym_addr(b, c);
5569 --local_scope;
5570 sym_pop(&local_stack, s);
5572 } else
5573 if (tok == TOK_DO) {
5574 next();
5575 a = 0;
5576 b = 0;
5577 d = ind;
5578 vla_sp_restore();
5579 block(&a, &b, 0);
5580 skip(TOK_WHILE);
5581 skip('(');
5582 gsym(b);
5583 gexpr();
5584 c = gvtst(0, 0);
5585 gsym_addr(c, d);
5586 skip(')');
5587 gsym(a);
5588 skip(';');
5589 } else
5590 if (tok == TOK_SWITCH) {
5591 struct switch_t *saved, sw;
5592 next();
5593 skip('(');
5594 gexpr();
5595 /* XXX: other types than integer */
5596 c = gv(RC_INT);
5597 vpop();
5598 skip(')');
5599 a = 0;
5600 b = gjmp(0); /* jump to first case */
5601 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5602 saved = cur_switch;
5603 cur_switch = &sw;
5604 block(&a, csym, 0);
5605 a = gjmp(a); /* add implicit break */
5606 /* case lookup */
5607 gsym(b);
5608 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5609 for (b = 1; b < sw.n; b++)
5610 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5611 tcc_error("duplicate case value");
5612 gcase(sw.p, sw.n, c, &a);
5613 if (sw.def_sym)
5614 gjmp_addr(sw.def_sym);
5615 dynarray_reset(&sw.p, &sw.n);
5616 cur_switch = saved;
5617 /* break label */
5618 gsym(a);
5619 } else
5620 if (tok == TOK_CASE) {
5621 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5622 if (!cur_switch)
5623 expect("switch");
5624 next();
5625 cr->v1 = cr->v2 = expr_const();
5626 if (gnu_ext && tok == TOK_DOTS) {
5627 next();
5628 cr->v2 = expr_const();
5629 if (cr->v2 < cr->v1)
5630 tcc_warning("empty case range");
5632 cr->sym = ind;
5633 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5634 skip(':');
5635 is_expr = 0;
5636 goto block_after_label;
5637 } else
5638 if (tok == TOK_DEFAULT) {
5639 next();
5640 skip(':');
5641 if (!cur_switch)
5642 expect("switch");
5643 if (cur_switch->def_sym)
5644 tcc_error("too many 'default'");
5645 cur_switch->def_sym = ind;
5646 is_expr = 0;
5647 goto block_after_label;
5648 } else
5649 if (tok == TOK_GOTO) {
5650 next();
5651 if (tok == '*' && gnu_ext) {
5652 /* computed goto */
5653 next();
5654 gexpr();
5655 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5656 expect("pointer");
5657 ggoto();
5658 } else if (tok >= TOK_UIDENT) {
5659 s = label_find(tok);
5660 /* put forward definition if needed */
5661 if (!s) {
5662 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5663 } else {
5664 if (s->r == LABEL_DECLARED)
5665 s->r = LABEL_FORWARD;
5667 vla_sp_restore_root();
5668 if (s->r & LABEL_FORWARD)
5669 s->jnext = gjmp(s->jnext);
5670 else
5671 gjmp_addr(s->jnext);
5672 next();
5673 } else {
5674 expect("label identifier");
5676 skip(';');
5677 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5678 asm_instr();
5679 } else {
5680 b = is_label();
5681 if (b) {
5682 /* label case */
5683 s = label_find(b);
5684 if (s) {
5685 if (s->r == LABEL_DEFINED)
5686 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5687 gsym(s->jnext);
5688 s->r = LABEL_DEFINED;
5689 } else {
5690 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5692 s->jnext = ind;
5693 vla_sp_restore();
5694 /* we accept this, but it is a mistake */
5695 block_after_label:
5696 if (tok == '}') {
5697 tcc_warning("deprecated use of label at end of compound statement");
5698 } else {
5699 if (is_expr)
5700 vpop();
5701 block(bsym, csym, is_expr);
5703 } else {
5704 /* expression case */
5705 if (tok != ';') {
5706 if (is_expr) {
5707 vpop();
5708 gexpr();
5709 } else {
5710 gexpr();
5711 vpop();
5714 skip(';');
5719 #define EXPR_CONST 1
5720 #define EXPR_ANY 2
5722 static void parse_init_elem(int expr_type)
5724 int saved_global_expr;
5725 switch(expr_type) {
5726 case EXPR_CONST:
5727 /* compound literals must be allocated globally in this case */
5728 saved_global_expr = global_expr;
5729 global_expr = 1;
5730 expr_const1();
5731 global_expr = saved_global_expr;
5732 /* NOTE: symbols are accepted */
5733 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5734 tcc_error("initializer element is not constant");
5735 break;
5736 case EXPR_ANY:
5737 expr_eq();
5738 break;
5742 /* t is the array or struct type. c is the array or struct
5743 address. cur_field is the pointer to the current
5744 value, for arrays the 'c' member contains the current start
5745 index and the 'r' contains the end index (in case of range init).
5746 'size_only' is true if only size info is needed (only used
5747 in arrays) */
5748 static void decl_designator(CType *type, Section *sec, unsigned long c,
5749 Sym **cur_field, int size_only)
5751 Sym *s, *f;
5752 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5753 CType type1;
5755 notfirst = 0;
5756 elem_size = 0;
5757 nb_elems = 1;
5758 if (gnu_ext && (l = is_label()) != 0)
5759 goto struct_field;
5760 while (tok == '[' || tok == '.') {
5761 if (tok == '[') {
5762 if (!(type->t & VT_ARRAY))
5763 expect("array type");
5764 s = type->ref;
5765 next();
5766 index = expr_const();
5767 if (index < 0 || (s->c >= 0 && index >= s->c))
5768 tcc_error("invalid index");
5769 if (tok == TOK_DOTS && gnu_ext) {
5770 next();
5771 index_last = expr_const();
5772 if (index_last < 0 ||
5773 (s->c >= 0 && index_last >= s->c) ||
5774 index_last < index)
5775 tcc_error("invalid index");
5776 } else {
5777 index_last = index;
5779 skip(']');
5780 if (!notfirst) {
5781 (*cur_field)->c = index;
5782 (*cur_field)->r = index_last;
5784 type = pointed_type(type);
5785 elem_size = type_size(type, &align);
5786 c += index * elem_size;
5787 /* NOTE: we only support ranges for the last designator */
5788 nb_elems = index_last - index + 1;
5789 if (nb_elems != 1) {
5790 notfirst = 1;
5791 break;
5793 } else {
5794 next();
5795 l = tok;
5796 next();
5797 struct_field:
5798 if ((type->t & VT_BTYPE) != VT_STRUCT)
5799 expect("struct/union type");
5800 f = find_field(type, l);
5801 if (!f)
5802 expect("field");
5803 if (!notfirst)
5804 *cur_field = f;
5805 /* XXX: fix this mess by using explicit storage field */
5806 type1 = f->type;
5807 type1.t |= (type->t & ~VT_TYPE);
5808 type = &type1;
5809 c += f->c;
5811 notfirst = 1;
5813 if (notfirst) {
5814 if (tok == '=') {
5815 next();
5816 } else {
5817 if (!gnu_ext)
5818 expect("=");
5820 } else {
5821 if (type->t & VT_ARRAY) {
5822 index = (*cur_field)->c;
5823 if (type->ref->c >= 0 && index >= type->ref->c)
5824 tcc_error("index too large");
5825 type = pointed_type(type);
5826 c += index * type_size(type, &align);
5827 } else {
5828 f = *cur_field;
5829 if (!f)
5830 tcc_error("too many field init");
5831 /* XXX: fix this mess by using explicit storage field */
5832 type1 = f->type;
5833 type1.t |= (type->t & ~VT_TYPE);
5834 type = &type1;
5835 c += f->c;
5838 decl_initializer(type, sec, c, 0, size_only);
5840 /* XXX: make it more general */
5841 if (!size_only && nb_elems > 1) {
5842 unsigned long c_end;
5843 uint8_t *src, *dst;
5844 int i;
5846 if (!sec) {
5847 vset(type, VT_LOCAL|VT_LVAL, c);
5848 for (i = 1; i < nb_elems; i++) {
5849 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
5850 vswap();
5851 vstore();
5853 vpop();
5854 } else {
5855 c_end = c + nb_elems * elem_size;
5856 if (c_end > sec->data_allocated)
5857 section_realloc(sec, c_end);
5858 src = sec->data + c;
5859 dst = src;
5860 for(i = 1; i < nb_elems; i++) {
5861 dst += elem_size;
5862 memcpy(dst, src, elem_size);
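/* Illustrative note (annotation): designators handled above, including the
   GNU range form (supported only as the last designator, see the NOTE above):

       int  a[10] = { [2] = 5, [6 ... 8] = 7 };
       struct P { int x, y; } p = { .y = 3 };

   For a range in a local object the value is stored once and then copied to
   the other elements; in a section it is duplicated with memcpy. */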
5868 /* store a value or an expression directly in global data or in local array */
5869 static void init_putv(CType *type, Section *sec, unsigned long c)
5871 int bt, bit_pos, bit_size;
5872 void *ptr;
5873 unsigned long long bit_mask;
5874 CType dtype;
5876 dtype = *type;
5877 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5879 if (sec) {
5880 int size, align;
5881 /* XXX: not portable */
5882 /* XXX: generate error if incorrect relocation */
5883 gen_assign_cast(&dtype);
5884 bt = type->t & VT_BTYPE;
5885 size = type_size(type, &align);
5886 if (c + size > sec->data_allocated) {
5887 section_realloc(sec, c + size);
5889 ptr = sec->data + c;
5890 /* XXX: make code faster ? */
5891 if (!(type->t & VT_BITFIELD)) {
5892 bit_pos = 0;
5893 bit_size = PTR_SIZE * 8;
5894 bit_mask = -1LL;
5895 } else {
5896 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
5897 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
5898 bit_mask = (1LL << bit_size) - 1;
5900 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
5901 vtop->sym->v >= SYM_FIRST_ANOM &&
5902 /* XXX This rejects compound literals like
5903 '(void *){ptr}'. The problem is that '&sym' is
5904 represented the same way, which would be ruled out
5905 by the SYM_FIRST_ANOM check above, but also '"string"'
5906 in 'char *p = "string"' is represented the same way,
5907 with the type being VT_PTR and the symbol being an
5908 anonymous one. That is, there's no difference in vtop
5909 between '(void *){x}' and '&(void *){x}'. Ignore
5910 pointer-typed entities here. Hopefully no real code
5911 will ever use compound literals with scalar type. */
5912 (vtop->type.t & VT_BTYPE) != VT_PTR) {
5913 /* These come from compound literals, memcpy stuff over. */
5914 Section *ssec;
5915 ElfW(Sym) *esym;
5916 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
5917 ssec = tcc_state->sections[esym->st_shndx];
5918 memmove (ptr, ssec->data + esym->st_value, size);
5919 } else {
5920 if ((vtop->r & VT_SYM) &&
5921 (bt == VT_BYTE ||
5922 bt == VT_SHORT ||
5923 bt == VT_DOUBLE ||
5924 bt == VT_LDOUBLE ||
5925 #if PTR_SIZE == 8
5926 (bt == VT_LLONG && bit_size != 64) ||
5927 bt == VT_INT
5928 #else
5929 bt == VT_LLONG ||
5930 (bt == VT_INT && bit_size != 32)
5931 #endif
5933 tcc_error("initializer element is not computable at load time");
5934 switch(bt) {
5935 /* XXX: when cross-compiling we assume that each type has the
5936 same representation on host and target, which is likely to
5937 be wrong in the case of long double */
5938 case VT_BOOL:
5939 vtop->c.i = (vtop->c.i != 0);
5940 case VT_BYTE:
5941 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
5942 break;
5943 case VT_SHORT:
5944 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
5945 break;
5946 case VT_DOUBLE:
5947 *(double *)ptr = vtop->c.d;
5948 break;
5949 case VT_LDOUBLE:
5950 if (sizeof(long double) == LDOUBLE_SIZE)
5951 *(long double *)ptr = vtop->c.ld;
5952 else if (sizeof(double) == LDOUBLE_SIZE)
5953 *(double *)ptr = vtop->c.ld;
5954 else
5955 tcc_error("can't cross compile long double constants");
5956 break;
5957 #if PTR_SIZE != 8
5958 case VT_LLONG:
5959 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
5960 break;
5961 #else
5962 case VT_LLONG:
5963 #endif
5964 case VT_PTR:
5966 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
5967 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
5968 if (vtop->r & VT_SYM)
5969 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
5970 else
5971 *(addr_t *)ptr |= val;
5972 #else
5973 if (vtop->r & VT_SYM)
5974 greloc(sec, vtop->sym, c, R_DATA_PTR);
5975 *(addr_t *)ptr |= val;
5976 #endif
5977 break;
5979 default:
5981 int val = (vtop->c.i & bit_mask) << bit_pos;
5982 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
5983 if (vtop->r & VT_SYM)
5984 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
5985 else
5986 *(int *)ptr |= val;
5987 #else
5988 if (vtop->r & VT_SYM)
5989 greloc(sec, vtop->sym, c, R_DATA_PTR);
5990 *(int *)ptr |= val;
5991 #endif
5992 break;
5996 vtop--;
5997 } else {
5998 vset(&dtype, VT_LOCAL|VT_LVAL, c);
5999 vswap();
6000 vstore();
6001 vpop();
6005 /* put zeros for variable based init */
6006 static void init_putz(Section *sec, unsigned long c, int size)
6008 if (sec) {
6009 /* nothing to do because globals are already set to zero */
6010 } else {
6011 vpush_global_sym(&func_old_type, TOK_memset);
6012 vseti(VT_LOCAL, c);
6013 #ifdef TCC_TARGET_ARM
6014 vpushs(size);
6015 vpushi(0);
6016 #else
6017 vpushi(0);
6018 vpushs(size);
6019 #endif
6020 gfunc_call(3);
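/* Illustrative note (annotation): for an automatic object the holes are
   cleared with a call to memset(); globals need nothing because their
   section is already zero:

       void f(void) {
           int a[100] = { 1, 2 };   // a[2] .. a[99] zeroed via memset
       }
*/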
6024 /* 't' contains the type and storage info. 'c' is the offset of the
6025 object in section 'sec'. If 'sec' is NULL, it means stack based
6026 allocation. 'first' is true if array '{' must be read (multi
6027 dimension implicit array init handling). 'size_only' is true if
6028 size only evaluation is wanted (only for arrays). */
6029 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6030 int first, int size_only)
6032 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6033 int size1, align1;
6034 int have_elem;
6035 Sym *s, *f;
6036 Sym indexsym;
6037 CType *t1;
6039 /* If we currently are at an '}' or ',' we have read an initializer
6040 element in one of our callers, and not yet consumed it. */
6041 have_elem = tok == '}' || tok == ',';
6042 if (!have_elem && tok != '{' &&
6043 /* In case of strings we have special handling for arrays, so
6044 don't consume them as initializer value (which would commit them
6045 to some anonymous symbol). */
6046 tok != TOK_LSTR && tok != TOK_STR &&
6047 !size_only) {
6048 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6049 have_elem = 1;
6052 if (have_elem &&
6053 !(type->t & VT_ARRAY) &&
6054 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6055 The source type might have VT_CONSTANT set, which is
6056 of course assignable to non-const elements. */
6057 is_compatible_parameter_types(type, &vtop->type)) {
6058 init_putv(type, sec, c);
6059 } else if (type->t & VT_ARRAY) {
6060 s = type->ref;
6061 n = s->c;
6062 array_length = 0;
6063 t1 = pointed_type(type);
6064 size1 = type_size(t1, &align1);
6066 no_oblock = 1;
6067 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6068 tok == '{') {
6069 if (tok != '{')
6070 tcc_error("character array initializer must be a literal,"
6071 " optionally enclosed in braces");
6072 skip('{');
6073 no_oblock = 0;
6076 /* only parse strings here if correct type (otherwise: handle
6077 them as ((w)char *) expressions) */
6078 if ((tok == TOK_LSTR &&
6079 #ifdef TCC_TARGET_PE
6080 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6081 #else
6082 (t1->t & VT_BTYPE) == VT_INT
6083 #endif
6084 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6085 while (tok == TOK_STR || tok == TOK_LSTR) {
6086 int cstr_len, ch;
6088 /* compute maximum number of chars wanted */
6089 if (tok == TOK_STR)
6090 cstr_len = tokc.str.size;
6091 else
6092 cstr_len = tokc.str.size / sizeof(nwchar_t);
6093 cstr_len--;
6094 nb = cstr_len;
6095 if (n >= 0 && nb > (n - array_length))
6096 nb = n - array_length;
6097 if (!size_only) {
6098 if (cstr_len > nb)
6099 tcc_warning("initializer-string for array is too long");
6100 /* in order to go faster for the common case (char
6101 string in a global variable), we handle it
6102 specifically */
6103 if (sec && tok == TOK_STR && size1 == 1) {
6104 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6105 } else {
6106 for(i=0;i<nb;i++) {
6107 if (tok == TOK_STR)
6108 ch = ((unsigned char *)tokc.str.data)[i];
6109 else
6110 ch = ((nwchar_t *)tokc.str.data)[i];
6111 vpushi(ch);
6112 init_putv(t1, sec, c + (array_length + i) * size1);
6116 array_length += nb;
6117 next();
6119 /* only add trailing zero if enough storage (no
6120 warning in this case since it is standard) */
6121 if (n < 0 || array_length < n) {
6122 if (!size_only) {
6123 vpushi(0);
6124 init_putv(t1, sec, c + (array_length * size1));
6126 array_length++;
6128 } else {
6129 indexsym.c = 0;
6130 indexsym.r = 0;
6131 f = &indexsym;
6133 do_init_list:
6134 while (tok != '}' || have_elem) {
6135 decl_designator(type, sec, c, &f, size_only);
6136 have_elem = 0;
6137 index = f->c;
6138 /* must put zero in holes (note that doing it that way
6139 ensures that it even works with designators) */
6140 if (!size_only && array_length < index) {
6141 init_putz(sec, c + array_length * size1,
6142 (index - array_length) * size1);
6144 if (type->t & VT_ARRAY) {
6145 index = indexsym.c = ++indexsym.r;
6146 } else {
6147 index = index + type_size(&f->type, &align1);
6148 if (s->type.t == TOK_UNION)
6149 f = NULL;
6150 else
6151 f = f->next;
6153 if (index > array_length)
6154 array_length = index;
6156 if (type->t & VT_ARRAY) {
6157 /* special test for multi dimensional arrays (may not
6158 be strictly correct if designators are used at the
6159 same time) */
6160 if (no_oblock && index >= n)
6161 break;
6162 } else {
6163 if (no_oblock && f == NULL)
6164 break;
6166 if (tok == '}')
6167 break;
6168 skip(',');
6171 /* put zeros at the end */
6172 if (!size_only && array_length < n) {
6173 init_putz(sec, c + array_length * size1,
6174 (n - array_length) * size1);
6176 if (!no_oblock)
6177 skip('}');
6178 /* patch type size if needed, which happens only for array types */
6179 if (n < 0)
6180 s->c = array_length;
6181 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6182 size1 = 1;
6183 no_oblock = 1;
6184 if (first || tok == '{') {
6185 skip('{');
6186 no_oblock = 0;
6188 s = type->ref;
6189 f = s->next;
6190 array_length = 0;
6191 n = s->c;
6192 goto do_init_list;
6193 } else if (tok == '{') {
6194 next();
6195 decl_initializer(type, sec, c, first, size_only);
6196 skip('}');
6197 } else if (size_only) {
6198 /* If we supported only ISO C we wouldn't have to accept calling
6199 this on anything other than an array with size_only==1 (and even then
6200 only on the outermost level, so no recursion would be needed),
6201 because initializing a flex array member isn't supported.
6202 But GNU C supports it, so we need to recurse even into
6203 subfields of structs and arrays when size_only is set. */
6204 /* just skip expression */
6205 parlevel = parlevel1 = 0;
6206 while ((parlevel > 0 || parlevel1 > 0 ||
6207 (tok != '}' && tok != ',')) && tok != -1) {
6208 if (tok == '(')
6209 parlevel++;
6210 else if (tok == ')') {
6211 if (parlevel == 0 && parlevel1 == 0)
6212 break;
6213 parlevel--;
6215 else if (tok == '{')
6216 parlevel1++;
6217 else if (tok == '}') {
6218 if (parlevel == 0 && parlevel1 == 0)
6219 break;
6220 parlevel1--;
6222 next();
6224 } else {
6225 if (!have_elem) {
6226 /* This should happen only when we haven't parsed
6227 the init element above for fear of committing a
6228 string constant to memory too early. */
6229 if (tok != TOK_STR && tok != TOK_LSTR)
6230 expect("string constant");
6231 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6233 init_putv(type, sec, c);
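/* Illustrative note (annotation): string initializers for char arrays as
   handled above:

       char s[8] = "abc";   // bytes copied, remaining elements zero-filled
       char t[3] = "abc";   // exactly fills the array, no trailing 0 stored
       char u[2] = "abc";   // "initializer-string for array is too long"
*/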
6237 /* parse an initializer for type 't' if 'has_init' is non zero, and
6238 allocate space in local or global data space ('r' is either
6239 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6240 variable 'v' of scope 'scope' is declared before initializers
6241 are parsed. If 'v' is zero, then a reference to the new object
6242 is put in the value stack. If 'has_init' is 2, a special parsing
6243 is done to handle string constants. */
6244 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6245 int has_init, int v, int scope)
6247 int size, align, addr, data_offset;
6248 int level;
6249 ParseState saved_parse_state = {0};
6250 TokenString *init_str = NULL;
6251 Section *sec;
6252 Sym *flexible_array;
6254 flexible_array = NULL;
6255 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6256 Sym *field = type->ref->next;
6257 if (field) {
6258 while (field->next)
6259 field = field->next;
6260 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6261 flexible_array = field;
6265 size = type_size(type, &align);
6266 /* If unknown size, we must evaluate it before
6267 evaluating initializers because
6268 initializers can generate global data too
6269 (e.g. string pointers or ISOC99 compound
6270 literals). It also simplifies local
6271 initializer handling */
6272 if (size < 0 || (flexible_array && has_init)) {
6273 if (!has_init)
6274 tcc_error("unknown type size");
6275 /* get all init string */
6276 init_str = tok_str_alloc();
6277 if (has_init == 2) {
6278 /* only get strings */
6279 while (tok == TOK_STR || tok == TOK_LSTR) {
6280 tok_str_add_tok(init_str);
6281 next();
6283 } else {
6284 level = 0;
6285 while (level > 0 || (tok != ',' && tok != ';')) {
6286 if (tok < 0)
6287 tcc_error("unexpected end of file in initializer");
6288 tok_str_add_tok(init_str);
6289 if (tok == '{')
6290 level++;
6291 else if (tok == '}') {
6292 level--;
6293 if (level <= 0) {
6294 next();
6295 break;
6298 next();
6301 tok_str_add(init_str, -1);
6302 tok_str_add(init_str, 0);
6304 /* compute size */
6305 save_parse_state(&saved_parse_state);
6307 begin_macro(init_str, 1);
6308 next();
6309 decl_initializer(type, NULL, 0, 1, 1);
6310 /* prepare second initializer parsing */
6311 macro_ptr = init_str->str;
6312 next();
6314 /* if still unknown size, error */
6315 size = type_size(type, &align);
6316 if (size < 0)
6317 tcc_error("unknown type size");
6319 /* If there's a flex member and it was used in the initializer,
6320 adjust the size. */
6321 if (flexible_array &&
6322 flexible_array->type.ref->c > 0)
6323 size += flexible_array->type.ref->c
6324 * pointed_size(&flexible_array->type);
6325 /* take into account specified alignment if bigger */
6326 if (ad->a.aligned) {
6327 if (ad->a.aligned > align)
6328 align = ad->a.aligned;
6329 } else if (ad->a.packed) {
6330 align = 1;
6332 if ((r & VT_VALMASK) == VT_LOCAL) {
6333 sec = NULL;
6334 #ifdef CONFIG_TCC_BCHECK
6335 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6336 loc--;
6338 #endif
6339 loc = (loc - size) & -align;
6340 addr = loc;
6341 #ifdef CONFIG_TCC_BCHECK
6342 /* handles bounds */
6343 /* XXX: currently, since we do only one pass, we cannot track
6344 '&' operators, so we add only arrays */
6345 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6346 addr_t *bounds_ptr;
6347 /* add padding between regions */
6348 loc--;
6349 /* then add local bound info */
6350 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6351 bounds_ptr[0] = addr;
6352 bounds_ptr[1] = size;
6354 #endif
6355 if (v) {
6356 /* local variable */
6357 sym_push(v, type, r, addr);
6358 } else {
6359 /* push local reference */
6360 vset(type, r, addr);
6362 } else {
6363 Sym *sym;
6365 sym = NULL;
6366 if (v && scope == VT_CONST) {
6367 /* see if the symbol was already defined */
6368 sym = sym_find(v);
6369 if (sym) {
6370 if (!is_compatible_types(&sym->type, type))
6371 tcc_error("incompatible types for redefinition of '%s'",
6372 get_tok_str(v, NULL));
6373 if (sym->type.t & VT_EXTERN) {
6374 /* if the variable is extern, it was not allocated */
6375 sym->type.t &= ~VT_EXTERN;
6376 /* set array size if it was omitted in extern
6377 declaration */
6378 if ((sym->type.t & VT_ARRAY) &&
6379 sym->type.ref->c < 0 &&
6380 type->ref->c >= 0)
6381 sym->type.ref->c = type->ref->c;
6382 } else {
6383 /* we accept several definitions of the same
6384 global variable. this is tricky, because we
6385 must play with the SHN_COMMON type of the symbol */
6386 /* XXX: should check if the variable was already
6387 initialized. It is incorrect to initialize it
6388 twice */
6389 /* no init data, we won't add more to the symbol */
6390 if (!has_init)
6391 goto no_alloc;
6396 /* allocate symbol in corresponding section */
6397 sec = ad->section;
6398 if (!sec) {
6399 if (has_init)
6400 sec = data_section;
6401 else if (tcc_state->nocommon)
6402 sec = bss_section;
6404 if (sec) {
6405 data_offset = sec->data_offset;
6406 data_offset = (data_offset + align - 1) & -align;
6407 addr = data_offset;
6408 /* very important to increment global pointer at this time
6409 because initializers themselves can create new initializers */
6410 data_offset += size;
6411 #ifdef CONFIG_TCC_BCHECK
6412 /* add padding if bound check */
6413 if (tcc_state->do_bounds_check)
6414 data_offset++;
6415 #endif
6416 sec->data_offset = data_offset;
6417 /* allocate section space to put the data */
6418 if (sec->sh_type != SHT_NOBITS &&
6419 data_offset > sec->data_allocated)
6420 section_realloc(sec, data_offset);
6421 /* align section if needed */
6422 if (align > sec->sh_addralign)
6423 sec->sh_addralign = align;
6424 } else {
6425 addr = 0; /* avoid warning */
6428 if (v) {
6429 if (scope != VT_CONST || !sym) {
6430 sym = sym_push(v, type, r | VT_SYM, 0);
6431 sym->asm_label = ad->asm_label;
6433 /* update symbol definition */
6434 if (sec) {
6435 put_extern_sym(sym, sec, addr, size);
6436 } else {
6437 ElfW(Sym) *esym;
6438 /* put a common area */
6439 put_extern_sym(sym, NULL, align, size);
6440 /* XXX: find a nicer way */
6441 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6442 esym->st_shndx = SHN_COMMON;
6444 } else {
6445 /* push global reference */
6446 sym = get_sym_ref(type, sec, addr, size);
6447 vpushsym(type, sym);
6449 /* patch symbol weakness */
6450 if (type->t & VT_WEAK)
6451 weaken_symbol(sym);
6452 apply_visibility(sym, type);
6453 #ifdef CONFIG_TCC_BCHECK
6454 /* handles bounds now because the symbol must be defined
6455 before for the relocation */
6456 if (tcc_state->do_bounds_check) {
6457 addr_t *bounds_ptr;
6459 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6460 /* then add global bound info */
6461 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6462 bounds_ptr[0] = 0; /* relocated */
6463 bounds_ptr[1] = size;
6465 #endif
6467 if (type->t & VT_VLA) {
6468 int a;
6470 /* save current stack pointer */
6471 if (vlas_in_scope == 0) {
6472 if (vla_sp_root_loc == -1)
6473 vla_sp_root_loc = (loc -= PTR_SIZE);
6474 gen_vla_sp_save(vla_sp_root_loc);
6477 vla_runtime_type_size(type, &a);
6478 gen_vla_alloc(type, a);
6479 gen_vla_sp_save(addr);
6480 vla_sp_loc = addr;
6481 vlas_in_scope++;
6482 } else if (has_init) {
6483 decl_initializer(type, sec, addr, 1, 0);
6484 /* patch flexible array member size back to -1, */
6485 /* for possible subsequent similar declarations */
6486 if (flexible_array)
6487 flexible_array->type.ref->c = -1;
6489 no_alloc: ;
6490 /* restore parse state if needed */
6491 if (init_str) {
6492 end_macro();
6493 restore_parse_state(&saved_parse_state);
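/* Illustrative note (annotation): an initialized flexible array member
   enlarges the allocated size, and its recorded length is patched back to
   -1 afterwards so later declarations of the same type are unaffected:

       struct V { int n; char data[]; };
       static struct V v = { 2, "hi" };   // room for the initialized data
                                          // bytes is added to the object
*/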
6497 static void put_func_debug(Sym *sym)
6499 char buf[512];
6501 /* stabs info */
6502 /* XXX: we put here a dummy type */
6503 snprintf(buf, sizeof(buf), "%s:%c1",
6504 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6505 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6506 cur_text_section, sym->c);
6507 /* //gr gdb wants a line at the function */
6508 put_stabn(N_SLINE, 0, file->line_num, 0);
6509 last_ind = 0;
6510 last_line_num = 0;
6513 /* parse an old style function declaration list */
6514 /* XXX: check multiple parameter */
6515 static void func_decl_list(Sym *func_sym)
6517 AttributeDef ad;
6518 int v;
6519 Sym *s;
6520 CType btype, type;
6522 /* parse each declaration */
6523 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6524 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6525 if (!parse_btype(&btype, &ad))
6526 expect("declaration list");
6527 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6528 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6529 tok == ';') {
6530 /* we accept no variable after */
6531 } else {
6532 for(;;) {
6533 type = btype;
6534 type_decl(&type, &ad, &v, TYPE_DIRECT);
6535 /* find parameter in function parameter list */
6536 s = func_sym->next;
6537 while (s != NULL) {
6538 if ((s->v & ~SYM_FIELD) == v)
6539 goto found;
6540 s = s->next;
6542 tcc_error("declaration for parameter '%s' but no such parameter",
6543 get_tok_str(v, NULL));
6544 found:
6545 /* check that no storage specifier except 'register' was given */
6546 if (type.t & VT_STORAGE)
6547 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6548 convert_parameter_type(&type);
6549 /* we can add the type (NOTE: it could be local to the function) */
6550 s->type = type;
6551 /* accept other parameters */
6552 if (tok == ',')
6553 next();
6554 else
6555 break;
6558 skip(';');
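/* Illustrative note (annotation): the old-style (K&R) declaration list
   parsed here:

       int add(a, b)
           int a;
           int b;            // each declaration updates the matching
                             // parameter in the function's type
       {
           return a + b;
       }
*/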
6562 /* parse a function defined by symbol 'sym' and generate its code in
6563 'cur_text_section' */
6564 static void gen_function(Sym *sym)
6566 int saved_nocode_wanted = nocode_wanted;
6568 nocode_wanted = 0;
6569 ind = cur_text_section->data_offset;
6570 /* NOTE: we patch the symbol size later */
6571 put_extern_sym(sym, cur_text_section, ind, 0);
6572 funcname = get_tok_str(sym->v, NULL);
6573 func_ind = ind;
6574 /* Initialize VLA state */
6575 vla_sp_loc = -1;
6576 vla_sp_root_loc = -1;
6577 /* put debug symbol */
6578 if (tcc_state->do_debug)
6579 put_func_debug(sym);
6581 /* push a dummy symbol to enable local sym storage */
6582 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6583 local_scope = 1; /* for function parameters */
6584 gfunc_prolog(&sym->type);
6585 local_scope = 0;
6587 rsym = 0;
6588 block(NULL, NULL, 0);
6589 gsym(rsym);
6590 gfunc_epilog();
6591 cur_text_section->data_offset = ind;
6592 label_pop(&global_label_stack, NULL);
6593 /* reset local stack */
6594 local_scope = 0;
6595 sym_pop(&local_stack, NULL);
6596 /* end of function */
6597 /* patch symbol size */
6598 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6599 ind - func_ind;
6600 /* patch symbol weakness (this definition overrules any prototype) */
6601 if (sym->type.t & VT_WEAK)
6602 weaken_symbol(sym);
6603 apply_visibility(sym, &sym->type);
6604 if (tcc_state->do_debug) {
6605 put_stabn(N_FUN, 0, 0, ind - func_ind);
6607 /* It's better to crash than to generate wrong code */
6608 cur_text_section = NULL;
6609 funcname = ""; /* for safety */
6610 func_vt.t = VT_VOID; /* for safety */
6611 func_var = 0; /* for safety */
6612 ind = 0; /* for safety */
6613 nocode_wanted = saved_nocode_wanted;
6614 check_vstack();
6617 static void gen_inline_functions(TCCState *s)
6619 Sym *sym;
6620 int inline_generated, i, ln;
6621 struct InlineFunc *fn;
6623 ln = file->line_num;
6624 /* iterate while inline functions are referenced */
6625 for(;;) {
6626 inline_generated = 0;
6627 for (i = 0; i < s->nb_inline_fns; ++i) {
6628 fn = s->inline_fns[i];
6629 sym = fn->sym;
6630 if (sym && sym->c) {
6631 /* the function was used: generate its code and
6632 convert it to a normal function */
6633 fn->sym = NULL;
6634 if (file)
6635 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6636 sym->r = VT_SYM | VT_CONST;
6637 sym->type.t &= ~VT_INLINE;
6639 begin_macro(fn->func_str, 1);
6640 next();
6641 cur_text_section = text_section;
6642 gen_function(sym);
6643 end_macro();
6645 inline_generated = 1;
6648 if (!inline_generated)
6649 break;
6651 file->line_num = ln;
6654 ST_FUNC void free_inline_functions(TCCState *s)
6656 int i;
6657 /* free tokens of unused inline functions */
6658 for (i = 0; i < s->nb_inline_fns; ++i) {
6659 struct InlineFunc *fn = s->inline_fns[i];
6660 if (fn->sym)
6661 tok_str_free(fn->func_str);
6663 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6666 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6667 static int decl0(int l, int is_for_loop_init)
6669 int v, has_init, r;
6670 CType type, btype;
6671 Sym *sym;
6672 AttributeDef ad;
6674 while (1) {
6675 if (!parse_btype(&btype, &ad)) {
6676 if (is_for_loop_init)
6677 return 0;
6678 /* skip redundant ';' */
6679 /* XXX: find more elegant solution */
6680 if (tok == ';') {
6681 next();
6682 continue;
6683 }
6684 if (l == VT_CONST &&
6685 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6686 /* global asm block */
6687 asm_global_instr();
6688 continue;
6689 }
6690 /* special test for old K&R protos without explicit int
6691 type. Only accepted when defining global data */
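/* Illustrative (hypothetical) input for this path: at file scope
       nvals;
       legacy_fn();
   are accepted as if they were written 'int nvals;' and 'int legacy_fn();';
   at block scope (l == VT_LOCAL) this is rejected and the loop breaks. */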
6692 if (l == VT_LOCAL || tok < TOK_UIDENT)
6693 break;
6694 btype.t = VT_INT;
6695 }
6696 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6697 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6698 tok == ';') {
6699 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6700 int v = btype.ref->v;
6701 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6702 tcc_warning("unnamed struct/union that defines no instances");
6703 }
6704 next();
6705 continue;
6706 }
6707 while (1) { /* iterate thru each declaration */
6708 type = btype;
6709 /* If the base type itself was an array type of unspecified
6710 size (like in 'typedef int arr[]; arr x = {1};') then
6711 we will overwrite the unknown size by the real one for
6712 this decl. We need to unshare the ref symbol holding
6713 that size. */
6714 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6715 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6716 }
6717 type_decl(&type, &ad, &v, TYPE_DIRECT);
6718 #if 0
6719 {
6720 char buf[500];
6721 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
6722 printf("type = '%s'\n", buf);
6723 }
6724 #endif
6725 if ((type.t & VT_BTYPE) == VT_FUNC) {
6726 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6727 tcc_error("function without file scope cannot be static");
6728 }
6729 /* if old style function prototype, we accept a
6730 declaration list */
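/* Illustrative (hypothetical) K&R-style definition handled here, where the
   parameter types appear after the ')' as a declaration list:
       int add(a, b)
           int a;
           int b;
       { return a + b; }
*/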
6731 sym = type.ref;
6732 if (sym->c == FUNC_OLD)
6733 func_decl_list(sym);
6734 }
6736 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6737 ad.asm_label = asm_label_instr();
6738 /* parse one last attribute list, after asm label */
6739 parse_attribute(&ad);
6740 if (tok == '{')
6741 expect(";");
6742 }
6744 if (ad.a.weak)
6745 type.t |= VT_WEAK;
6746 #ifdef TCC_TARGET_PE
6747 if (ad.a.func_import)
6748 type.t |= VT_IMPORT;
6749 if (ad.a.func_export)
6750 type.t |= VT_EXPORT;
6751 #endif
6752 type.t |= ad.a.visibility << VT_VIS_SHIFT;
6754 if (tok == '{') {
6755 if (l == VT_LOCAL)
6756 tcc_error("cannot use local functions");
6757 if ((type.t & VT_BTYPE) != VT_FUNC)
6758 expect("function definition");
6760 /* reject abstract declarators in function definition */
6761 sym = type.ref;
6762 while ((sym = sym->next) != NULL)
6763 if (!(sym->v & ~SYM_FIELD))
6764 expect("identifier");
6766 /* XXX: cannot do better now: convert extern inline to static inline */
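/* Illustrative (hypothetical) input: a GNU89-style
       extern inline int twice(int x) { return 2 * x; }
   is treated from here on as if it had been written
       static inline int twice(int x) { return 2 * x; }
*/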
6767 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
6768 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6770 sym = sym_find(v);
6771 if (sym) {
6772 Sym *ref;
6773 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
6774 goto func_error1;
6776 ref = sym->type.ref;
6777 if (0 == ref->a.func_proto)
6778 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6780 /* use func_call from prototype if not defined */
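/* Illustrative (hypothetical, i386/PE-style) example: given the prototype
       int __attribute__((stdcall)) cb(int n);
   a definition later written without the attribute,
       int cb(int n) { return n + 1; }
   still inherits the stdcall calling convention through this copy. */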
6781 if (ref->a.func_call != FUNC_CDECL
6782 && type.ref->a.func_call == FUNC_CDECL)
6783 type.ref->a.func_call = ref->a.func_call;
6785 /* use export from prototype */
6786 if (ref->a.func_export)
6787 type.ref->a.func_export = 1;
6789 /* use static from prototype */
6790 if (sym->type.t & VT_STATIC)
6791 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6793 /* If the definition has no visibility use the
6794 one from prototype. */
6795 if (! (type.t & VT_VIS_MASK))
6796 type.t |= sym->type.t & VT_VIS_MASK;
6798 if (!is_compatible_types(&sym->type, &type)) {
6799 func_error1:
6800 tcc_error("incompatible types for redefinition of '%s'",
6801 get_tok_str(v, NULL));
6802 }
6803 type.ref->a.func_proto = 0;
6804 /* if symbol is already defined, then put complete type */
6805 sym->type = type;
6806 } else {
6807 /* put function symbol */
6808 sym = global_identifier_push(v, type.t, 0);
6809 sym->type.ref = type.ref;
6810 }
6812 /* static inline functions are just recorded as a kind
6813 of macro. Their code will be emitted at the end of
6814 the compilation unit only if they are used */
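/* Illustrative (hypothetical) input:
       static inline int sq(int x) { return x * x; }
   no code is emitted at this point; the body is saved as a token string and
   is only compiled by gen_inline_functions() if sq() gets referenced later
   in this compilation unit. */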
6815 if ((type.t & (VT_INLINE | VT_STATIC)) ==
6816 (VT_INLINE | VT_STATIC)) {
6817 int block_level;
6818 struct InlineFunc *fn;
6819 const char *filename;
6821 filename = file ? file->filename : "";
6822 fn = tcc_malloc(sizeof *fn + strlen(filename));
6823 strcpy(fn->filename, filename);
6824 fn->sym = sym;
6825 fn->func_str = tok_str_alloc();
6827 block_level = 0;
6828 for(;;) {
6829 int t;
6830 if (tok == TOK_EOF)
6831 tcc_error("unexpected end of file");
6832 tok_str_add_tok(fn->func_str);
6833 t = tok;
6834 next();
6835 if (t == '{') {
6836 block_level++;
6837 } else if (t == '}') {
6838 block_level--;
6839 if (block_level == 0)
6840 break;
6841 }
6842 }
6843 tok_str_add(fn->func_str, -1);
6844 tok_str_add(fn->func_str, 0);
6845 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
6847 } else {
6848 /* compute text section */
6849 cur_text_section = ad.section;
6850 if (!cur_text_section)
6851 cur_text_section = text_section;
6852 sym->r = VT_SYM | VT_CONST;
6853 gen_function(sym);
6854 }
6855 break;
6856 } else {
6857 if (btype.t & VT_TYPEDEF) {
6858 /* save typedefed type */
6859 /* XXX: test storage specifiers ? */
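/* Illustrative (hypothetical) behaviour of the check below within one scope:
       typedef int T;
       typedef int T;     accepted again (compatible redefinition)
       typedef long T;    "incompatible redefinition of 'T'"
*/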
6860 sym = sym_find(v);
6861 if (sym && sym->scope == local_scope) {
6862 if (!is_compatible_types(&sym->type, &type)
6863 || !(sym->type.t & VT_TYPEDEF))
6864 tcc_error("incompatible redefinition of '%s'",
6865 get_tok_str(v, NULL));
6866 sym->type = type;
6867 } else {
6868 sym = sym_push(v, &type, 0, 0);
6869 }
6870 sym->a = ad.a;
6871 sym->type.t |= VT_TYPEDEF;
6872 } else {
6873 r = 0;
6874 if ((type.t & VT_BTYPE) == VT_FUNC) {
6875 /* external function definition */
6876 /* specific case for func_call attribute */
6877 ad.a.func_proto = 1;
6878 type.ref->a = ad.a;
6879 } else if (!(type.t & VT_ARRAY)) {
6880 /* not lvalue if array */
6881 r |= lvalue_type(type.t);
6882 }
6883 has_init = (tok == '=');
6884 if (has_init && (type.t & VT_VLA))
6885 tcc_error("variable length array cannot be initialized");
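/* Illustrative (hypothetical) input rejected just above:
       int n = 3;
       int a[n] = { 1, 2, 3 };
   whereas 'int a[n];' without an initializer is accepted. */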
6886 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
6887 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
6888 !has_init && l == VT_CONST && type.ref->c < 0)) {
6889 /* external variable or function */
6890 /* NOTE: as GCC, uninitialized global static
6891 arrays of null size are considered as
6892 extern */
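/* Illustrative (hypothetical) input taking this branch at file scope:
       static int tab[];
   it is recorded through external_sym() like a tentative/extern array
   instead of being allocated with an unknown size. */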
6893 sym = external_sym(v, &type, r);
6894 sym->asm_label = ad.asm_label;
6896 if (ad.alias_target) {
6897 Section tsec;
6898 Elf32_Sym *esym;
6899 Sym *alias_target;
6901 alias_target = sym_find(ad.alias_target);
6902 if (!alias_target || !alias_target->c)
6903 tcc_error("unsupported forward __alias__ attribute");
6904 esym = &((Elf32_Sym *)symtab_section->data)[alias_target->c];
6905 tsec.sh_num = esym->st_shndx;
6906 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
6907 }
6908 } else {
6909 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
6910 if (type.t & VT_STATIC)
6911 r |= VT_CONST;
6912 else
6913 r |= l;
6914 if (has_init)
6915 next();
6916 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
6917 }
6918 }
6919 if (tok != ',') {
6920 if (is_for_loop_init)
6921 return 1;
6922 skip(';');
6923 break;
6924 }
6925 next();
6926 }
6927 ad.a.aligned = 0;
6928 }
6929 }
6930 return 0;
6931 }
6933 ST_FUNC void decl(int l)
6934 {
6935 decl0(l, 0);
6936 }
6938 /* ------------------------------------------------------------------------- */