1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack location holding the saved stack pointer, updated whenever the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
72 static void gen_cast(CType *type);
73 static inline CType *pointed_type(CType *type);
74 static int is_compatible_types(CType *type1, CType *type2);
75 static int parse_btype(CType *type, AttributeDef *ad);
76 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
77 static void parse_expr_type(CType *type);
78 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
79 static void block(int *bsym, int *csym, int is_expr);
80 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
81 static int decl0(int l, int is_for_loop_init);
82 static void expr_eq(void);
83 static void expr_lor_const(void);
84 static void unary_type(CType *type);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static void expr_type(CType *type);
90 static inline int64_t expr_const64(void);
91 ST_FUNC void vpush64(int ty, unsigned long long v);
92 ST_FUNC void vpush(CType *type);
93 ST_FUNC int gvtst(int inv, int t);
94 ST_FUNC int is_btype_size(int bt);
95 static void gen_inline_functions(TCCState *s);
97 ST_INLN int is_float(int t)
99 int bt;
100 bt = t & VT_BTYPE;
101 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
104 /* we use our own 'finite' function to avoid potential problems with
105 non-standard math libs */
106 /* XXX: endianness dependent */
107 ST_FUNC int ieee_finite(double d)
109 int p[4];
110 memcpy(p, &d, sizeof(double));
111 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
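/* How the bit trick above works (a sketch, assuming a little-endian IEEE-754
   double so that p[1] holds the sign, the 11-bit exponent and the top of the
   mantissa): OR-ing with 0x800fffff forces every bit except the exponent
   field to 1; adding 1 then carries through the mantissa bits into the
   exponent field, and the carry reaches bit 31 (clearing it) exactly when the
   exponent is already all ones, i.e. for Inf/NaN.  The final >> 31 therefore
   yields 1 for finite values and 0 otherwise. */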
114 ST_FUNC void test_lvalue(void)
116 if (!(vtop->r & VT_LVAL))
117 expect("lvalue");
120 ST_FUNC void check_vstack(void)
122 if (pvtop != vtop)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
129 #if 0
130 void pv (const char *lbl, int a, int b)
132 int i;
133 for (i = a; i < a + b; ++i) {
134 SValue *p = &vtop[-i];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
139 #endif
141 /* ------------------------------------------------------------------------- */
142 ST_FUNC void tccgen_start(TCCState *s1)
144 cur_text_section = NULL;
145 funcname = "";
146 anon_sym = SYM_FIRST_ANOM;
147 section_sym = 0;
148 nocode_wanted = 1;
150 /* define some often used types */
151 int_type.t = VT_INT;
152 char_pointer_type.t = VT_BYTE;
153 mk_pointer(&char_pointer_type);
154 #if PTR_SIZE == 4
155 size_type.t = VT_INT;
156 #else
157 size_type.t = VT_LLONG;
158 #endif
159 func_old_type.t = VT_FUNC;
160 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
162 if (s1->do_debug) {
163 char buf[512];
165 /* file info: full path + filename */
166 section_sym = put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
168 text_section->sh_num, NULL);
169 getcwd(buf, sizeof(buf));
170 #ifdef _WIN32
171 normalize_slashes(buf);
172 #endif
173 pstrcat(buf, sizeof(buf), "/");
174 put_stabs_r(buf, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
176 put_stabs_r(file->filename, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
179 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
180 symbols can be safely used */
181 put_elf_sym(symtab_section, 0, 0,
182 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
183 SHN_ABS, file->filename);
185 #ifdef TCC_TARGET_ARM
186 arm_init(s1);
187 #endif
190 ST_FUNC void tccgen_end(TCCState *s1)
192 gen_inline_functions(s1);
193 check_vstack();
194 /* end of translation unit info */
195 if (s1->do_debug) {
196 put_stabs_r(NULL, N_SO, 0, 0,
197 text_section->data_offset, text_section, section_sym);
201 /* ------------------------------------------------------------------------- */
202 /* update sym->c so that it points to an external symbol in section
203 'section' with value 'value' */
205 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
206 addr_t value, unsigned long size,
207 int can_add_underscore)
209 int sym_type, sym_bind, sh_num, info, other;
210 ElfW(Sym) *esym;
211 const char *name;
212 char buf1[256];
214 #ifdef CONFIG_TCC_BCHECK
215 char buf[32];
216 #endif
218 if (section == NULL)
219 sh_num = SHN_UNDEF;
220 else if (section == SECTION_ABS)
221 sh_num = SHN_ABS;
222 else
223 sh_num = section->sh_num;
225 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
226 sym_type = STT_FUNC;
227 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
228 sym_type = STT_NOTYPE;
229 } else {
230 sym_type = STT_OBJECT;
233 if (sym->type.t & VT_STATIC)
234 sym_bind = STB_LOCAL;
235 else {
236 if (sym->type.t & VT_WEAK)
237 sym_bind = STB_WEAK;
238 else
239 sym_bind = STB_GLOBAL;
242 if (!sym->c) {
243 name = get_tok_str(sym->v, NULL);
244 #ifdef CONFIG_TCC_BCHECK
245 if (tcc_state->do_bounds_check) {
246 /* XXX: avoid doing that for statics ? */
247 /* if bounds checking is activated, we change some function
248 names by adding the "__bound_" prefix */
249 switch(sym->v) {
250 #ifdef TCC_TARGET_PE
251 /* XXX: we rely only on malloc hooks */
252 case TOK_malloc:
253 case TOK_free:
254 case TOK_realloc:
255 case TOK_memalign:
256 case TOK_calloc:
257 #endif
258 case TOK_memcpy:
259 case TOK_memmove:
260 case TOK_memset:
261 case TOK_strlen:
262 case TOK_strcpy:
263 case TOK_alloca:
264 strcpy(buf, "__bound_");
265 strcat(buf, name);
266 name = buf;
267 break;
270 #endif
271 other = 0;
273 #ifdef TCC_TARGET_PE
274 if (sym->type.t & VT_EXPORT)
275 other |= ST_PE_EXPORT;
276 if (sym_type == STT_FUNC && sym->type.ref) {
277 Sym *ref = sym->type.ref;
278 if (ref->a.func_export)
279 other |= ST_PE_EXPORT;
280 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
281 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
282 name = buf1;
283 other |= ST_PE_STDCALL;
284 can_add_underscore = 0;
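/* Illustration (hypothetical declaration, not from this file): for
   "int __stdcall f(int a, int b)" on a 32-bit PE target, func_args is 2
   and PTR_SIZE is 4, so the symbol is emitted as "_f@8", matching the
   usual Win32 stdcall name decoration. */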
286 } else {
287 if (find_elf_sym(tcc_state->dynsymtab_section, name))
288 other |= ST_PE_IMPORT;
289 if (sym->type.t & VT_IMPORT)
290 other |= ST_PE_IMPORT;
292 #else
293 if (! (sym->type.t & VT_STATIC))
294 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
295 #endif
296 if (tcc_state->leading_underscore && can_add_underscore) {
297 buf1[0] = '_';
298 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
299 name = buf1;
301 if (sym->asm_label) {
302 name = get_tok_str(sym->asm_label, NULL);
304 info = ELFW(ST_INFO)(sym_bind, sym_type);
305 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
306 } else {
307 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
308 esym->st_value = value;
309 esym->st_size = size;
310 esym->st_shndx = sh_num;
314 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
315 addr_t value, unsigned long size)
317 put_extern_sym2(sym, section, value, size, 1);
320 /* add a new relocation entry to symbol 'sym' in section 's' */
321 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
322 addr_t addend)
324 int c = 0;
325 if (sym) {
326 if (0 == sym->c)
327 put_extern_sym(sym, NULL, 0, 0);
328 c = sym->c;
330 /* now we can add ELF relocation info */
331 put_elf_reloca(symtab_section, s, offset, type, c, addend);
334 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
336 greloca(s, sym, offset, type, 0);
339 /* ------------------------------------------------------------------------- */
340 /* symbol allocator */
341 static Sym *__sym_malloc(void)
343 Sym *sym_pool, *sym, *last_sym;
344 int i;
346 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
347 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
349 last_sym = sym_free_first;
350 sym = sym_pool;
351 for(i = 0; i < SYM_POOL_NB; i++) {
352 sym->next = last_sym;
353 last_sym = sym;
354 sym++;
356 sym_free_first = last_sym;
357 return last_sym;
360 static inline Sym *sym_malloc(void)
362 Sym *sym;
363 #ifndef SYM_DEBUG
364 sym = sym_free_first;
365 if (!sym)
366 sym = __sym_malloc();
367 sym_free_first = sym->next;
368 return sym;
369 #else
370 sym = tcc_malloc(sizeof(Sym));
371 return sym;
372 #endif
375 ST_INLN void sym_free(Sym *sym)
377 #ifndef SYM_DEBUG
378 sym->next = sym_free_first;
379 sym_free_first = sym;
380 #else
381 tcc_free(sym);
382 #endif
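/* The allocator above carves Syms out of SYM_POOL_NB-sized chunks and
   recycles them through an intrusive free list threaded via sym->next.
   A minimal standalone sketch of the same pattern (illustration only,
   kept disabled; the names and the chunk size are made up): */
#if 0
#include <stdlib.h>

typedef struct PoolNode { struct PoolNode *next; /* payload would go here */ } PoolNode;
static PoolNode *pool_free_first;

static PoolNode *pool_alloc(void)
{
    PoolNode *n = pool_free_first;
    if (!n) {
        /* refill: allocate a chunk and thread every slot onto the free list */
        PoolNode *chunk = malloc(64 * sizeof(PoolNode));
        int i;
        for (i = 0; i < 64; i++) {
            chunk[i].next = pool_free_first;
            pool_free_first = &chunk[i];
        }
        n = pool_free_first;
    }
    pool_free_first = n->next;
    return n;
}

static void pool_free(PoolNode *n)
{
    /* freeing just pushes the node back onto the free list */
    n->next = pool_free_first;
    pool_free_first = n;
}
#endif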
385 /* push, without hashing */
386 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
388 Sym *s;
390 s = sym_malloc();
391 s->scope = 0;
392 s->v = v;
393 s->type.t = t;
394 s->type.ref = NULL;
395 #ifdef _WIN64
396 s->d = NULL;
397 #endif
398 s->c = c;
399 s->next = NULL;
400 /* add in stack */
401 s->prev = *ps;
402 *ps = s;
403 return s;
406 /* find a symbol and return its associated structure. 's' is the top
407 of the symbol stack */
408 ST_FUNC Sym *sym_find2(Sym *s, int v)
410 while (s) {
411 if (s->v == v)
412 return s;
413 else if (s->v == -1)
414 return NULL;
415 s = s->prev;
417 return NULL;
420 /* structure lookup */
421 ST_INLN Sym *struct_find(int v)
423 v -= TOK_IDENT;
424 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
425 return NULL;
426 return table_ident[v]->sym_struct;
429 /* find an identifier */
430 ST_INLN Sym *sym_find(int v)
432 v -= TOK_IDENT;
433 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
434 return NULL;
435 return table_ident[v]->sym_identifier;
438 /* push a given symbol on the symbol stack */
439 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
441 Sym *s, **ps;
442 TokenSym *ts;
444 if (local_stack)
445 ps = &local_stack;
446 else
447 ps = &global_stack;
448 s = sym_push2(ps, v, type->t, c);
449 s->type.ref = type->ref;
450 s->r = r;
451 /* don't record fields or anonymous symbols */
452 /* XXX: simplify */
453 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
454 /* record symbol in token array */
455 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
456 if (v & SYM_STRUCT)
457 ps = &ts->sym_struct;
458 else
459 ps = &ts->sym_identifier;
460 s->prev_tok = *ps;
461 *ps = s;
462 s->scope = local_scope;
463 if (s->prev_tok && s->prev_tok->scope == s->scope)
464 tcc_error("redeclaration of '%s'",
465 get_tok_str(v & ~SYM_STRUCT, NULL));
467 return s;
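/* Note: a pushed Sym ends up on two lists at once: the scope stack
   (s->prev, walked by sym_pop) and the per-identifier chain hanging off
   table_ident[] (s->prev_tok, walked by sym_find/struct_find).  That is
   what keeps lookup O(1) per identifier while still letting a whole scope
   be popped in one pass. */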
470 /* push a global identifier */
471 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
473 Sym *s, **ps;
474 s = sym_push2(&global_stack, v, t, c);
475 /* don't record anonymous symbol */
476 if (v < SYM_FIRST_ANOM) {
477 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
478 /* modify the topmost local identifier, so that
479 sym_identifier will point to 's' when popped */
480 while (*ps != NULL)
481 ps = &(*ps)->prev_tok;
482 s->prev_tok = NULL;
483 *ps = s;
485 return s;
488 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
489 pop them yet from the list, but do remove them from the token array. */
490 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
492 Sym *s, *ss, **ps;
493 TokenSym *ts;
494 int v;
496 s = *ptop;
497 while(s != b) {
498 ss = s->prev;
499 v = s->v;
500 /* remove symbol in token array */
501 /* XXX: simplify */
502 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
503 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
504 if (v & SYM_STRUCT)
505 ps = &ts->sym_struct;
506 else
507 ps = &ts->sym_identifier;
508 *ps = s->prev_tok;
510 if (!keep)
511 sym_free(s);
512 s = ss;
514 if (!keep)
515 *ptop = b;
518 static void weaken_symbol(Sym *sym)
520 sym->type.t |= VT_WEAK;
521 if (sym->c > 0) {
522 int esym_type;
523 ElfW(Sym) *esym;
525 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
526 esym_type = ELFW(ST_TYPE)(esym->st_info);
527 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
531 static void apply_visibility(Sym *sym, CType *type)
533 int vis = sym->type.t & VT_VIS_MASK;
534 int vis2 = type->t & VT_VIS_MASK;
535 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
536 vis = vis2;
537 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
539 else
540 vis = (vis < vis2) ? vis : vis2;
541 sym->type.t &= ~VT_VIS_MASK;
542 sym->type.t |= vis;
544 if (sym->c > 0) {
545 ElfW(Sym) *esym;
547 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
548 vis >>= VT_VIS_SHIFT;
549 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
553 /* ------------------------------------------------------------------------- */
555 ST_FUNC void swap(int *p, int *q)
557 int t;
558 t = *p;
559 *p = *q;
560 *q = t;
563 static void vsetc(CType *type, int r, CValue *vc)
565 int v;
567 if (vtop >= vstack + (VSTACK_SIZE - 1))
568 tcc_error("memory full (vstack)");
569 /* we cannot keep values in the cpu flags if other instructions are generated. Also
570 avoid leaving VT_JMP anywhere except on the top of the stack
571 because it would complicate the code generator. */
572 if (vtop >= vstack) {
573 v = vtop->r & VT_VALMASK;
574 if (v == VT_CMP || (v & ~1) == VT_JMP)
575 gv(RC_INT);
577 vtop++;
578 vtop->type = *type;
579 vtop->r = r;
580 vtop->r2 = VT_CONST;
581 vtop->c = *vc;
582 vtop->sym = NULL;
585 /* push constant of type "type" with useless value */
586 ST_FUNC void vpush(CType *type)
588 CValue cval;
589 vsetc(type, VT_CONST, &cval);
592 /* push integer constant */
593 ST_FUNC void vpushi(int v)
595 CValue cval;
596 cval.i = v;
597 vsetc(&int_type, VT_CONST, &cval);
600 /* push a pointer sized constant */
601 static void vpushs(addr_t v)
603 CValue cval;
604 cval.i = v;
605 vsetc(&size_type, VT_CONST, &cval);
608 /* push arbitrary 64bit constant */
609 ST_FUNC void vpush64(int ty, unsigned long long v)
611 CValue cval;
612 CType ctype;
613 ctype.t = ty;
614 ctype.ref = NULL;
615 cval.i = v;
616 vsetc(&ctype, VT_CONST, &cval);
619 /* push long long constant */
620 static inline void vpushll(long long v)
622 vpush64(VT_LLONG, v);
625 /* push a symbol value of TYPE */
626 static inline void vpushsym(CType *type, Sym *sym)
628 CValue cval;
629 cval.i = 0;
630 vsetc(type, VT_CONST | VT_SYM, &cval);
631 vtop->sym = sym;
634 /* Return a static symbol pointing to a section */
635 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
637 int v;
638 Sym *sym;
640 v = anon_sym++;
641 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
642 sym->type.ref = type->ref;
643 sym->r = VT_CONST | VT_SYM;
644 put_extern_sym(sym, sec, offset, size);
645 return sym;
648 /* push a reference to a section offset by adding a dummy symbol */
649 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
651 vpushsym(type, get_sym_ref(type, sec, offset, size));
654 /* define a new external reference to a symbol 'v' of type 'u' */
655 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
657 Sym *s;
659 s = sym_find(v);
660 if (!s) {
661 /* push forward reference */
662 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
663 s->type.ref = type->ref;
664 s->r = r | VT_CONST | VT_SYM;
666 return s;
669 /* define a new external reference to a symbol 'v' */
670 static Sym *external_sym(int v, CType *type, int r)
672 Sym *s;
674 s = sym_find(v);
675 if (!s) {
676 /* push forward reference */
677 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
678 s->type.t |= VT_EXTERN;
679 } else if (s->type.ref == func_old_type.ref) {
680 s->type.ref = type->ref;
681 s->r = r | VT_CONST | VT_SYM;
682 s->type.t |= VT_EXTERN;
683 } else if (!is_compatible_types(&s->type, type)) {
684 tcc_error("incompatible types for redefinition of '%s'",
685 get_tok_str(v, NULL));
687 /* Merge some storage attributes. */
688 if (type->t & VT_WEAK)
689 weaken_symbol(s);
691 if (type->t & VT_VIS_MASK)
692 apply_visibility(s, type);
694 return s;
697 /* push a reference to global symbol v */
698 ST_FUNC void vpush_global_sym(CType *type, int v)
700 vpushsym(type, external_global_sym(v, type, 0));
703 ST_FUNC void vset(CType *type, int r, long v)
705 CValue cval;
707 cval.i = v;
708 vsetc(type, r, &cval);
711 static void vseti(int r, int v)
713 CType type;
714 type.t = VT_INT;
715 type.ref = 0;
716 vset(&type, r, v);
719 ST_FUNC void vswap(void)
721 SValue tmp;
722 /* we cannot keep values in the cpu flags if other instructions are generated. Also
723 avoid leaving VT_JMP anywhere except on the top of the stack
724 because it would complicate the code generator. */
725 if (vtop >= vstack) {
726 int v = vtop->r & VT_VALMASK;
727 if (v == VT_CMP || (v & ~1) == VT_JMP)
728 gv(RC_INT);
730 tmp = vtop[0];
731 vtop[0] = vtop[-1];
732 vtop[-1] = tmp;
734 /* XXX: +2% overall speed possible with optimized memswap
736 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
740 ST_FUNC void vpushv(SValue *v)
742 if (vtop >= vstack + (VSTACK_SIZE - 1))
743 tcc_error("memory full (vstack)");
744 vtop++;
745 *vtop = *v;
748 static void vdup(void)
750 vpushv(vtop);
753 /* save registers up to (vtop - n) stack entry */
754 ST_FUNC void save_regs(int n)
756 SValue *p, *p1;
757 for(p = vstack, p1 = vtop - n; p <= p1; p++)
758 save_reg(p->r);
761 /* save r to the memory stack, and mark it as being free */
762 ST_FUNC void save_reg(int r)
764 save_reg_upstack(r, 0);
767 /* save r to the memory stack, and mark it as being free,
768 if seen up to (vtop - n) stack entry */
769 ST_FUNC void save_reg_upstack(int r, int n)
771 int l, saved, size, align;
772 SValue *p, *p1, sv;
773 CType *type;
775 if ((r &= VT_VALMASK) >= VT_CONST)
776 return;
778 /* modify all stack values */
779 saved = 0;
780 l = 0;
781 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
782 if ((p->r & VT_VALMASK) == r ||
783 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
784 /* must save value on stack if not already done */
785 if (!saved) {
786 /* NOTE: must reload 'r' because r might be equal to r2 */
787 r = p->r & VT_VALMASK;
788 /* store register in the stack */
789 type = &p->type;
790 if ((p->r & VT_LVAL) ||
791 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
792 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
793 type = &char_pointer_type;
794 #else
795 type = &int_type;
796 #endif
797 size = type_size(type, &align);
798 loc = (loc - size) & -align;
799 sv.type.t = type->t;
800 sv.r = VT_LOCAL | VT_LVAL;
801 sv.c.i = loc;
802 store(r, &sv);
803 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
804 /* x86 specific: need to pop fp register ST0 if saved */
805 if (r == TREG_ST0) {
806 o(0xd8dd); /* fstp %st(0) */
808 #endif
809 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
810 /* special long long case */
811 if ((type->t & VT_BTYPE) == VT_LLONG) {
812 sv.c.i += 4;
813 store(p->r2, &sv);
815 #endif
816 l = loc;
817 saved = 1;
819 /* mark that stack entry as being saved on the stack */
820 if (p->r & VT_LVAL) {
821 /* also clear the bounded flag because the
822 relocation address of the function was stored in
823 p->c.i */
824 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
825 } else {
826 p->r = lvalue_type(p->type.t) | VT_LOCAL;
828 p->r2 = VT_CONST;
829 p->c.i = l;
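/* Example of why the "upstack" variant is useful: when gv() loads the second
   word of a long long lvalue it calls save_reg_upstack(vtop->r, 1), which
   spills the register only if some entry below the top still refers to it,
   instead of unconditionally spilling everything with save_regs(1). */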
834 #ifdef TCC_TARGET_ARM
835 /* find a register of class 'rc2' with at most one reference on stack.
836 * If none, call get_reg(rc) */
837 ST_FUNC int get_reg_ex(int rc, int rc2)
839 int r;
840 SValue *p;
842 for(r=0;r<NB_REGS;r++) {
843 if (reg_classes[r] & rc2) {
844 int n;
845 n=0;
846 for(p = vstack; p <= vtop; p++) {
847 if ((p->r & VT_VALMASK) == r ||
848 (p->r2 & VT_VALMASK) == r)
849 n++;
851 if (n <= 1)
852 return r;
855 return get_reg(rc);
857 #endif
859 /* find a free register of class 'rc'. If none, save one register */
860 ST_FUNC int get_reg(int rc)
862 int r;
863 SValue *p;
865 /* find a free register */
866 for(r=0;r<NB_REGS;r++) {
867 if (reg_classes[r] & rc) {
868 for(p=vstack;p<=vtop;p++) {
869 if ((p->r & VT_VALMASK) == r ||
870 (p->r2 & VT_VALMASK) == r)
871 goto notfound;
873 return r;
875 notfound: ;
878 /* no register left : free the first one on the stack (VERY
879 IMPORTANT to start from the bottom to ensure that we don't
880 spill registers used in gen_opi()) */
881 for(p=vstack;p<=vtop;p++) {
882 /* look at second register (if long long) */
883 r = p->r2 & VT_VALMASK;
884 if (r < VT_CONST && (reg_classes[r] & rc))
885 goto save_found;
886 r = p->r & VT_VALMASK;
887 if (r < VT_CONST && (reg_classes[r] & rc)) {
888 save_found:
889 save_reg(r);
890 return r;
893 /* Should never come here */
894 return -1;
897 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
898 if needed */
899 static void move_reg(int r, int s, int t)
901 SValue sv;
903 if (r != s) {
904 save_reg(r);
905 sv.type.t = t;
906 sv.type.ref = NULL;
907 sv.r = s;
908 sv.c.i = 0;
909 load(r, &sv);
913 /* get address of vtop (vtop MUST BE an lvalue) */
914 ST_FUNC void gaddrof(void)
916 if (vtop->r & VT_REF && !nocode_wanted)
917 gv(RC_INT);
918 vtop->r &= ~VT_LVAL;
919 /* tricky: if saved lvalue, then we can go back to lvalue */
920 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
921 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
926 #ifdef CONFIG_TCC_BCHECK
927 /* generate lvalue bound code */
928 static void gbound(void)
930 int lval_type;
931 CType type1;
933 vtop->r &= ~VT_MUSTBOUND;
934 /* if lvalue, then use checking code before dereferencing */
935 if (vtop->r & VT_LVAL) {
936 /* if not VT_BOUNDED value, then make one */
937 if (!(vtop->r & VT_BOUNDED)) {
938 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
939 /* must save type because we must set it to int to get pointer */
940 type1 = vtop->type;
941 vtop->type.t = VT_PTR;
942 gaddrof();
943 vpushi(0);
944 gen_bounded_ptr_add();
945 vtop->r |= lval_type;
946 vtop->type = type1;
948 /* then check for dereferencing */
949 gen_bounded_ptr_deref();
952 #endif
954 /* store vtop in a register belonging to class 'rc'. lvalues are
955 converted to values. Cannot be used if the value cannot be converted
956 to a register value (such as structures). */
957 ST_FUNC int gv(int rc)
959 int r, bit_pos, bit_size, size, align, i;
960 int rc2;
962 /* NOTE: get_reg can modify vstack[] */
963 if (vtop->type.t & VT_BITFIELD) {
964 CType type;
965 int bits = 32;
966 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
967 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
968 /* remove bit field info to avoid loops */
969 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
970 /* cast to int to propagate signedness in following ops */
971 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
972 type.t = VT_LLONG;
973 bits = 64;
974 } else
975 type.t = VT_INT;
976 if((vtop->type.t & VT_UNSIGNED) ||
977 (vtop->type.t & VT_BTYPE) == VT_BOOL)
978 type.t |= VT_UNSIGNED;
979 gen_cast(&type);
980 /* generate shifts */
981 vpushi(bits - (bit_pos + bit_size));
982 gen_op(TOK_SHL);
983 vpushi(bits - bit_size);
984 /* NOTE: transformed to SHR if unsigned */
985 gen_op(TOK_SAR);
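/* worked example (hypothetical field): for a signed int bitfield with
   bit_pos == 3 and bit_size == 5, bits is 32, so the value is shifted
   left by 32 - (3 + 5) = 24 to put the field's top bit at bit 31, then
   arithmetically shifted right by 32 - 5 = 27, which sign-extends the
   5-bit field and brings it down to bit 0 in one go. */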
986 r = gv(rc);
987 } else {
988 if (is_float(vtop->type.t) &&
989 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
990 Sym *sym;
991 int *ptr;
992 unsigned long offset;
993 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
994 CValue check;
995 #endif
997 /* XXX: unify with initializers handling ? */
998 /* CPUs usually cannot use float constants, so we store them
999 generically in data segment */
1000 size = type_size(&vtop->type, &align);
1001 offset = (data_section->data_offset + align - 1) & -align;
1002 data_section->data_offset = offset;
1003 /* XXX: not portable yet */
1004 #if defined(__i386__) || defined(__x86_64__)
1005 /* Zero pad x87 tenbyte long doubles */
1006 if (size == LDOUBLE_SIZE) {
1007 vtop->c.tab[2] &= 0xffff;
1008 #if LDOUBLE_SIZE == 16
1009 vtop->c.tab[3] = 0;
1010 #endif
1012 #endif
1013 ptr = section_ptr_add(data_section, size);
1014 size = size >> 2;
1015 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1016 check.d = 1;
1017 if(check.tab[0])
1018 for(i=0;i<size;i++)
1019 ptr[i] = vtop->c.tab[size-1-i];
1020 else
1021 #endif
1022 for(i=0;i<size;i++)
1023 ptr[i] = vtop->c.tab[i];
1024 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1025 vtop->r |= VT_LVAL | VT_SYM;
1026 vtop->sym = sym;
1027 vtop->c.i = 0;
1029 #ifdef CONFIG_TCC_BCHECK
1030 if (vtop->r & VT_MUSTBOUND)
1031 gbound();
1032 #endif
1034 r = vtop->r & VT_VALMASK;
1035 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1036 #ifndef TCC_TARGET_ARM64
1037 if (rc == RC_IRET)
1038 rc2 = RC_LRET;
1039 #ifdef TCC_TARGET_X86_64
1040 else if (rc == RC_FRET)
1041 rc2 = RC_QRET;
1042 #endif
1043 #endif
1045 /* need to reload if:
1046 - constant
1047 - lvalue (need to dereference pointer)
1048 - already a register, but not in the right class */
1049 if (r >= VT_CONST
1050 || (vtop->r & VT_LVAL)
1051 || !(reg_classes[r] & rc)
1052 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1053 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1054 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1055 #else
1056 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1057 #endif
1060 r = get_reg(rc);
1061 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1062 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1063 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1064 #else
1065 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1066 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1067 unsigned long long ll;
1068 #endif
1069 int r2, original_type;
1070 original_type = vtop->type.t;
1071 /* two register type load : expand to two words
1072 temporarily */
1073 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1074 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1075 /* load constant */
1076 ll = vtop->c.i;
1077 vtop->c.i = ll; /* first word */
1078 load(r, vtop);
1079 vtop->r = r; /* save register value */
1080 vpushi(ll >> 32); /* second word */
1081 } else
1082 #endif
1083 if (vtop->r & VT_LVAL) {
1084 /* We do not want to modify the long long
1085 pointer here, so the safest (and least
1086 efficient) way is to save all the other registers
1087 on the stack. XXX: totally inefficient. */
1088 #if 0
1089 save_regs(1);
1090 #else
1091 /* lvalue_save: save only if used further down the stack */
1092 save_reg_upstack(vtop->r, 1);
1093 #endif
1094 /* load from memory */
1095 vtop->type.t = load_type;
1096 load(r, vtop);
1097 vdup();
1098 vtop[-1].r = r; /* save register value */
1099 /* increment pointer to get second word */
1100 vtop->type.t = addr_type;
1101 gaddrof();
1102 vpushi(load_size);
1103 gen_op('+');
1104 vtop->r |= VT_LVAL;
1105 vtop->type.t = load_type;
1106 } else {
1107 /* move registers */
1108 load(r, vtop);
1109 vdup();
1110 vtop[-1].r = r; /* save register value */
1111 vtop->r = vtop[-1].r2;
1113 /* Allocate second register. Here we rely on the fact that
1114 get_reg() tries first to free r2 of an SValue. */
1115 r2 = get_reg(rc2);
1116 load(r2, vtop);
1117 vpop();
1118 /* write second register */
1119 vtop->r2 = r2;
1120 vtop->type.t = original_type;
1121 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1122 int t1, t;
1123 /* lvalue of scalar type : need to use lvalue type
1124 because of possible cast */
1125 t = vtop->type.t;
1126 t1 = t;
1127 /* compute memory access type */
1128 if (vtop->r & VT_REF)
1129 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1130 t = VT_PTR;
1131 #else
1132 t = VT_INT;
1133 #endif
1134 else if (vtop->r & VT_LVAL_BYTE)
1135 t = VT_BYTE;
1136 else if (vtop->r & VT_LVAL_SHORT)
1137 t = VT_SHORT;
1138 if (vtop->r & VT_LVAL_UNSIGNED)
1139 t |= VT_UNSIGNED;
1140 vtop->type.t = t;
1141 load(r, vtop);
1142 /* restore wanted type */
1143 vtop->type.t = t1;
1144 } else {
1145 /* one register type load */
1146 load(r, vtop);
1149 vtop->r = r;
1150 #ifdef TCC_TARGET_C67
1151 /* uses register pairs for doubles */
1152 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1153 vtop->r2 = r+1;
1154 #endif
1156 return r;
1159 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1160 ST_FUNC void gv2(int rc1, int rc2)
1162 int v;
1164 /* generate the more generic register first. But VT_JMP or VT_CMP
1165 values must be generated first in all cases to avoid possible
1166 reload errors */
1167 v = vtop[0].r & VT_VALMASK;
1168 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1169 vswap();
1170 gv(rc1);
1171 vswap();
1172 gv(rc2);
1173 /* test if reload is needed for first register */
1174 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1175 vswap();
1176 gv(rc1);
1177 vswap();
1179 } else {
1180 gv(rc2);
1181 vswap();
1182 gv(rc1);
1183 vswap();
1184 /* test if reload is needed for first register */
1185 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1186 gv(rc2);
1191 #ifndef TCC_TARGET_ARM64
1192 /* wrapper around RC_FRET to return a register by type */
1193 static int rc_fret(int t)
1195 #ifdef TCC_TARGET_X86_64
1196 if (t == VT_LDOUBLE) {
1197 return RC_ST0;
1199 #endif
1200 return RC_FRET;
1202 #endif
1204 /* wrapper around REG_FRET to return a register by type */
1205 static int reg_fret(int t)
1207 #ifdef TCC_TARGET_X86_64
1208 if (t == VT_LDOUBLE) {
1209 return TREG_ST0;
1211 #endif
1212 return REG_FRET;
1215 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1216 /* expand 64bit on stack in two ints */
1217 static void lexpand(void)
1219 int u, v;
1220 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1221 v = vtop->r & (VT_VALMASK | VT_LVAL);
1222 if (v == VT_CONST) {
1223 vdup();
1224 vtop[0].c.i >>= 32;
1225 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1226 vdup();
1227 vtop[0].c.i += 4;
1228 } else {
1229 gv(RC_INT);
1230 vdup();
1231 vtop[0].r = vtop[-1].r2;
1232 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1234 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
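/* e.g. a constant long long 0x1122334455667788 is expanded into a low word
   0x55667788 (left in vtop[-1]) and a high word 0x11223344 (on top); for an
   lvalue the high word is simply the same address plus 4 (little-endian
   32-bit targets). */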
1236 #endif
1238 #ifdef TCC_TARGET_ARM
1239 /* expand long long on stack */
1240 ST_FUNC void lexpand_nr(void)
1242 int u,v;
1244 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1245 vdup();
1246 vtop->r2 = VT_CONST;
1247 vtop->type.t = VT_INT | u;
1248 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1249 if (v == VT_CONST) {
1250 vtop[-1].c.i = vtop->c.i;
1251 vtop->c.i = vtop->c.i >> 32;
1252 vtop->r = VT_CONST;
1253 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1254 vtop->c.i += 4;
1255 vtop->r = vtop[-1].r;
1256 } else if (v > VT_CONST) {
1257 vtop--;
1258 lexpand();
1259 } else
1260 vtop->r = vtop[-1].r2;
1261 vtop[-1].r2 = VT_CONST;
1262 vtop[-1].type.t = VT_INT | u;
1264 #endif
1266 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1267 /* build a long long from two ints */
1268 static void lbuild(int t)
1270 gv2(RC_INT, RC_INT);
1271 vtop[-1].r2 = vtop[0].r;
1272 vtop[-1].type.t = t;
1273 vpop();
1275 #endif
1277 /* rotate n first stack elements to the bottom
1278 I1 ... In -> I2 ... In I1 [top is right]
1280 ST_FUNC void vrotb(int n)
1282 int i;
1283 SValue tmp;
1285 tmp = vtop[-n + 1];
1286 for(i=-n+1;i!=0;i++)
1287 vtop[i] = vtop[i+1];
1288 vtop[0] = tmp;
1291 /* rotate the n elements before entry e towards the top
1292 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1294 ST_FUNC void vrote(SValue *e, int n)
1296 int i;
1297 SValue tmp;
1299 tmp = *e;
1300 for(i = 0;i < n - 1; i++)
1301 e[-i] = e[-i - 1];
1302 e[-n + 1] = tmp;
1305 /* rotate n first stack elements to the top
1306 I1 ... In -> In I1 ... I(n-1) [top is right]
1308 ST_FUNC void vrott(int n)
1310 vrote(vtop, n);
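/* worked example: with the value stack holding A B C (C on top),
   vrotb(3) gives B C A -- the deepest of the three (A) rises to the top --
   while vrott(3) gives C A B -- the old top (C) sinks below the other two.
   vrote() is the general form of vrott() for an arbitrary stack entry. */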
1313 /* pop stack value */
1314 ST_FUNC void vpop(void)
1316 int v;
1317 v = vtop->r & VT_VALMASK;
1318 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1319 /* for x86, we need to pop the FP stack */
1320 if (v == TREG_ST0 && !nocode_wanted) {
1321 o(0xd8dd); /* fstp %st(0) */
1322 } else
1323 #endif
1324 if (v == VT_JMP || v == VT_JMPI) {
1325 /* need to put correct jump if && or || without test */
1326 gsym(vtop->c.i);
1328 vtop--;
1331 /* convert stack entry to register and duplicate its value in another
1332 register */
1333 static void gv_dup(void)
1335 int rc, t, r, r1;
1336 SValue sv;
1338 t = vtop->type.t;
1339 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1340 if ((t & VT_BTYPE) == VT_LLONG) {
1341 lexpand();
1342 gv_dup();
1343 vswap();
1344 vrotb(3);
1345 gv_dup();
1346 vrotb(4);
1347 /* stack: H L L1 H1 */
1348 lbuild(t);
1349 vrotb(3);
1350 vrotb(3);
1351 vswap();
1352 lbuild(t);
1353 vswap();
1354 } else
1355 #endif
1357 /* duplicate value */
1358 rc = RC_INT;
1359 sv.type.t = VT_INT;
1360 if (is_float(t)) {
1361 rc = RC_FLOAT;
1362 #ifdef TCC_TARGET_X86_64
1363 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1364 rc = RC_ST0;
1366 #endif
1367 sv.type.t = t;
1369 r = gv(rc);
1370 r1 = get_reg(rc);
1371 sv.r = r;
1372 sv.c.i = 0;
1373 load(r1, &sv); /* move r to r1 */
1374 vdup();
1375 /* duplicates value */
1376 if (r != r1)
1377 vtop->r = r1;
1381 /* Generate value test
1383 * Generate a test for any value (jump, comparison and integers) */
1384 ST_FUNC int gvtst(int inv, int t)
1386 int v = vtop->r & VT_VALMASK;
1387 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1388 vpushi(0);
1389 gen_op(TOK_NE);
1391 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1392 /* constant jmp optimization */
1393 if ((vtop->c.i != 0) != inv)
1394 t = gjmp(t);
1395 vtop--;
1396 return t;
1398 return gtst(inv, t);
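/* constant-folding example: when the condition is a literal, gvtst() emits
   either nothing or a single unconditional gjmp().  With inv == 1 (jump
   taken when the value is false, as an if statement typically requests)
   a non-zero constant emits no jump at all, while a zero constant turns
   into one unconditional jump to the pending label chain t. */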
1401 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1402 /* generate CPU independent (unsigned) long long operations */
1403 static void gen_opl(int op)
1405 int t, a, b, op1, c, i;
1406 int func;
1407 unsigned short reg_iret = REG_IRET;
1408 unsigned short reg_lret = REG_LRET;
1409 SValue tmp;
1411 switch(op) {
1412 case '/':
1413 case TOK_PDIV:
1414 func = TOK___divdi3;
1415 goto gen_func;
1416 case TOK_UDIV:
1417 func = TOK___udivdi3;
1418 goto gen_func;
1419 case '%':
1420 func = TOK___moddi3;
1421 goto gen_mod_func;
1422 case TOK_UMOD:
1423 func = TOK___umoddi3;
1424 gen_mod_func:
1425 #ifdef TCC_ARM_EABI
1426 reg_iret = TREG_R2;
1427 reg_lret = TREG_R3;
1428 #endif
1429 gen_func:
1430 /* call generic long long function */
1431 vpush_global_sym(&func_old_type, func);
1432 vrott(3);
1433 gfunc_call(2);
1434 vpushi(0);
1435 vtop->r = reg_iret;
1436 vtop->r2 = reg_lret;
1437 break;
1438 case '^':
1439 case '&':
1440 case '|':
1441 case '*':
1442 case '+':
1443 case '-':
1444 //pv("gen_opl A",0,2);
1445 t = vtop->type.t;
1446 vswap();
1447 lexpand();
1448 vrotb(3);
1449 lexpand();
1450 /* stack: L1 H1 L2 H2 */
1451 tmp = vtop[0];
1452 vtop[0] = vtop[-3];
1453 vtop[-3] = tmp;
1454 tmp = vtop[-2];
1455 vtop[-2] = vtop[-3];
1456 vtop[-3] = tmp;
1457 vswap();
1458 /* stack: H1 H2 L1 L2 */
1459 //pv("gen_opl B",0,4);
1460 if (op == '*') {
1461 vpushv(vtop - 1);
1462 vpushv(vtop - 1);
1463 gen_op(TOK_UMULL);
1464 lexpand();
1465 /* stack: H1 H2 L1 L2 ML MH */
1466 for(i=0;i<4;i++)
1467 vrotb(6);
1468 /* stack: ML MH H1 H2 L1 L2 */
1469 tmp = vtop[0];
1470 vtop[0] = vtop[-2];
1471 vtop[-2] = tmp;
1472 /* stack: ML MH H1 L2 H2 L1 */
1473 gen_op('*');
1474 vrotb(3);
1475 vrotb(3);
1476 gen_op('*');
1477 /* stack: ML MH M1 M2 */
1478 gen_op('+');
1479 gen_op('+');
1480 } else if (op == '+' || op == '-') {
1481 /* XXX: add a non-carry method too (for MIPS or alpha) */
1482 if (op == '+')
1483 op1 = TOK_ADDC1;
1484 else
1485 op1 = TOK_SUBC1;
1486 gen_op(op1);
1487 /* stack: H1 H2 (L1 op L2) */
1488 vrotb(3);
1489 vrotb(3);
1490 gen_op(op1 + 1); /* TOK_xxxC2 */
1491 } else {
1492 gen_op(op);
1493 /* stack: H1 H2 (L1 op L2) */
1494 vrotb(3);
1495 vrotb(3);
1496 /* stack: (L1 op L2) H1 H2 */
1497 gen_op(op);
1498 /* stack: (L1 op L2) (H1 op H2) */
1500 /* stack: L H */
1501 lbuild(t);
1502 break;
1503 case TOK_SAR:
1504 case TOK_SHR:
1505 case TOK_SHL:
1506 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1507 t = vtop[-1].type.t;
1508 vswap();
1509 lexpand();
1510 vrotb(3);
1511 /* stack: L H shift */
1512 c = (int)vtop->c.i;
1513 /* constant: simpler */
1514 /* NOTE: all comments are for SHL. the other cases are
1515 done by swapping words */
1516 vpop();
1517 if (op != TOK_SHL)
1518 vswap();
1519 if (c >= 32) {
1520 /* stack: L H */
1521 vpop();
1522 if (c > 32) {
1523 vpushi(c - 32);
1524 gen_op(op);
1526 if (op != TOK_SAR) {
1527 vpushi(0);
1528 } else {
1529 gv_dup();
1530 vpushi(31);
1531 gen_op(TOK_SAR);
1533 vswap();
1534 } else {
1535 vswap();
1536 gv_dup();
1537 /* stack: H L L */
1538 vpushi(c);
1539 gen_op(op);
1540 vswap();
1541 vpushi(32 - c);
1542 if (op == TOK_SHL)
1543 gen_op(TOK_SHR);
1544 else
1545 gen_op(TOK_SHL);
1546 vrotb(3);
1547 /* stack: L L H */
1548 vpushi(c);
1549 if (op == TOK_SHL)
1550 gen_op(TOK_SHL);
1551 else
1552 gen_op(TOK_SHR);
1553 gen_op('|');
1555 if (op != TOK_SHL)
1556 vswap();
1557 lbuild(t);
1558 } else {
1559 /* XXX: should provide a faster fallback on x86 ? */
1560 switch(op) {
1561 case TOK_SAR:
1562 func = TOK___ashrdi3;
1563 goto gen_func;
1564 case TOK_SHR:
1565 func = TOK___lshrdi3;
1566 goto gen_func;
1567 case TOK_SHL:
1568 func = TOK___ashldi3;
1569 goto gen_func;
1572 break;
1573 default:
1574 /* compare operations */
1575 t = vtop->type.t;
1576 vswap();
1577 lexpand();
1578 vrotb(3);
1579 lexpand();
1580 /* stack: L1 H1 L2 H2 */
1581 tmp = vtop[-1];
1582 vtop[-1] = vtop[-2];
1583 vtop[-2] = tmp;
1584 /* stack: L1 L2 H1 H2 */
1585 /* compare high */
1586 op1 = op;
1587 /* when values are equal, we need to compare low words. since
1588 the jump is inverted, we invert the test too. */
1589 if (op1 == TOK_LT)
1590 op1 = TOK_LE;
1591 else if (op1 == TOK_GT)
1592 op1 = TOK_GE;
1593 else if (op1 == TOK_ULT)
1594 op1 = TOK_ULE;
1595 else if (op1 == TOK_UGT)
1596 op1 = TOK_UGE;
1597 a = 0;
1598 b = 0;
1599 gen_op(op1);
1600 if (op1 != TOK_NE) {
1601 a = gvtst(1, 0);
1603 if (op != TOK_EQ) {
1604 /* generate non equal test */
1605 /* XXX: NOT PORTABLE yet */
1606 if (a == 0) {
1607 b = gvtst(0, 0);
1608 } else {
1609 #if defined(TCC_TARGET_I386)
1610 b = psym(0x850f, 0);
1611 #elif defined(TCC_TARGET_ARM)
1612 b = ind;
1613 o(0x1A000000 | encbranch(ind, 0, 1));
1614 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1615 tcc_error("not implemented");
1616 #else
1617 #error not supported
1618 #endif
1621 /* compare low. Always unsigned */
1622 op1 = op;
1623 if (op1 == TOK_LT)
1624 op1 = TOK_ULT;
1625 else if (op1 == TOK_LE)
1626 op1 = TOK_ULE;
1627 else if (op1 == TOK_GT)
1628 op1 = TOK_UGT;
1629 else if (op1 == TOK_GE)
1630 op1 = TOK_UGE;
1631 gen_op(op1);
1632 a = gvtst(1, a);
1633 gsym(b);
1634 vseti(VT_JMPI, a);
1635 break;
1638 #endif
1640 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1642 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1643 return (a ^ b) >> 63 ? -x : x;
1646 static int gen_opic_lt(uint64_t a, uint64_t b)
1648 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
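/* These helpers fold signed 64-bit operations using only unsigned
   arithmetic: gen_opic_sdiv() divides magnitudes (two's-complement negation
   via the unary minus on uint64_t) and restores the sign from a ^ b, while
   gen_opic_lt() turns a signed compare into an unsigned one by flipping the
   sign bits.  E.g. gen_opic_sdiv(-7, 2) == -3 (truncation toward zero, as C
   requires) and gen_opic_lt(-1, 0) == 1. */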
1651 /* handle integer constant optimizations and various machine
1652 independent opt */
1653 static void gen_opic(int op)
1655 SValue *v1 = vtop - 1;
1656 SValue *v2 = vtop;
1657 int t1 = v1->type.t & VT_BTYPE;
1658 int t2 = v2->type.t & VT_BTYPE;
1659 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1660 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1661 uint64_t l1 = c1 ? v1->c.i : 0;
1662 uint64_t l2 = c2 ? v2->c.i : 0;
1663 int shm = (t1 == VT_LLONG) ? 63 : 31;
1665 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1666 l1 = ((uint32_t)l1 |
1667 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1668 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1669 l2 = ((uint32_t)l2 |
1670 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1672 if (c1 && c2) {
1673 switch(op) {
1674 case '+': l1 += l2; break;
1675 case '-': l1 -= l2; break;
1676 case '&': l1 &= l2; break;
1677 case '^': l1 ^= l2; break;
1678 case '|': l1 |= l2; break;
1679 case '*': l1 *= l2; break;
1681 case TOK_PDIV:
1682 case '/':
1683 case '%':
1684 case TOK_UDIV:
1685 case TOK_UMOD:
1686 /* if division by zero, generate explicit division */
1687 if (l2 == 0) {
1688 if (const_wanted)
1689 tcc_error("division by zero in constant");
1690 goto general_case;
1692 switch(op) {
1693 default: l1 = gen_opic_sdiv(l1, l2); break;
1694 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1695 case TOK_UDIV: l1 = l1 / l2; break;
1696 case TOK_UMOD: l1 = l1 % l2; break;
1698 break;
1699 case TOK_SHL: l1 <<= (l2 & shm); break;
1700 case TOK_SHR: l1 >>= (l2 & shm); break;
1701 case TOK_SAR:
1702 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1703 break;
1704 /* tests */
1705 case TOK_ULT: l1 = l1 < l2; break;
1706 case TOK_UGE: l1 = l1 >= l2; break;
1707 case TOK_EQ: l1 = l1 == l2; break;
1708 case TOK_NE: l1 = l1 != l2; break;
1709 case TOK_ULE: l1 = l1 <= l2; break;
1710 case TOK_UGT: l1 = l1 > l2; break;
1711 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1712 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1713 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1714 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1715 /* logical */
1716 case TOK_LAND: l1 = l1 && l2; break;
1717 case TOK_LOR: l1 = l1 || l2; break;
1718 default:
1719 goto general_case;
1721 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1722 l1 = ((uint32_t)l1 |
1723 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1724 v1->c.i = l1;
1725 vtop--;
1726 } else {
1727 /* if commutative ops, put c2 as constant */
1728 if (c1 && (op == '+' || op == '&' || op == '^' ||
1729 op == '|' || op == '*')) {
1730 vswap();
1731 c2 = c1; //c = c1, c1 = c2, c2 = c;
1732 l2 = l1; //l = l1, l1 = l2, l2 = l;
1734 if (!const_wanted &&
1735 c1 && ((l1 == 0 &&
1736 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1737 (l1 == -1 && op == TOK_SAR))) {
1738 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1739 vtop--;
1740 } else if (!const_wanted &&
1741 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1742 (l2 == -1 && op == '|') ||
1743 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1744 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1745 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1746 if (l2 == 1)
1747 vtop->c.i = 0;
1748 vswap();
1749 vtop--;
1750 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1751 op == TOK_PDIV) &&
1752 l2 == 1) ||
1753 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1754 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1755 l2 == 0) ||
1756 (op == '&' &&
1757 l2 == -1))) {
1758 /* filter out NOP operations like x*1, x-0, x&-1... */
1759 vtop--;
1760 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1761 /* try to use shifts instead of muls or divs */
1762 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1763 int n = -1;
1764 while (l2) {
1765 l2 >>= 1;
1766 n++;
1768 vtop->c.i = n;
1769 if (op == '*')
1770 op = TOK_SHL;
1771 else if (op == TOK_PDIV)
1772 op = TOK_SAR;
1773 else
1774 op = TOK_SHR;
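/* e.g. x * 8 is rewritten as x << 3 here: 8 is a power of two, the loop
   leaves n == 3, the constant on the stack becomes the shift amount and
   the adjusted op falls through to the generic case below. */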
1776 goto general_case;
1777 } else if (c2 && (op == '+' || op == '-') &&
1778 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1779 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1780 /* symbol + constant case */
1781 if (op == '-')
1782 l2 = -l2;
1783 l2 += vtop[-1].c.i;
1784 /* The backends can't always deal with addends to symbols
1785 larger than +-1<<31. Don't construct such. */
1786 if ((int)l2 != l2)
1787 goto general_case;
1788 vtop--;
1789 vtop->c.i = l2;
1790 } else {
1791 general_case:
1792 if (!nocode_wanted) {
1793 /* call low level op generator */
1794 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1795 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1796 gen_opl(op);
1797 else
1798 gen_opi(op);
1799 } else {
1800 vtop--;
1801 /* Ensure vtop isn't marked VT_CONST in case something
1802 up our callchain is interested in const-ness of the
1803 expression. Also make it a non-LVAL if it was,
1804 so that further code can't accidentally generate
1805 a deref (happen only for buggy uses of e.g.
1806 gv() under nocode_wanted). */
1807 vtop->r &= ~(VT_VALMASK | VT_LVAL);
1813 /* generate a floating point operation with constant propagation */
1814 static void gen_opif(int op)
1816 int c1, c2;
1817 SValue *v1, *v2;
1818 long double f1, f2;
1820 v1 = vtop - 1;
1821 v2 = vtop;
1822 /* currently, we cannot do computations with forward symbols */
1823 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1824 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 if (c1 && c2) {
1826 if (v1->type.t == VT_FLOAT) {
1827 f1 = v1->c.f;
1828 f2 = v2->c.f;
1829 } else if (v1->type.t == VT_DOUBLE) {
1830 f1 = v1->c.d;
1831 f2 = v2->c.d;
1832 } else {
1833 f1 = v1->c.ld;
1834 f2 = v2->c.ld;
1837 /* NOTE: we only do constant propagation if the numbers are finite
1838 (not NaN or infinity) (ANSI spec) */
1839 if (!ieee_finite(f1) || !ieee_finite(f2))
1840 goto general_case;
1842 switch(op) {
1843 case '+': f1 += f2; break;
1844 case '-': f1 -= f2; break;
1845 case '*': f1 *= f2; break;
1846 case '/':
1847 if (f2 == 0.0) {
1848 if (const_wanted)
1849 tcc_error("division by zero in constant");
1850 goto general_case;
1852 f1 /= f2;
1853 break;
1854 /* XXX: also handles tests ? */
1855 default:
1856 goto general_case;
1858 /* XXX: overflow test ? */
1859 if (v1->type.t == VT_FLOAT) {
1860 v1->c.f = f1;
1861 } else if (v1->type.t == VT_DOUBLE) {
1862 v1->c.d = f1;
1863 } else {
1864 v1->c.ld = f1;
1866 vtop--;
1867 } else {
1868 general_case:
1869 if (!nocode_wanted) {
1870 gen_opf(op);
1871 } else {
1872 vtop--;
1877 static int pointed_size(CType *type)
1879 int align;
1880 return type_size(pointed_type(type), &align);
1883 static void vla_runtime_pointed_size(CType *type)
1885 int align;
1886 vla_runtime_type_size(pointed_type(type), &align);
1889 static inline int is_null_pointer(SValue *p)
1891 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1892 return 0;
1893 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1894 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1895 ((p->type.t & VT_BTYPE) == VT_PTR &&
1896 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1899 static inline int is_integer_btype(int bt)
1901 return (bt == VT_BYTE || bt == VT_SHORT ||
1902 bt == VT_INT || bt == VT_LLONG);
1905 /* check types for comparison or subtraction of pointers */
1906 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1908 CType *type1, *type2, tmp_type1, tmp_type2;
1909 int bt1, bt2;
1911 /* null pointers are accepted for all comparisons, as in gcc */
1912 if (is_null_pointer(p1) || is_null_pointer(p2))
1913 return;
1914 type1 = &p1->type;
1915 type2 = &p2->type;
1916 bt1 = type1->t & VT_BTYPE;
1917 bt2 = type2->t & VT_BTYPE;
1918 /* accept comparison between pointer and integer with a warning */
1919 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1920 if (op != TOK_LOR && op != TOK_LAND )
1921 tcc_warning("comparison between pointer and integer");
1922 return;
1925 /* both must be pointers or implicit function pointers */
1926 if (bt1 == VT_PTR) {
1927 type1 = pointed_type(type1);
1928 } else if (bt1 != VT_FUNC)
1929 goto invalid_operands;
1931 if (bt2 == VT_PTR) {
1932 type2 = pointed_type(type2);
1933 } else if (bt2 != VT_FUNC) {
1934 invalid_operands:
1935 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1937 if ((type1->t & VT_BTYPE) == VT_VOID ||
1938 (type2->t & VT_BTYPE) == VT_VOID)
1939 return;
1940 tmp_type1 = *type1;
1941 tmp_type2 = *type2;
1942 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1943 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1944 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1945 /* gcc-like error if '-' is used */
1946 if (op == '-')
1947 goto invalid_operands;
1948 else
1949 tcc_warning("comparison of distinct pointer types lacks a cast");
1953 /* generic gen_op: handles types problems */
1954 ST_FUNC void gen_op(int op)
1956 int u, t1, t2, bt1, bt2, t;
1957 CType type1;
1959 redo:
1960 t1 = vtop[-1].type.t;
1961 t2 = vtop[0].type.t;
1962 bt1 = t1 & VT_BTYPE;
1963 bt2 = t2 & VT_BTYPE;
1965 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1966 tcc_error("operation on a struct");
1967 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1968 if (bt2 == VT_FUNC) {
1969 mk_pointer(&vtop->type);
1970 gaddrof();
1972 if (bt1 == VT_FUNC) {
1973 vswap();
1974 mk_pointer(&vtop->type);
1975 gaddrof();
1976 vswap();
1978 goto redo;
1979 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1980 /* at least one operand is a pointer */
1981 /* relational op: both operands must be pointers */
1982 if (op >= TOK_ULT && op <= TOK_LOR) {
1983 check_comparison_pointer_types(vtop - 1, vtop, op);
1984 /* pointers are handled as unsigned */
1985 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1986 t = VT_LLONG | VT_UNSIGNED;
1987 #else
1988 t = VT_INT | VT_UNSIGNED;
1989 #endif
1990 goto std_op;
1992 /* if both pointers, then it must be the '-' op */
1993 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1994 if (op != '-')
1995 tcc_error("cannot use pointers here");
1996 check_comparison_pointer_types(vtop - 1, vtop, op);
1997 /* XXX: check that types are compatible */
1998 if (vtop[-1].type.t & VT_VLA) {
1999 vla_runtime_pointed_size(&vtop[-1].type);
2000 } else {
2001 vpushi(pointed_size(&vtop[-1].type));
2003 vrott(3);
2004 gen_opic(op);
2005 /* set to integer type */
2006 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2007 vtop->type.t = VT_LLONG;
2008 #else
2009 vtop->type.t = VT_INT;
2010 #endif
2011 vswap();
2012 gen_op(TOK_PDIV);
2013 } else {
2014 /* exactly one pointer: must be '+' or '-'. */
2015 if (op != '-' && op != '+')
2016 tcc_error("cannot use pointers here");
2017 /* Put pointer as first operand */
2018 if (bt2 == VT_PTR) {
2019 vswap();
2020 swap(&t1, &t2);
2022 #if PTR_SIZE == 4
2023 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2024 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2025 gen_cast(&int_type);
2026 #endif
2027 type1 = vtop[-1].type;
2028 type1.t &= ~VT_ARRAY;
2029 if (vtop[-1].type.t & VT_VLA)
2030 vla_runtime_pointed_size(&vtop[-1].type);
2031 else {
2032 u = pointed_size(&vtop[-1].type);
2033 if (u < 0)
2034 tcc_error("unknown array element size");
2035 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2036 vpushll(u);
2037 #else
2038 /* XXX: cast to int ? (long long case) */
2039 vpushi(u);
2040 #endif
2042 gen_op('*');
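/* at this point the integer operand has been scaled by the element size,
   e.g. for "int *p; ... p + i" the index i is multiplied by
   pointed_size() (typically 4) before the byte-wise addition done by
   gen_opic() below. */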
2043 #if 0
2044 /* #ifdef CONFIG_TCC_BCHECK
2045 The main reason for removing this code:
2046 #include <stdio.h>
2047 int main ()
2049 int v[10];
2050 int i = 10;
2051 int j = 9;
2052 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2053 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2055 When this code is enabled, the output looks like
2056 v+i-j = 0xfffffffe
2057 v+(i-j) = 0xbff84000
2059 /* if evaluating constant expression, no code should be
2060 generated, so no bound check */
2061 if (tcc_state->do_bounds_check && !const_wanted) {
2062 /* if bounded pointers, we generate a special code to
2063 test bounds */
2064 if (op == '-') {
2065 vpushi(0);
2066 vswap();
2067 gen_op('-');
2069 gen_bounded_ptr_add();
2070 } else
2071 #endif
2073 gen_opic(op);
2075 /* restore the type in case gen_opic() swapped the operands */
2076 vtop->type = type1;
2078 } else if (is_float(bt1) || is_float(bt2)) {
2079 /* compute bigger type and do implicit casts */
2080 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2081 t = VT_LDOUBLE;
2082 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2083 t = VT_DOUBLE;
2084 } else {
2085 t = VT_FLOAT;
2087 /* floats can only be used for a few operations */
2088 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2089 (op < TOK_ULT || op > TOK_GT))
2090 tcc_error("invalid operands for binary operation");
2091 goto std_op;
2092 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2093 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2094 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2095 t |= VT_UNSIGNED;
2096 goto std_op;
2097 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2098 /* cast to biggest op */
2099 t = VT_LLONG;
2100 /* convert to unsigned if it does not fit in a long long */
2101 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2102 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2103 t |= VT_UNSIGNED;
2104 goto std_op;
2105 } else {
2106 /* integer operations */
2107 t = VT_INT;
2108 /* convert to unsigned if it does not fit in an integer */
2109 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2110 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2111 t |= VT_UNSIGNED;
2112 std_op:
2113 /* XXX: currently, some unsigned operations are explicit, so
2114 we modify them here */
2115 if (t & VT_UNSIGNED) {
2116 if (op == TOK_SAR)
2117 op = TOK_SHR;
2118 else if (op == '/')
2119 op = TOK_UDIV;
2120 else if (op == '%')
2121 op = TOK_UMOD;
2122 else if (op == TOK_LT)
2123 op = TOK_ULT;
2124 else if (op == TOK_GT)
2125 op = TOK_UGT;
2126 else if (op == TOK_LE)
2127 op = TOK_ULE;
2128 else if (op == TOK_GE)
2129 op = TOK_UGE;
2131 vswap();
2132 type1.t = t;
2133 gen_cast(&type1);
2134 vswap();
2135 /* special case for shifts and long long: we keep the shift as
2136 an integer */
2137 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2138 type1.t = VT_INT;
2139 gen_cast(&type1);
2140 if (is_float(t))
2141 gen_opif(op);
2142 else
2143 gen_opic(op);
2144 if (op >= TOK_ULT && op <= TOK_GT) {
2145 /* relational op: the result is an int */
2146 vtop->type.t = VT_INT;
2147 } else {
2148 vtop->type.t = t;
2151 // Make sure that we have converted to an rvalue:
2152 if (vtop->r & VT_LVAL && !nocode_wanted)
2153 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2156 #ifndef TCC_TARGET_ARM
2157 /* generic itof for unsigned long long case */
2158 static void gen_cvt_itof1(int t)
2160 #ifdef TCC_TARGET_ARM64
2161 gen_cvt_itof(t);
2162 #else
2163 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2164 (VT_LLONG | VT_UNSIGNED)) {
2166 if (t == VT_FLOAT)
2167 vpush_global_sym(&func_old_type, TOK___floatundisf);
2168 #if LDOUBLE_SIZE != 8
2169 else if (t == VT_LDOUBLE)
2170 vpush_global_sym(&func_old_type, TOK___floatundixf);
2171 #endif
2172 else
2173 vpush_global_sym(&func_old_type, TOK___floatundidf);
2174 vrott(2);
2175 gfunc_call(1);
2176 vpushi(0);
2177 vtop->r = reg_fret(t);
2178 } else {
2179 gen_cvt_itof(t);
2181 #endif
2183 #endif
2185 /* generic ftoi for unsigned long long case */
2186 static void gen_cvt_ftoi1(int t)
2188 #ifdef TCC_TARGET_ARM64
2189 gen_cvt_ftoi(t);
2190 #else
2191 int st;
2193 if (t == (VT_LLONG | VT_UNSIGNED)) {
2194 /* not handled natively */
2195 st = vtop->type.t & VT_BTYPE;
2196 if (st == VT_FLOAT)
2197 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2198 #if LDOUBLE_SIZE != 8
2199 else if (st == VT_LDOUBLE)
2200 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2201 #endif
2202 else
2203 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2204 vrott(2);
2205 gfunc_call(1);
2206 vpushi(0);
2207 vtop->r = REG_IRET;
2208 vtop->r2 = REG_LRET;
2209 } else {
2210 gen_cvt_ftoi(t);
2212 #endif
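/* Illustrative note (assumed, not part of the original source): both helpers
   above fall back to runtime support routines for the unsigned 64-bit
   conversions the code generators do not handle natively, e.g.

       unsigned long long u = 0xffffffffffffffffULL;
       double d = u;                  // emits a call to __floatundidf
       u = (unsigned long long)d;     // emits a call to __fixunsdfdi

   On ARM64 the native gen_cvt_itof()/gen_cvt_ftoi() already handle these
   cases, hence the #ifdef around the generic paths. */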
2215 /* force char or short cast */
2216 static void force_charshort_cast(int t)
2218 int bits, dbt;
2219 dbt = t & VT_BTYPE;
2220 /* XXX: add optimization if lvalue : just change type and offset */
2221 if (dbt == VT_BYTE)
2222 bits = 8;
2223 else
2224 bits = 16;
2225 if (t & VT_UNSIGNED) {
2226 vpushi((1 << bits) - 1);
2227 gen_op('&');
2228 } else {
2229 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2230 bits = 64 - bits;
2231 else
2232 bits = 32 - bits;
2233 vpushi(bits);
2234 gen_op(TOK_SHL);
2235 /* result must be signed or the SAR is converted to an SHL
2236 This was not the case when "t" was a signed short
2237 and the last value on the stack was an unsigned int */
2238 vtop->type.t &= ~VT_UNSIGNED;
2239 vpushi(bits);
2240 gen_op(TOK_SAR);
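/* Minimal illustration (assumed) of the transformation above, for a
   32-bit int on the value stack:

       unsigned char uc = (unsigned char)x;   // x & 0xff        (mask path)
       signed char   sc = (signed char)x;     // (x << 24) >> 24 (shift pair)

   The arithmetic right shift only sign-extends if the value is signed,
   which is why VT_UNSIGNED is cleared before the second shift is pushed. */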
2244 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2245 static void gen_cast(CType *type)
2247 int sbt, dbt, sf, df, c, p;
2249 /* special delayed cast for char/short */
2250 /* XXX: in some cases (multiple cascaded casts), it may still
2251 be incorrect */
2252 if (vtop->r & VT_MUSTCAST) {
2253 vtop->r &= ~VT_MUSTCAST;
2254 force_charshort_cast(vtop->type.t);
2257 /* bitfields first get cast to ints */
2258 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2259 gv(RC_INT);
2262 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2263 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2265 if (sbt != dbt) {
2266 sf = is_float(sbt);
2267 df = is_float(dbt);
2268 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2269 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2270 if (c) {
2271 /* constant case: we can do it now */
2272 /* XXX: in ISOC, cannot do it if error in convert */
2273 if (sbt == VT_FLOAT)
2274 vtop->c.ld = vtop->c.f;
2275 else if (sbt == VT_DOUBLE)
2276 vtop->c.ld = vtop->c.d;
2278 if (df) {
2279 if ((sbt & VT_BTYPE) == VT_LLONG) {
2280 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2281 vtop->c.ld = vtop->c.i;
2282 else
2283 vtop->c.ld = -(long double)-vtop->c.i;
2284 } else if(!sf) {
2285 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2286 vtop->c.ld = (uint32_t)vtop->c.i;
2287 else
2288 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2291 if (dbt == VT_FLOAT)
2292 vtop->c.f = (float)vtop->c.ld;
2293 else if (dbt == VT_DOUBLE)
2294 vtop->c.d = (double)vtop->c.ld;
2295 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2296 vtop->c.i = vtop->c.ld;
2297 } else if (sf && dbt == VT_BOOL) {
2298 vtop->c.i = (vtop->c.ld != 0);
2299 } else {
2300 if(sf)
2301 vtop->c.i = vtop->c.ld;
2302 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2304 else if (sbt & VT_UNSIGNED)
2305 vtop->c.i = (uint32_t)vtop->c.i;
2306 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2307 else if (sbt == VT_PTR)
2309 #endif
2310 else if (sbt != VT_LLONG)
2311 vtop->c.i = ((uint32_t)vtop->c.i |
2312 -(vtop->c.i & 0x80000000));
2314 if (dbt == (VT_LLONG|VT_UNSIGNED))
2316 else if (dbt == VT_BOOL)
2317 vtop->c.i = (vtop->c.i != 0);
2318 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2319 else if (dbt == VT_PTR)
2321 #endif
2322 else if (dbt != VT_LLONG) {
2323 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2324 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2325 0xffffffff);
2326 vtop->c.i &= m;
2327 if (!(dbt & VT_UNSIGNED))
2328 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2331 } else if (p && dbt == VT_BOOL) {
2332 vtop->r = VT_CONST;
2333 vtop->c.i = 1;
2334 } else if (!nocode_wanted) {
2335 /* non constant case: generate code */
2336 if (sf && df) {
2337 /* convert from fp to fp */
2338 gen_cvt_ftof(dbt);
2339 } else if (df) {
2340 /* convert int to fp */
2341 gen_cvt_itof1(dbt);
2342 } else if (sf) {
2343 /* convert fp to int */
2344 if (dbt == VT_BOOL) {
2345 vpushi(0);
2346 gen_op(TOK_NE);
2347 } else {
2348 /* we handle char/short/etc... with generic code */
2349 if (dbt != (VT_INT | VT_UNSIGNED) &&
2350 dbt != (VT_LLONG | VT_UNSIGNED) &&
2351 dbt != VT_LLONG)
2352 dbt = VT_INT;
2353 gen_cvt_ftoi1(dbt);
2354 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2355 /* additional cast for char/short... */
2356 vtop->type.t = dbt;
2357 gen_cast(type);
2360 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2361 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2362 if ((sbt & VT_BTYPE) != VT_LLONG) {
2363 /* scalar to long long */
2364 /* machine independent conversion */
2365 gv(RC_INT);
2366 /* generate high word */
2367 if (sbt == (VT_INT | VT_UNSIGNED)) {
2368 vpushi(0);
2369 gv(RC_INT);
2370 } else {
2371 if (sbt == VT_PTR) {
2372 /* cast from pointer to int before we apply
2373 shift operation, which pointers don't support */
2374 gen_cast(&int_type);
2376 gv_dup();
2377 vpushi(31);
2378 gen_op(TOK_SAR);
2380 /* patch second register */
2381 vtop[-1].r2 = vtop->r;
2382 vpop();
2384 #else
2385 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2386 (dbt & VT_BTYPE) == VT_PTR ||
2387 (dbt & VT_BTYPE) == VT_FUNC) {
2388 if ((sbt & VT_BTYPE) != VT_LLONG &&
2389 (sbt & VT_BTYPE) != VT_PTR &&
2390 (sbt & VT_BTYPE) != VT_FUNC) {
2391 /* need to convert from 32bit to 64bit */
2392 gv(RC_INT);
2393 if (sbt != (VT_INT | VT_UNSIGNED)) {
2394 #if defined(TCC_TARGET_ARM64)
2395 gen_cvt_sxtw();
2396 #elif defined(TCC_TARGET_X86_64)
2397 int r = gv(RC_INT);
2398 /* x86_64 specific: movslq */
2399 o(0x6348);
2400 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2401 #else
2402 #error
2403 #endif
2406 #endif
2407 } else if (dbt == VT_BOOL) {
2408 /* scalar to bool */
2409 vpushi(0);
2410 gen_op(TOK_NE);
2411 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2412 (dbt & VT_BTYPE) == VT_SHORT) {
2413 if (sbt == VT_PTR) {
2414 vtop->type.t = VT_INT;
2415 tcc_warning("nonportable conversion from pointer to char/short");
2417 force_charshort_cast(dbt);
2418 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2419 } else if ((dbt & VT_BTYPE) == VT_INT) {
2420 /* scalar to int */
2421 if ((sbt & VT_BTYPE) == VT_LLONG) {
2422 /* from long long: just take low order word */
2423 lexpand();
2424 vpop();
2426 /* if lvalue and single word type, nothing to do because
2427 the lvalue already contains the real type size (see
2428 VT_LVAL_xxx constants) */
2429 #endif
2432 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2433 /* if we are casting between pointer types,
2434 we must update the VT_LVAL_xxx size */
2435 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2436 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2438 vtop->type = *type;
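/* Some constant-folding examples handled by the constant ('c') branch above
   (illustrative, assuming 32-bit int and signed plain char):

       (signed char)0xff   ->  -1    masked to 0xff, then sign-extended
       (char)300           ->  44    300 & 0xff, sign bit clear
       (int)3.75           ->   3    long double constant truncated
       (_Bool)0.5          ->   1    any non-zero value becomes 1

   Non-constant operands go through the code-generating branch instead. */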
2441 /* return type size as known at compile time. Put alignment at 'a' */
2442 ST_FUNC int type_size(CType *type, int *a)
2444 Sym *s;
2445 int bt;
2447 bt = type->t & VT_BTYPE;
2448 if (bt == VT_STRUCT) {
2449 /* struct/union */
2450 s = type->ref;
2451 *a = s->r;
2452 return s->c;
2453 } else if (bt == VT_PTR) {
2454 if (type->t & VT_ARRAY) {
2455 int ts;
2457 s = type->ref;
2458 ts = type_size(&s->type, a);
2460 if (ts < 0 && s->c < 0)
2461 ts = -ts;
2463 return ts * s->c;
2464 } else {
2465 *a = PTR_SIZE;
2466 return PTR_SIZE;
2468 } else if (bt == VT_LDOUBLE) {
2469 *a = LDOUBLE_ALIGN;
2470 return LDOUBLE_SIZE;
2471 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2472 #ifdef TCC_TARGET_I386
2473 #ifdef TCC_TARGET_PE
2474 *a = 8;
2475 #else
2476 *a = 4;
2477 #endif
2478 #elif defined(TCC_TARGET_ARM)
2479 #ifdef TCC_ARM_EABI
2480 *a = 8;
2481 #else
2482 *a = 4;
2483 #endif
2484 #else
2485 *a = 8;
2486 #endif
2487 return 8;
2488 } else if (bt == VT_INT || bt == VT_FLOAT) {
2489 *a = 4;
2490 return 4;
2491 } else if (bt == VT_SHORT) {
2492 *a = 2;
2493 return 2;
2494 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2495 *a = 8;
2496 return 16;
2497 } else if (bt == VT_ENUM) {
2498 *a = 4;
2499 /* Enums might be incomplete, so don't just return '4' here. */
2500 return type->ref->c;
2501 } else {
2502 /* char, void, function, _Bool */
2503 *a = 1;
2504 return 1;
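/* Example results (illustrative; the exact values depend on the target):

       int                       ->  size 4, *a == 4
       short                     ->  size 2, *a == 2
       double                    ->  size 8, *a == 4 on plain i386, 8 elsewhere
       struct { char c; int i; } ->  size 8, *a == 4

   Incomplete types (an undefined struct/union, or an enum without a body)
   yield a negative size via s->c == -1. */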
2508 /* push type size as known at run time on top of value stack. Put
2509 alignment at 'a' */
2510 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2512 if (type->t & VT_VLA) {
2513 type_size(&type->ref->type, a);
2514 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2515 } else {
2516 vpushi(type_size(type, a));
2520 static void vla_sp_restore(void) {
2521 if (vlas_in_scope) {
2522 gen_vla_sp_restore(vla_sp_loc);
2526 static void vla_sp_restore_root(void) {
2527 if (vlas_in_scope) {
2528 gen_vla_sp_restore(vla_sp_root_loc);
2532 /* return the pointed type of t */
2533 static inline CType *pointed_type(CType *type)
2535 return &type->ref->type;
2538 /* modify type so that it is a pointer to the given type. */
2539 ST_FUNC void mk_pointer(CType *type)
2541 Sym *s;
2542 s = sym_push(SYM_FIELD, type, 0, -1);
2543 type->t = VT_PTR | (type->t & ~VT_TYPE);
2544 type->ref = s;
2547 /* compare function types. OLD functions match any new functions */
2548 static int is_compatible_func(CType *type1, CType *type2)
2550 Sym *s1, *s2;
2552 s1 = type1->ref;
2553 s2 = type2->ref;
2554 if (!is_compatible_types(&s1->type, &s2->type))
2555 return 0;
2556 /* check func_call */
2557 if (s1->a.func_call != s2->a.func_call)
2558 return 0;
2559 /* XXX: not complete */
2560 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2561 return 1;
2562 if (s1->c != s2->c)
2563 return 0;
2564 while (s1 != NULL) {
2565 if (s2 == NULL)
2566 return 0;
2567 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2568 return 0;
2569 s1 = s1->next;
2570 s2 = s2->next;
2572 if (s2)
2573 return 0;
2574 return 1;
2577 /* return true if type1 and type2 are the same. If unqualified is
2578 true, qualifiers on the types are ignored.
2580 - enums are not checked as gcc __builtin_types_compatible_p ()
2582 static int compare_types(CType *type1, CType *type2, int unqualified)
2584 int bt1, t1, t2;
2586 t1 = type1->t & VT_TYPE;
2587 t2 = type2->t & VT_TYPE;
2588 if (unqualified) {
2589 /* strip qualifiers before comparing */
2590 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2591 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2593 /* Default vs. explicit signedness only matters for char */
2594 if ((t1 & VT_BTYPE) != VT_BYTE) {
2595 t1 &= ~VT_DEFSIGN;
2596 t2 &= ~VT_DEFSIGN;
2598 /* An enum is compatible with (unsigned) int. Ideally we would
2599 store the enum's signedness in type->ref.a.<some_bit> and
2600 only accept unsigned enums with unsigned int and vice versa.
2601 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2602 from pointer target types, so we can't add it here either. */
2603 if ((t1 & VT_BTYPE) == VT_ENUM) {
2604 t1 = VT_INT;
2605 if (type1->ref->a.unsigned_enum)
2606 t1 |= VT_UNSIGNED;
2608 if ((t2 & VT_BTYPE) == VT_ENUM) {
2609 t2 = VT_INT;
2610 if (type2->ref->a.unsigned_enum)
2611 t2 |= VT_UNSIGNED;
2613 /* XXX: bitfields ? */
2614 if (t1 != t2)
2615 return 0;
2616 /* test more complicated cases */
2617 bt1 = t1 & VT_BTYPE;
2618 if (bt1 == VT_PTR) {
2619 type1 = pointed_type(type1);
2620 type2 = pointed_type(type2);
2621 return is_compatible_types(type1, type2);
2622 } else if (bt1 == VT_STRUCT) {
2623 return (type1->ref == type2->ref);
2624 } else if (bt1 == VT_FUNC) {
2625 return is_compatible_func(type1, type2);
2626 } else {
2627 return 1;
2631 /* return true if type1 and type2 are exactly the same (including
2632 qualifiers).
2634 static int is_compatible_types(CType *type1, CType *type2)
2636 return compare_types(type1,type2,0);
2639 /* return true if type1 and type2 are the same (ignoring qualifiers).
2641 static int is_compatible_parameter_types(CType *type1, CType *type2)
2643 return compare_types(type1,type2,1);
2646 /* print a type. If 'varstr' is not NULL, then the variable is also
2647 printed in the type */
2648 /* XXX: union */
2649 /* XXX: add array and function pointers */
2650 static void type_to_str(char *buf, int buf_size,
2651 CType *type, const char *varstr)
2653 int bt, v, t;
2654 Sym *s, *sa;
2655 char buf1[256];
2656 const char *tstr;
2658 t = type->t & VT_TYPE;
2659 bt = t & VT_BTYPE;
2660 buf[0] = '\0';
2661 if (t & VT_CONSTANT)
2662 pstrcat(buf, buf_size, "const ");
2663 if (t & VT_VOLATILE)
2664 pstrcat(buf, buf_size, "volatile ");
2665 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2666 pstrcat(buf, buf_size, "unsigned ");
2667 else if (t & VT_DEFSIGN)
2668 pstrcat(buf, buf_size, "signed ");
2669 switch(bt) {
2670 case VT_VOID:
2671 tstr = "void";
2672 goto add_tstr;
2673 case VT_BOOL:
2674 tstr = "_Bool";
2675 goto add_tstr;
2676 case VT_BYTE:
2677 tstr = "char";
2678 goto add_tstr;
2679 case VT_SHORT:
2680 tstr = "short";
2681 goto add_tstr;
2682 case VT_INT:
2683 tstr = "int";
2684 goto add_tstr;
2685 case VT_LONG:
2686 tstr = "long";
2687 goto add_tstr;
2688 case VT_LLONG:
2689 tstr = "long long";
2690 goto add_tstr;
2691 case VT_FLOAT:
2692 tstr = "float";
2693 goto add_tstr;
2694 case VT_DOUBLE:
2695 tstr = "double";
2696 goto add_tstr;
2697 case VT_LDOUBLE:
2698 tstr = "long double";
2699 add_tstr:
2700 pstrcat(buf, buf_size, tstr);
2701 break;
2702 case VT_ENUM:
2703 case VT_STRUCT:
2704 if (bt == VT_STRUCT)
2705 tstr = "struct ";
2706 else
2707 tstr = "enum ";
2708 pstrcat(buf, buf_size, tstr);
2709 v = type->ref->v & ~SYM_STRUCT;
2710 if (v >= SYM_FIRST_ANOM)
2711 pstrcat(buf, buf_size, "<anonymous>");
2712 else
2713 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2714 break;
2715 case VT_FUNC:
2716 s = type->ref;
2717 type_to_str(buf, buf_size, &s->type, varstr);
2718 pstrcat(buf, buf_size, "(");
2719 sa = s->next;
2720 while (sa != NULL) {
2721 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2722 pstrcat(buf, buf_size, buf1);
2723 sa = sa->next;
2724 if (sa)
2725 pstrcat(buf, buf_size, ", ");
2727 pstrcat(buf, buf_size, ")");
2728 goto no_var;
2729 case VT_PTR:
2730 s = type->ref;
2731 if (t & VT_ARRAY) {
2732 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2733 type_to_str(buf, buf_size, &s->type, buf1);
2734 goto no_var;
2736 pstrcpy(buf1, sizeof(buf1), "*");
2737 if (t & VT_CONSTANT)
2738 pstrcat(buf1, buf_size, "const ");
2739 if (t & VT_VOLATILE)
2740 pstrcat(buf1, buf_size, "volatile ");
2741 if (varstr)
2742 pstrcat(buf1, sizeof(buf1), varstr);
2743 type_to_str(buf, buf_size, &s->type, buf1);
2744 goto no_var;
2746 if (varstr) {
2747 pstrcat(buf, buf_size, " ");
2748 pstrcat(buf, buf_size, varstr);
2750 no_var: ;
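/* A few strings this produces, for illustration (assuming the usual
   declarations):

       int x;               ->  "int x"
       unsigned char *p;    ->  "unsigned char *p"
       int f(int, char *);  ->  "int f(int, char *)"
       struct { ... } s;    ->  "struct <anonymous> s"

   It is used below by gen_assign_cast() to format both types in the
   "cannot cast '%s' to '%s'" error message. */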
2753 /* verify type compatibility to store vtop in 'dt' type, and generate
2754 casts if needed. */
2755 static void gen_assign_cast(CType *dt)
2757 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2758 char buf1[256], buf2[256];
2759 int dbt, sbt;
2761 st = &vtop->type; /* source type */
2762 dbt = dt->t & VT_BTYPE;
2763 sbt = st->t & VT_BTYPE;
2764 if (sbt == VT_VOID || dbt == VT_VOID) {
2765 if (sbt == VT_VOID && dbt == VT_VOID)
2766 ; /*
2767 It is Ok if both are void
2768 A test program:
2769 void func1() {}
2770 void func2() {
2771 return func1();
2773 gcc accepts this program
2775 else
2776 tcc_error("cannot cast from/to void");
2778 if (dt->t & VT_CONSTANT)
2779 tcc_warning("assignment of read-only location");
2780 switch(dbt) {
2781 case VT_PTR:
2782 /* special cases for pointers */
2783 /* '0' can also be a pointer */
2784 if (is_null_pointer(vtop))
2785 goto type_ok;
2786 /* accept implicit pointer to integer cast with warning */
2787 if (is_integer_btype(sbt)) {
2788 tcc_warning("assignment makes pointer from integer without a cast");
2789 goto type_ok;
2791 type1 = pointed_type(dt);
2792 /* a function is implicitly a function pointer */
2793 if (sbt == VT_FUNC) {
2794 if ((type1->t & VT_BTYPE) != VT_VOID &&
2795 !is_compatible_types(pointed_type(dt), st))
2796 tcc_warning("assignment from incompatible pointer type");
2797 goto type_ok;
2799 if (sbt != VT_PTR)
2800 goto error;
2801 type2 = pointed_type(st);
2802 if ((type1->t & VT_BTYPE) == VT_VOID ||
2803 (type2->t & VT_BTYPE) == VT_VOID) {
2804 /* void * can match anything */
2805 } else {
2806 /* exact type match, except for qualifiers */
2807 tmp_type1 = *type1;
2808 tmp_type2 = *type2;
2809 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2810 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2811 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2812 /* Like GCC, don't warn by default for mere changes
2813 in pointer target signedness. Do warn for different
2814 base types, though, in particular for unsigned enums
2815 and signed int targets. */
2816 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2817 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2818 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2820 else
2821 tcc_warning("assignment from incompatible pointer type");
2824 /* check const and volatile */
2825 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2826 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2827 tcc_warning("assignment discards qualifiers from pointer target type");
2828 break;
2829 case VT_BYTE:
2830 case VT_SHORT:
2831 case VT_INT:
2832 case VT_LLONG:
2833 if (sbt == VT_PTR || sbt == VT_FUNC) {
2834 tcc_warning("assignment makes integer from pointer without a cast");
2835 } else if (sbt == VT_STRUCT) {
2836 goto case_VT_STRUCT;
2838 /* XXX: more tests */
2839 break;
2840 case VT_STRUCT:
2841 case_VT_STRUCT:
2842 tmp_type1 = *dt;
2843 tmp_type2 = *st;
2844 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2846 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2847 error:
2848 type_to_str(buf1, sizeof(buf1), st, NULL);
2849 type_to_str(buf2, sizeof(buf2), dt, NULL);
2850 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2852 break;
2854 type_ok:
2855 gen_cast(dt);
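/* Illustrative examples of the diagnostics produced above:

       int *p = 3;                 warning: assignment makes pointer from
                                   integer without a cast
       char *q; int *r = q;        warning: assignment from incompatible
                                   pointer type
       const int *c; int *d = c;   warning: assignment discards qualifiers
                                   from pointer target type
       struct A a; struct B b = a; error: cannot cast 'struct A' to
                                   'struct B'
*/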
2858 /* store vtop in lvalue pushed on stack */
2859 ST_FUNC void vstore(void)
2861 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2863 ft = vtop[-1].type.t;
2864 sbt = vtop->type.t & VT_BTYPE;
2865 dbt = ft & VT_BTYPE;
2866 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2867 (sbt == VT_INT && dbt == VT_SHORT))
2868 && !(vtop->type.t & VT_BITFIELD)) {
2869 /* optimize char/short casts */
2870 delayed_cast = VT_MUSTCAST;
2871 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2872 ((1 << VT_STRUCT_SHIFT) - 1));
2873 /* XXX: factorize */
2874 if (ft & VT_CONSTANT)
2875 tcc_warning("assignment of read-only location");
2876 } else {
2877 delayed_cast = 0;
2878 if (!(ft & VT_BITFIELD))
2879 gen_assign_cast(&vtop[-1].type);
2882 if (sbt == VT_STRUCT) {
2883 /* if structure, only generate pointer */
2884 /* structure assignment : generate memcpy */
2885 /* XXX: optimize if small size */
2886 if (!nocode_wanted) {
2887 size = type_size(&vtop->type, &align);
2889 /* destination */
2890 vswap();
2891 vtop->type.t = VT_PTR;
2892 gaddrof();
2894 /* address of memcpy() */
2895 #ifdef TCC_ARM_EABI
2896 if(!(align & 7))
2897 vpush_global_sym(&func_old_type, TOK_memcpy8);
2898 else if(!(align & 3))
2899 vpush_global_sym(&func_old_type, TOK_memcpy4);
2900 else
2901 #endif
2902 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2903 vpush_global_sym(&func_old_type, TOK_memmove);
2905 vswap();
2906 /* source */
2907 vpushv(vtop - 2);
2908 vtop->type.t = VT_PTR;
2909 gaddrof();
2910 /* type size */
2911 vpushi(size);
2912 gfunc_call(3);
2913 } else {
2914 vswap();
2915 vpop();
2917 /* leave source on stack */
2918 } else if (ft & VT_BITFIELD) {
2919 /* bitfield store handling */
2921 /* save lvalue as expression result (example: s.b = s.a = n;) */
2922 vdup(), vtop[-1] = vtop[-2];
2924 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2925 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2926 /* remove bit field info to avoid loops */
2927 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2929 if((ft & VT_BTYPE) == VT_BOOL) {
2930 gen_cast(&vtop[-1].type);
2931 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2934 /* duplicate destination */
2935 vdup();
2936 vtop[-1] = vtop[-2];
2938 /* mask and shift source */
2939 if((ft & VT_BTYPE) != VT_BOOL) {
2940 if((ft & VT_BTYPE) == VT_LLONG) {
2941 vpushll((1ULL << bit_size) - 1ULL);
2942 } else {
2943 vpushi((1 << bit_size) - 1);
2945 gen_op('&');
2947 vpushi(bit_pos);
2948 gen_op(TOK_SHL);
2949 /* load destination, mask and or with source */
2950 vswap();
2951 if((ft & VT_BTYPE) == VT_LLONG) {
2952 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2953 } else {
2954 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2956 gen_op('&');
2957 gen_op('|');
2958 /* store result */
2959 vstore();
2960 /* ... and discard */
2961 vpop();
2963 } else {
2964 if (!nocode_wanted) {
2965 #ifdef CONFIG_TCC_BCHECK
2966 /* bound check case */
2967 if (vtop[-1].r & VT_MUSTBOUND) {
2968 vswap();
2969 gbound();
2970 vswap();
2972 #endif
2973 rc = RC_INT;
2974 if (is_float(ft)) {
2975 rc = RC_FLOAT;
2976 #ifdef TCC_TARGET_X86_64
2977 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2978 rc = RC_ST0;
2979 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2980 rc = RC_FRET;
2982 #endif
2984 r = gv(rc); /* generate value */
2985 /* if lvalue was saved on stack, must read it */
2986 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2987 SValue sv;
2988 t = get_reg(RC_INT);
2989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2990 sv.type.t = VT_PTR;
2991 #else
2992 sv.type.t = VT_INT;
2993 #endif
2994 sv.r = VT_LOCAL | VT_LVAL;
2995 sv.c.i = vtop[-1].c.i;
2996 load(t, &sv);
2997 vtop[-1].r = t | VT_LVAL;
2999 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3000 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3001 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3002 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3003 #else
3004 if ((ft & VT_BTYPE) == VT_LLONG) {
3005 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3006 #endif
3007 vtop[-1].type.t = load_type;
3008 store(r, vtop - 1);
3009 vswap();
3010 /* convert to int to increment easily */
3011 vtop->type.t = addr_type;
3012 gaddrof();
3013 vpushi(load_size);
3014 gen_op('+');
3015 vtop->r |= VT_LVAL;
3016 vswap();
3017 vtop[-1].type.t = load_type;
3018 /* XXX: it works because r2 is spilled last ! */
3019 store(vtop->r2, vtop - 1);
3020 } else {
3021 store(r, vtop - 1);
3024 vswap();
3025 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3026 vtop->r |= delayed_cast;
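/* Worked example (assumed) of the bit-field branch above, for

       struct { unsigned int lo : 3, mid : 5; } s;
       s.mid = v;

   With bit_pos == 3 and bit_size == 5 the generated sequence is roughly:

       tmp  = (v & 0x1f) << 3;         mask and shift source
       word = *(&s) & ~(0x1f << 3);    load destination, clear the field
       *(&s) = word | tmp;             or with source and store back

   The saved copy of the original lvalue is left on the stack so that
   chained assignments like "s.lo = s.mid = v;" keep working. */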
3030 /* post defines POST/PRE add. c is the token ++ or -- */
3031 ST_FUNC void inc(int post, int c)
3033 test_lvalue();
3034 vdup(); /* save lvalue */
3035 if (post) {
3036 if (!nocode_wanted)
3037 gv_dup(); /* duplicate value */
3038 else
3039 vdup(); /* duplicate value */
3040 vrotb(3);
3041 vrotb(3);
3043 /* add constant */
3044 vpushi(c - TOK_MID);
3045 gen_op('+');
3046 vstore(); /* store value */
3047 if (post)
3048 vpop(); /* if post op, return saved value */
3051 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3053 /* read the string */
3054 if (tok != TOK_STR)
3055 expect(msg);
3056 cstr_new(astr);
3057 while (tok == TOK_STR) {
3058 /* XXX: add \0 handling too ? */
3059 cstr_cat(astr, tokc.str.data, -1);
3060 next();
3062 cstr_ccat(astr, '\0');
3065 /* Parse GNUC __attribute__ extension. Currently, the following
3066 extensions are recognized:
3067 - aligned(n) : set data/function alignment.
3068 - packed : force data alignment to 1
3069 - section(x) : generate data/code in this section.
3070 - unused : currently ignored, but may be used someday.
3071 - regparm(n) : pass function parameters in registers (i386 only)
3073 static void parse_attribute(AttributeDef *ad)
3075 int t, n;
3076 CString astr;
3078 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3079 next();
3080 skip('(');
3081 skip('(');
3082 while (tok != ')') {
3083 if (tok < TOK_IDENT)
3084 expect("attribute name");
3085 t = tok;
3086 next();
3087 switch(t) {
3088 case TOK_SECTION1:
3089 case TOK_SECTION2:
3090 skip('(');
3091 parse_mult_str(&astr, "section name");
3092 ad->section = find_section(tcc_state, (char *)astr.data);
3093 skip(')');
3094 cstr_free(&astr);
3095 break;
3096 case TOK_ALIAS1:
3097 case TOK_ALIAS2:
3098 skip('(');
3099 parse_mult_str(&astr, "alias(\"target\")");
3100 ad->alias_target = /* save string as token, for later */
3101 tok_alloc((char*)astr.data, astr.size-1)->tok;
3102 skip(')');
3103 cstr_free(&astr);
3104 break;
3105 case TOK_VISIBILITY1:
3106 case TOK_VISIBILITY2:
3107 skip('(');
3108 parse_mult_str(&astr,
3109 "visibility(\"default|hidden|internal|protected\")");
3110 if (!strcmp (astr.data, "default"))
3111 ad->a.visibility = STV_DEFAULT;
3112 else if (!strcmp (astr.data, "hidden"))
3113 ad->a.visibility = STV_HIDDEN;
3114 else if (!strcmp (astr.data, "internal"))
3115 ad->a.visibility = STV_INTERNAL;
3116 else if (!strcmp (astr.data, "protected"))
3117 ad->a.visibility = STV_PROTECTED;
3118 else
3119 expect("visibility(\"default|hidden|internal|protected\")");
3120 skip(')');
3121 cstr_free(&astr);
3122 break;
3123 case TOK_ALIGNED1:
3124 case TOK_ALIGNED2:
3125 if (tok == '(') {
3126 next();
3127 n = expr_const();
3128 if (n <= 0 || (n & (n - 1)) != 0)
3129 tcc_error("alignment must be a positive power of two");
3130 skip(')');
3131 } else {
3132 n = MAX_ALIGN;
3134 ad->a.aligned = n;
3135 break;
3136 case TOK_PACKED1:
3137 case TOK_PACKED2:
3138 ad->a.packed = 1;
3139 break;
3140 case TOK_WEAK1:
3141 case TOK_WEAK2:
3142 ad->a.weak = 1;
3143 break;
3144 case TOK_UNUSED1:
3145 case TOK_UNUSED2:
3146 /* currently, no need to handle it because tcc does not
3147 track unused objects */
3148 break;
3149 case TOK_NORETURN1:
3150 case TOK_NORETURN2:
3151 /* currently, no need to handle it because tcc does not
3152 make use of the noreturn information */
3153 break;
3154 case TOK_CDECL1:
3155 case TOK_CDECL2:
3156 case TOK_CDECL3:
3157 ad->a.func_call = FUNC_CDECL;
3158 break;
3159 case TOK_STDCALL1:
3160 case TOK_STDCALL2:
3161 case TOK_STDCALL3:
3162 ad->a.func_call = FUNC_STDCALL;
3163 break;
3164 #ifdef TCC_TARGET_I386
3165 case TOK_REGPARM1:
3166 case TOK_REGPARM2:
3167 skip('(');
3168 n = expr_const();
3169 if (n > 3)
3170 n = 3;
3171 else if (n < 0)
3172 n = 0;
3173 if (n > 0)
3174 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3175 skip(')');
3176 break;
3177 case TOK_FASTCALL1:
3178 case TOK_FASTCALL2:
3179 case TOK_FASTCALL3:
3180 ad->a.func_call = FUNC_FASTCALLW;
3181 break;
3182 #endif
3183 case TOK_MODE:
3184 skip('(');
3185 switch(tok) {
3186 case TOK_MODE_DI:
3187 ad->a.mode = VT_LLONG + 1;
3188 break;
3189 case TOK_MODE_QI:
3190 ad->a.mode = VT_BYTE + 1;
3191 break;
3192 case TOK_MODE_HI:
3193 ad->a.mode = VT_SHORT + 1;
3194 break;
3195 case TOK_MODE_SI:
3196 case TOK_MODE_word:
3197 ad->a.mode = VT_INT + 1;
3198 break;
3199 default:
3200 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3201 break;
3203 next();
3204 skip(')');
3205 break;
3206 case TOK_DLLEXPORT:
3207 ad->a.func_export = 1;
3208 break;
3209 case TOK_DLLIMPORT:
3210 ad->a.func_import = 1;
3211 break;
3212 default:
3213 if (tcc_state->warn_unsupported)
3214 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3215 /* skip parameters */
3216 if (tok == '(') {
3217 int parenthesis = 0;
3218 do {
3219 if (tok == '(')
3220 parenthesis++;
3221 else if (tok == ')')
3222 parenthesis--;
3223 next();
3224 } while (parenthesis && tok != -1);
3226 break;
3228 if (tok != ',')
3229 break;
3230 next();
3232 skip(')');
3233 skip(')');
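/* Examples of attribute lists this parser accepts (illustrative):

       int buf[32] __attribute__((aligned(16), section(".mydata")));
       void die(const char *msg) __attribute__((noreturn));
       int sum2(int a, int b) __attribute__((regparm(2)));   // i386 only
       struct __attribute__((packed)) hdr { char tag; int len; };

   Unknown attribute names are skipped together with any parenthesized
   arguments, with a warning if warn_unsupported is set. */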
3237 static Sym * find_field (CType *type, int v)
3239 Sym *s = type->ref;
3240 v |= SYM_FIELD;
3241 while ((s = s->next) != NULL) {
3242 if ((s->v & SYM_FIELD) &&
3243 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3244 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3245 Sym *ret = find_field (&s->type, v);
3246 if (ret)
3247 return ret;
3249 if (s->v == v)
3250 break;
3252 return s;
3255 static void struct_add_offset (Sym *s, int offset)
3257 while ((s = s->next) != NULL) {
3258 if ((s->v & SYM_FIELD) &&
3259 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3260 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3261 struct_add_offset(s->type.ref, offset);
3262 } else
3263 s->c += offset;
3267 static void struct_layout(CType *type, AttributeDef *ad)
3269 int align, maxalign, offset, c, bit_pos, bt, prevbt;
3270 int pcc = !tcc_state->ms_bitfields;
3271 Sym *f;
3272 if (ad->a.aligned)
3273 maxalign = ad->a.aligned;
3274 else
3275 maxalign = 1;
3276 offset = 0;
3277 c = 0;
3278 bit_pos = 0;
3279 prevbt = VT_STRUCT; /* make it never match */
3280 for (f = type->ref->next; f; f = f->next) {
3281 int extra_bytes = 0;
3282 int typealign, bit_size;
3283 int size = type_size(&f->type, &typealign);
3284 if (f->type.t & VT_BITFIELD) {
3285 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3286 /* without ms-bitfields, allocate the
3287 * minimum number of bytes necessary,
3288 * adding single bytes as needed */
3289 if (0 && !tcc_state->ms_bitfields) {
3290 if (bit_pos == 0)
3291 /* minimum bytes for new bitfield */
3292 size = (bit_size + 7) / 8;
3293 else {
3294 /* enough spare bits already allocated? */
3295 int add_size = (bit_pos - 1) % 8 + 1 + bit_size;
3296 if (add_size > 8) /* doesn't fit */
3297 extra_bytes = (add_size - 1) / 8;
3300 } else
3301 bit_size = -1;
3302 if (bit_size == 0 && pcc) {
3303 /* Zero-width bit-fields in PCC mode aren't affected
3304 by any packing (attribute or pragma). */
3305 align = typealign;
3306 } else if (f->r > 1) {
3307 align = f->r;
3308 } else if (ad->a.packed || f->r == 1) {
3309 align = 1;
3310 typealign = 1;
3311 } else {
3312 align = typealign;
3314 if (extra_bytes) c += extra_bytes;
3315 else if (bit_size < 0) {
3316 prevbt = VT_STRUCT;
3317 if (type->ref->type.t == TOK_STRUCT) {
3318 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3319 c = (c + addbytes + align - 1) & -align;
3320 offset = c;
3321 if (size > 0)
3322 c += size;
3323 } else {
3324 union_tail:
3325 offset = 0;
3326 if (size > c)
3327 c = size;
3329 if (align > maxalign)
3330 maxalign = align;
3331 bit_pos = 0;
3332 } else if (type->ref->type.t != TOK_STRUCT) {
3333 if (pcc)
3334 size = (bit_size + 7) >> 3;
3335 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3336 | (0 << VT_STRUCT_SHIFT);
3337 goto union_tail;
3338 } else {
3339 /* A bit-field. Layout is more complicated. There are two
3340 options TCC implements: PCC compatible and MS compatible
3341 (PCC compatible is what GCC uses for almost all targets). */
3342 if (pcc) {
3343 /* In PCC layout a non-packed bit-field is placed adjacent
3344 to the preceding bit-fields, except if it would overflow
3345 its container (depending on base type) or it's a zero-width
3346 bit-field. Packed non-zero-width bit-fields always are
3347 placed adjacent. */
3348 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3349 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3350 /*if ((typealign != 1 &&
3351 //bit_pos + bit_size > size * 8) ||
3352 (((c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign)
3353 != ((c + ((bit_pos + bit_size + 7) >> 3) + typealign - 1) & -typealign))) ||
3354 bit_size == 0 ||
3355 (bit_pos + bit_size > size * 8)
3356 ) {*/
3357 if (bit_size == 0 ||
3358 (typealign != 1 && (ofs2 / (typealign * 8)) > ((size*8)/(typealign*8)))) {
3359 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3360 bit_pos = 0;
3362 offset = c;
3363 /* In PCC layout named bit-fields influence the alignment
3364 of the containing struct using the base types alignment,
3365 except for packed fields or zero-width fields. */
3366 if (!(f->v & SYM_FIRST_ANOM)) {
3367 if (align > maxalign)
3368 maxalign = align;
3369 if (typealign > maxalign)
3370 maxalign = typealign;
3372 } else {
3373 bt = f->type.t & VT_BTYPE;
3374 if (
3376 bit_pos + bit_size > size * 8) ||
3377 (bit_size == 0 && prevbt == bt) ||
3378 (bit_size > 0 && bt != prevbt))) {
3379 c = (c + typealign - 1) & -typealign;
3380 offset = c;
3381 bit_pos = 0;
3382 /* In MS bitfield mode a bit-field run always uses
3383 at least as many bits as the underlying type. */
3384 c += size;
3386 if (bit_size > 0 || prevbt == bt) {
3387 if (align > maxalign)
3388 maxalign = align;
3389 if (typealign > maxalign)
3390 maxalign = typealign;
3392 prevbt = bt;
3394 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3395 | (bit_pos << VT_STRUCT_SHIFT);
3396 bit_pos += bit_size;
3397 if (pcc && bit_pos >= size * 8) {
3398 c += size;
3399 bit_pos -= size * 8;
3402 #if 0
3403 printf("set field %s offset=%d c=%d",
3404 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3405 if (f->type.t & VT_BITFIELD) {
3406 printf(" pos=%d size=%d",
3407 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3408 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3410 printf("\n");
3411 #endif
3413 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3414 Sym *ass;
3415 /* An anonymous struct/union. Adjust member offsets
3416 to reflect the real offset of our containing struct.
3417 Also set the offset of this anon member inside
3418 the outer struct to be zero. Via this it
3419 works when accessing the field offset directly
3420 (from base object), as well as when recursing
3421 members in initializer handling. */
3422 int v2 = f->type.ref->v;
3423 if (!(v2 & SYM_FIELD) &&
3424 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3425 Sym **pps;
3426 /* This happens only with MS extensions. The
3427 anon member has a named struct type, so it
3428 potentially is shared with other references.
3429 We need to unshare members so we can modify
3430 them. */
3431 ass = f->type.ref;
3432 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3433 &f->type.ref->type, 0,
3434 f->type.ref->c);
3435 pps = &f->type.ref->next;
3436 while ((ass = ass->next) != NULL) {
3437 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3438 pps = &((*pps)->next);
3440 *pps = NULL;
3442 struct_add_offset(f->type.ref, offset);
3443 f->c = 0;
3444 } else {
3445 f->c = offset;
3448 f->r = 0;
3450 /* store size and alignment */
3451 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3452 + maxalign - 1) & -maxalign;
3453 type->ref->r = maxalign;
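/* Worked example (illustrative; assumes a 4-byte, 4-aligned int):

       struct s { char c; int f1 : 4, f2 : 4; };

   PCC layout (the default, GCC-compatible): the bit-fields are packed
   directly after 'c', so f1 occupies bits 8..11 and f2 bits 12..15 of
   the first word; sizeof(struct s) == 4, alignment 4.

   MS layout (-mms-bitfields): a run of 'int' bit-fields always uses at
   least a whole int, aligned as an int, so the container starts at
   offset 4 and sizeof(struct s) == 8.

   A zero-width bit-field ("int : 0;") closes the current run in both
   modes and forces the next field to a freshly aligned slot. */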
3456 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3457 static void struct_decl(CType *type, AttributeDef *ad, int u)
3459 int extra_bytes;
3460 int a, v, size, align, flexible, alignoverride;
3461 long c;
3462 int bit_size, bit_pos, bsize, bt, prevbt;
3463 Sym *s, *ss, **ps;
3464 AttributeDef ad1;
3465 CType type1, btype;
3467 a = tok; /* save decl type */
3468 next();
3469 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3470 parse_attribute(ad);
3471 if (tok != '{') {
3472 v = tok;
3473 next();
3474 /* struct already defined ? return it */
3475 if (v < TOK_IDENT)
3476 expect("struct/union/enum name");
3477 s = struct_find(v);
3478 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3479 if (s->type.t != a)
3480 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3481 goto do_decl;
3483 } else {
3484 v = anon_sym++;
3486 /* Record the original enum/struct/union token. */
3487 type1.t = a;
3488 type1.ref = NULL;
3489 /* we put an undefined size for struct/union */
3490 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3491 s->r = 0; /* default alignment is zero as gcc */
3492 /* put struct/union/enum name in type */
3493 do_decl:
3494 type->t = u;
3495 type->ref = s;
3497 if (tok == '{') {
3498 next();
3499 if (s->c != -1)
3500 tcc_error("struct/union/enum already defined");
3501 /* cannot be empty */
3502 c = 0;
3503 /* empty enums are not allowed */
3504 if (a == TOK_ENUM) {
3505 int seen_neg = 0;
3506 int seen_wide = 0;
3507 for(;;) {
3508 CType *t = &int_type;
3509 v = tok;
3510 if (v < TOK_UIDENT)
3511 expect("identifier");
3512 ss = sym_find(v);
3513 if (ss && !local_stack)
3514 tcc_error("redefinition of enumerator '%s'",
3515 get_tok_str(v, NULL));
3516 next();
3517 if (tok == '=') {
3518 next();
3519 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3520 c = expr_const64();
3521 #else
3522 /* We really want to support long long enums
3523 on i386 as well, but the Sym structure only
3524 holds a 'long' for associated constants,
3525 and enlarging it would bump its size (no
3526 available padding). So punt for now. */
3527 c = expr_const();
3528 #endif
3530 if (c < 0)
3531 seen_neg = 1;
3532 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3533 seen_wide = 1, t = &size_type;
3534 /* enum symbols have static storage */
3535 ss = sym_push(v, t, VT_CONST, c);
3536 ss->type.t |= VT_STATIC;
3537 if (tok != ',')
3538 break;
3539 next();
3540 c++;
3541 /* NOTE: we accept a trailing comma */
3542 if (tok == '}')
3543 break;
3545 if (!seen_neg)
3546 s->a.unsigned_enum = 1;
3547 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3548 skip('}');
3549 } else {
3550 ps = &s->next;
3551 prevbt = VT_INT;
3552 bit_pos = 0;
3553 flexible = 0;
3554 while (tok != '}') {
3555 if (!parse_btype(&btype, &ad1)) {
3556 skip(';');
3557 continue;
3559 while (1) {
3560 extra_bytes = 0;
3561 if (flexible)
3562 tcc_error("flexible array member '%s' not at the end of struct",
3563 get_tok_str(v, NULL));
3564 bit_size = -1;
3565 v = 0;
3566 type1 = btype;
3567 if (tok != ':') {
3568 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3569 if (v == 0) {
3570 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3571 expect("identifier");
3572 else {
3573 int v = btype.ref->v;
3574 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3575 if (tcc_state->ms_extensions == 0)
3576 expect("identifier");
3580 if (type_size(&type1, &align) < 0) {
3581 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3582 flexible = 1;
3583 else
3584 tcc_error("field '%s' has incomplete type",
3585 get_tok_str(v, NULL));
3587 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3588 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3589 tcc_error("invalid type for '%s'",
3590 get_tok_str(v, NULL));
3592 if (tok == ':') {
3593 next();
3594 bit_size = expr_const();
3595 /* XXX: handle v = 0 case for messages */
3596 if (bit_size < 0)
3597 tcc_error("negative width in bit-field '%s'",
3598 get_tok_str(v, NULL));
3599 if (v && bit_size == 0)
3600 tcc_error("zero width for bit-field '%s'",
3601 get_tok_str(v, NULL));
3602 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3603 parse_attribute(&ad1);
3605 size = type_size(&type1, &align);
3606 /* Only remember non-default alignment. */
3607 alignoverride = 0;
3608 if (ad1.a.aligned) {
3609 if (align < ad1.a.aligned)
3610 alignoverride = ad1.a.aligned;
3611 } else if (ad1.a.packed || ad->a.packed) {
3612 alignoverride = 1;
3613 } else if (*tcc_state->pack_stack_ptr) {
3614 if (align > *tcc_state->pack_stack_ptr)
3615 alignoverride = *tcc_state->pack_stack_ptr;
3617 if (bit_size >= 0) {
3618 bt = type1.t & VT_BTYPE;
3619 if (bt != VT_INT &&
3620 bt != VT_BYTE &&
3621 bt != VT_SHORT &&
3622 bt != VT_BOOL &&
3623 bt != VT_ENUM &&
3624 bt != VT_LLONG)
3625 tcc_error("bitfields must have scalar type");
3626 bsize = size * 8;
3627 if (bit_size > bsize) {
3628 tcc_error("width of '%s' exceeds its type",
3629 get_tok_str(v, NULL));
3630 } else if (bit_size == bsize) {
3631 /* no need for bit fields */
3632 bit_pos = 0;
3633 } else {
3634 /* if the type changes, this is a union, or we would overrun
3635 * the alignment slot, start at a newly
3636 * aligned slot */
3637 if ((bit_pos + bit_size) > bsize ||
3638 bt != prevbt || a == TOK_UNION)
3639 bit_pos = 0;
3640 /* XXX: handle LSB first */
3641 type1.t |= VT_BITFIELD |
3642 (bit_pos << VT_STRUCT_SHIFT) |
3643 (bit_size << (VT_STRUCT_SHIFT + 6));
3644 bit_pos += bit_size;
3646 prevbt = bt;
3647 } else {
3648 bit_pos = 0;
3650 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3651 /* Remember we've seen a real field to check
3652 for placement of flexible array member. */
3653 c = 1;
3655 if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
3656 /* See struct_layout for special casing
3657 anonymous member of struct type. */
3658 v = anon_sym++;
3660 if (v == 0 && bit_size >= 0) {
3661 /* Need to remember anon bit-fields as well.
3662 They influence layout. */
3663 v = anon_sym++;
3665 if (v) {
3666 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, extra_bytes);
3667 *ps = ss;
3668 ps = &ss->next;
3670 if (tok == ';' || tok == TOK_EOF)
3671 break;
3672 skip(',');
3674 skip(';');
3676 skip('}');
3677 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3678 parse_attribute(ad);
3679 struct_layout(type, ad);
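/* Illustrative examples for the enum branch above:

       enum color { RED, GREEN, BLUE };   no negative value seen, so
                                          a.unsigned_enum is set
       enum err { OK = 0, FAIL = -1 };    seen_neg, stays signed
       enum big { B = 0x123456789 };      seen_wide on 64-bit targets, so
                                          s->c takes size_type's size

   For struct/union bodies the fields are collected on s->next and the
   final offsets, size and alignment are assigned by struct_layout(). */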
3684 /* return 1 if basic type is a type size (short, long, long long) */
3685 ST_FUNC int is_btype_size(int bt)
3687 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3690 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3691 are added to the element type, copied because it could be a typedef. */
3692 static void parse_btype_qualify(CType *type, int qualifiers)
3694 while (type->t & VT_ARRAY) {
3695 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3696 type = &type->ref->type;
3698 type->t |= qualifiers;
3701 /* return 0 if no type declaration. otherwise, return the basic type
3702 and skip it.
3704 static int parse_btype(CType *type, AttributeDef *ad)
3706 int t, u, bt_size, complete, type_found, typespec_found;
3707 Sym *s;
3708 CType type1;
3710 memset(ad, 0, sizeof(AttributeDef));
3711 complete = 0;
3712 type_found = 0;
3713 typespec_found = 0;
3714 t = 0;
3715 while(1) {
3716 switch(tok) {
3717 case TOK_EXTENSION:
3718 /* currently, we really ignore extension */
3719 next();
3720 continue;
3722 /* basic types */
3723 case TOK_CHAR:
3724 u = VT_BYTE;
3725 basic_type:
3726 next();
3727 basic_type1:
3728 if (complete)
3729 tcc_error("too many basic types");
3730 t |= u;
3731 bt_size = is_btype_size (u & VT_BTYPE);
3732 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3733 complete = 1;
3734 typespec_found = 1;
3735 break;
3736 case TOK_VOID:
3737 u = VT_VOID;
3738 goto basic_type;
3739 case TOK_SHORT:
3740 u = VT_SHORT;
3741 goto basic_type;
3742 case TOK_INT:
3743 u = VT_INT;
3744 goto basic_type;
3745 case TOK_LONG:
3746 next();
3747 if ((t & VT_BTYPE) == VT_DOUBLE) {
3748 #ifndef TCC_TARGET_PE
3749 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3750 #endif
3751 } else if ((t & VT_BTYPE) == VT_LONG) {
3752 t = (t & ~VT_BTYPE) | VT_LLONG;
3753 } else {
3754 u = VT_LONG;
3755 goto basic_type1;
3757 break;
3758 #ifdef TCC_TARGET_ARM64
3759 case TOK_UINT128:
3760 /* GCC's __uint128_t appears in some Linux header files. Make it a
3761 synonym for long double to get the size and alignment right. */
3762 u = VT_LDOUBLE;
3763 goto basic_type;
3764 #endif
3765 case TOK_BOOL:
3766 u = VT_BOOL;
3767 goto basic_type;
3768 case TOK_FLOAT:
3769 u = VT_FLOAT;
3770 goto basic_type;
3771 case TOK_DOUBLE:
3772 next();
3773 if ((t & VT_BTYPE) == VT_LONG) {
3774 #ifdef TCC_TARGET_PE
3775 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3776 #else
3777 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3778 #endif
3779 } else {
3780 u = VT_DOUBLE;
3781 goto basic_type1;
3783 break;
3784 case TOK_ENUM:
3785 struct_decl(&type1, ad, VT_ENUM);
3786 basic_type2:
3787 u = type1.t;
3788 type->ref = type1.ref;
3789 goto basic_type1;
3790 case TOK_STRUCT:
3791 case TOK_UNION:
3792 struct_decl(&type1, ad, VT_STRUCT);
3793 goto basic_type2;
3795 /* type modifiers */
3796 case TOK_CONST1:
3797 case TOK_CONST2:
3798 case TOK_CONST3:
3799 type->t = t;
3800 parse_btype_qualify(type, VT_CONSTANT);
3801 t = type->t;
3802 next();
3803 break;
3804 case TOK_VOLATILE1:
3805 case TOK_VOLATILE2:
3806 case TOK_VOLATILE3:
3807 type->t = t;
3808 parse_btype_qualify(type, VT_VOLATILE);
3809 t = type->t;
3810 next();
3811 break;
3812 case TOK_SIGNED1:
3813 case TOK_SIGNED2:
3814 case TOK_SIGNED3:
3815 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3816 tcc_error("signed and unsigned modifier");
3817 typespec_found = 1;
3818 t |= VT_DEFSIGN;
3819 next();
3820 break;
3821 case TOK_REGISTER:
3822 case TOK_AUTO:
3823 case TOK_RESTRICT1:
3824 case TOK_RESTRICT2:
3825 case TOK_RESTRICT3:
3826 next();
3827 break;
3828 case TOK_UNSIGNED:
3829 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3830 tcc_error("signed and unsigned modifier");
3831 t |= VT_DEFSIGN | VT_UNSIGNED;
3832 next();
3833 typespec_found = 1;
3834 break;
3836 /* storage */
3837 case TOK_EXTERN:
3838 t |= VT_EXTERN;
3839 next();
3840 break;
3841 case TOK_STATIC:
3842 t |= VT_STATIC;
3843 next();
3844 break;
3845 case TOK_TYPEDEF:
3846 t |= VT_TYPEDEF;
3847 next();
3848 break;
3849 case TOK_INLINE1:
3850 case TOK_INLINE2:
3851 case TOK_INLINE3:
3852 t |= VT_INLINE;
3853 next();
3854 break;
3856 /* GNUC attribute */
3857 case TOK_ATTRIBUTE1:
3858 case TOK_ATTRIBUTE2:
3859 parse_attribute(ad);
3860 if (ad->a.mode) {
3861 u = ad->a.mode -1;
3862 t = (t & ~VT_BTYPE) | u;
3864 break;
3865 /* GNUC typeof */
3866 case TOK_TYPEOF1:
3867 case TOK_TYPEOF2:
3868 case TOK_TYPEOF3:
3869 next();
3870 parse_expr_type(&type1);
3871 /* remove all storage modifiers except typedef */
3872 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3873 goto basic_type2;
3874 default:
3875 if (typespec_found)
3876 goto the_end;
3877 s = sym_find(tok);
3878 if (!s || !(s->type.t & VT_TYPEDEF))
3879 goto the_end;
3881 type->t = ((s->type.t & ~VT_TYPEDEF) |
3882 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3883 type->ref = s->type.ref;
3884 if (t & (VT_CONSTANT | VT_VOLATILE))
3885 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3886 t = type->t;
3888 if (s->r) {
3889 /* get attributes from typedef */
3890 if (0 == ad->a.aligned)
3891 ad->a.aligned = s->a.aligned;
3892 if (0 == ad->a.func_call)
3893 ad->a.func_call = s->a.func_call;
3894 ad->a.packed |= s->a.packed;
3896 next();
3897 typespec_found = 1;
3898 break;
3900 type_found = 1;
3902 the_end:
3903 if (tcc_state->char_is_unsigned) {
3904 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3905 t |= VT_UNSIGNED;
3908 /* long is never used as type */
3909 if ((t & VT_BTYPE) == VT_LONG)
3910 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3911 defined TCC_TARGET_PE
3912 t = (t & ~VT_BTYPE) | VT_INT;
3913 #else
3914 t = (t & ~VT_BTYPE) | VT_LLONG;
3915 #endif
3916 type->t = t;
3917 return type_found;
3920 /* convert a function parameter type (array to pointer and function to
3921 function pointer) */
3922 static inline void convert_parameter_type(CType *pt)
3924 /* remove const and volatile qualifiers (XXX: const could be used
3925 to indicate a const function parameter) */
3926 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3927 /* array must be transformed to pointer according to ANSI C */
3928 pt->t &= ~VT_ARRAY;
3929 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3930 mk_pointer(pt);
3934 ST_FUNC void parse_asm_str(CString *astr)
3936 skip('(');
3937 parse_mult_str(astr, "string constant");
3940 /* Parse an asm label and return the token */
3941 static int asm_label_instr(void)
3943 int v;
3944 CString astr;
3946 next();
3947 parse_asm_str(&astr);
3948 skip(')');
3949 #ifdef ASM_DEBUG
3950 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3951 #endif
3952 v = tok_alloc(astr.data, astr.size - 1)->tok;
3953 cstr_free(&astr);
3954 return v;
3957 static void post_type(CType *type, AttributeDef *ad, int storage)
3959 int n, l, t1, arg_size, align;
3960 Sym **plast, *s, *first;
3961 AttributeDef ad1;
3962 CType pt;
3964 if (tok == '(') {
3965 /* function declaration */
3966 next();
3967 l = 0;
3968 first = NULL;
3969 plast = &first;
3970 arg_size = 0;
3971 if (tok != ')') {
3972 for(;;) {
3973 /* read param name and compute offset */
3974 if (l != FUNC_OLD) {
3975 if (!parse_btype(&pt, &ad1)) {
3976 if (l) {
3977 tcc_error("invalid type");
3978 } else {
3979 l = FUNC_OLD;
3980 goto old_proto;
3983 l = FUNC_NEW;
3984 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3985 break;
3986 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3987 if ((pt.t & VT_BTYPE) == VT_VOID)
3988 tcc_error("parameter declared as void");
3989 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3990 } else {
3991 old_proto:
3992 n = tok;
3993 if (n < TOK_UIDENT)
3994 expect("identifier");
3995 pt.t = VT_INT;
3996 next();
3998 convert_parameter_type(&pt);
3999 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4000 *plast = s;
4001 plast = &s->next;
4002 if (tok == ')')
4003 break;
4004 skip(',');
4005 if (l == FUNC_NEW && tok == TOK_DOTS) {
4006 l = FUNC_ELLIPSIS;
4007 next();
4008 break;
4012 /* if no parameters, then old type prototype */
4013 if (l == 0)
4014 l = FUNC_OLD;
4015 skip(')');
4016 /* NOTE: const is ignored in returned type as it has a special
4017 meaning in gcc / C++ */
4018 type->t &= ~VT_CONSTANT;
4019 /* some ancient pre-K&R C allows a function to return an array
4020 and the array brackets to be put after the arguments, such
4021 that "int c()[]" means something like "int[] c()" */
4022 if (tok == '[') {
4023 next();
4024 skip(']'); /* only handle simple "[]" */
4025 type->t |= VT_PTR;
4027 /* we push an anonymous symbol which will contain the function prototype */
4028 ad->a.func_args = arg_size;
4029 s = sym_push(SYM_FIELD, type, 0, l);
4030 s->a = ad->a;
4031 s->next = first;
4032 type->t = VT_FUNC;
4033 type->ref = s;
4034 } else if (tok == '[') {
4035 int saved_nocode_wanted = nocode_wanted;
4036 /* array definition */
4037 next();
4038 if (tok == TOK_RESTRICT1)
4039 next();
4040 n = -1;
4041 t1 = 0;
4042 if (tok != ']') {
4043 if (!local_stack || (storage & VT_STATIC))
4044 vpushi(expr_const());
4045 else {
4046 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4047 length must always be evaluated, even under nocode_wanted,
4048 so that its size slot is initialized (e.g. under sizeof
4049 or typeof). */
4050 nocode_wanted = 0;
4051 gexpr();
4053 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4054 n = vtop->c.i;
4055 if (n < 0)
4056 tcc_error("invalid array size");
4057 } else {
4058 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4059 tcc_error("size of variable length array should be an integer");
4060 t1 = VT_VLA;
4063 skip(']');
4064 /* parse next post type */
4065 post_type(type, ad, storage);
4066 if (type->t == VT_FUNC)
4067 tcc_error("declaration of an array of functions");
4068 t1 |= type->t & VT_VLA;
4070 if (t1 & VT_VLA) {
4071 loc -= type_size(&int_type, &align);
4072 loc &= -align;
4073 n = loc;
4075 vla_runtime_type_size(type, &align);
4076 gen_op('*');
4077 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4078 vswap();
4079 vstore();
4081 if (n != -1)
4082 vpop();
4083 nocode_wanted = saved_nocode_wanted;
4085 /* we push an anonymous symbol which will contain the array
4086 element type */
4087 s = sym_push(SYM_FIELD, type, 0, n);
4088 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4089 type->ref = s;
4093 /* Parse a type declaration (except basic type), and return the type
4094 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4095 expected. 'type' should contain the basic type. 'ad' is the
4096 attribute definition of the basic type. It can be modified by
4097 type_decl().
4099 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4101 Sym *s;
4102 CType type1, *type2;
4103 int qualifiers, storage;
4105 while (tok == '*') {
4106 qualifiers = 0;
4107 redo:
4108 next();
4109 switch(tok) {
4110 case TOK_CONST1:
4111 case TOK_CONST2:
4112 case TOK_CONST3:
4113 qualifiers |= VT_CONSTANT;
4114 goto redo;
4115 case TOK_VOLATILE1:
4116 case TOK_VOLATILE2:
4117 case TOK_VOLATILE3:
4118 qualifiers |= VT_VOLATILE;
4119 goto redo;
4120 case TOK_RESTRICT1:
4121 case TOK_RESTRICT2:
4122 case TOK_RESTRICT3:
4123 goto redo;
4124 /* XXX: clarify attribute handling */
4125 case TOK_ATTRIBUTE1:
4126 case TOK_ATTRIBUTE2:
4127 parse_attribute(ad);
4128 break;
4130 mk_pointer(type);
4131 type->t |= qualifiers;
4134 /* recursive type */
4135 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4136 type1.t = 0; /* XXX: same as int */
4137 if (tok == '(') {
4138 next();
4139 /* XXX: this is not correct to modify 'ad' at this point, but
4140 the syntax is not clear */
4141 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4142 parse_attribute(ad);
4143 type_decl(&type1, ad, v, td);
4144 skip(')');
4145 } else {
4146 /* type identifier */
4147 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4148 *v = tok;
4149 next();
4150 } else {
4151 if (!(td & TYPE_ABSTRACT))
4152 expect("identifier");
4153 *v = 0;
4156 storage = type->t & VT_STORAGE;
4157 type->t &= ~VT_STORAGE;
4158 post_type(type, ad, storage);
4159 type->t |= storage;
4160 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4161 parse_attribute(ad);
4163 if (!type1.t)
4164 return;
4165 /* append type at the end of type1 */
4166 type2 = &type1;
4167 for(;;) {
4168 s = type2->ref;
4169 type2 = &s->type;
4170 if (!type2->t) {
4171 *type2 = *type;
4172 break;
4175 *type = type1;
4178 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4179 ST_FUNC int lvalue_type(int t)
4181 int bt, r;
4182 r = VT_LVAL;
4183 bt = t & VT_BTYPE;
4184 if (bt == VT_BYTE || bt == VT_BOOL)
4185 r |= VT_LVAL_BYTE;
4186 else if (bt == VT_SHORT)
4187 r |= VT_LVAL_SHORT;
4188 else
4189 return r;
4190 if (t & VT_UNSIGNED)
4191 r |= VT_LVAL_UNSIGNED;
4192 return r;
4195 /* indirection with full error checking and bound check */
4196 ST_FUNC void indir(void)
4198 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4199 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4200 return;
4201 expect("pointer");
4203 if ((vtop->r & VT_LVAL) && !nocode_wanted)
4204 gv(RC_INT);
4205 vtop->type = *pointed_type(&vtop->type);
4206 /* Arrays and functions are never lvalues */
4207 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4208 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4209 vtop->r |= lvalue_type(vtop->type.t);
4210 /* if bound checking, the referenced pointer must be checked */
4211 #ifdef CONFIG_TCC_BCHECK
4212 if (tcc_state->do_bounds_check)
4213 vtop->r |= VT_MUSTBOUND;
4214 #endif
4218 /* pass a parameter to a function and do type checking and casting */
4219 static void gfunc_param_typed(Sym *func, Sym *arg)
4221 int func_type;
4222 CType type;
4224 func_type = func->c;
4225 if (func_type == FUNC_OLD ||
4226 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4227 /* default casting : only need to convert float to double */
4228 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4229 type.t = VT_DOUBLE;
4230 gen_cast(&type);
4231 } else if (vtop->type.t & VT_BITFIELD) {
4232 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4233 type.ref = vtop->type.ref;
4234 gen_cast(&type);
4236 } else if (arg == NULL) {
4237 tcc_error("too many arguments to function");
4238 } else {
4239 type = arg->type;
4240 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4241 gen_assign_cast(&type);
4245 /* parse an expression of the form '(type)' or '(expr)' and return its
4246 type */
4247 static void parse_expr_type(CType *type)
4249 int n;
4250 AttributeDef ad;
4252 skip('(');
4253 if (parse_btype(type, &ad)) {
4254 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4255 } else {
4256 expr_type(type);
4258 skip(')');
4261 static void parse_type(CType *type)
4263 AttributeDef ad;
4264 int n;
4266 if (!parse_btype(type, &ad)) {
4267 expect("type");
4269 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4272 static void vpush_tokc(int t)
4274 CType type;
4275 type.t = t;
4276 type.ref = 0;
4277 vsetc(&type, VT_CONST, &tokc);
4280 ST_FUNC void unary(void)
4282 int n, t, align, size, r, sizeof_caller;
4283 CType type;
4284 Sym *s;
4285 AttributeDef ad;
4287 sizeof_caller = in_sizeof;
4288 in_sizeof = 0;
4289 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4290 although it would be better here */
4291 tok_next:
4292 switch(tok) {
4293 case TOK_EXTENSION:
4294 next();
4295 goto tok_next;
4296 case TOK_CINT:
4297 case TOK_CCHAR:
4298 case TOK_LCHAR:
4299 vpushi(tokc.i);
4300 next();
4301 break;
4302 case TOK_CUINT:
4303 vpush_tokc(VT_INT | VT_UNSIGNED);
4304 next();
4305 break;
4306 case TOK_CLLONG:
4307 vpush_tokc(VT_LLONG);
4308 next();
4309 break;
4310 case TOK_CULLONG:
4311 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4312 next();
4313 break;
4314 case TOK_CFLOAT:
4315 vpush_tokc(VT_FLOAT);
4316 next();
4317 break;
4318 case TOK_CDOUBLE:
4319 vpush_tokc(VT_DOUBLE);
4320 next();
4321 break;
4322 case TOK_CLDOUBLE:
4323 vpush_tokc(VT_LDOUBLE);
4324 next();
4325 break;
4326 case TOK___FUNCTION__:
4327 if (!gnu_ext)
4328 goto tok_identifier;
4329 /* fall thru */
4330 case TOK___FUNC__:
4332 void *ptr;
4333 int len;
4334 /* special function name identifier */
4335 len = strlen(funcname) + 1;
4336 /* generate char[len] type */
4337 type.t = VT_BYTE;
4338 mk_pointer(&type);
4339 type.t |= VT_ARRAY;
4340 type.ref->c = len;
4341 vpush_ref(&type, data_section, data_section->data_offset, len);
4342 ptr = section_ptr_add(data_section, len);
4343 memcpy(ptr, funcname, len);
4344 next();
4346 break;
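/* What the case above builds, roughly: __func__ / __FUNCTION__ behave
   like a char array in the data section holding the enclosing function's
   name, e.g.

       void foo(void) { puts(__func__); }   // prints "foo"; type here is char[4]
*/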
4347 case TOK_LSTR:
4348 #ifdef TCC_TARGET_PE
4349 t = VT_SHORT | VT_UNSIGNED;
4350 #else
4351 t = VT_INT;
4352 #endif
4353 goto str_init;
4354 case TOK_STR:
4355 /* string parsing */
4356 t = VT_BYTE;
4357 str_init:
4358 if (tcc_state->warn_write_strings)
4359 t |= VT_CONSTANT;
4360 type.t = t;
4361 mk_pointer(&type);
4362 type.t |= VT_ARRAY;
4363 memset(&ad, 0, sizeof(AttributeDef));
4364 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4365 break;
4366 case '(':
4367 next();
4368 /* cast ? */
4369 if (parse_btype(&type, &ad)) {
4370 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4371 skip(')');
4372 /* check ISOC99 compound literal */
4373 if (tok == '{') {
4374 /* data is allocated locally by default */
4375 if (global_expr)
4376 r = VT_CONST;
4377 else
4378 r = VT_LOCAL;
4379 /* all except arrays are lvalues */
4380 if (!(type.t & VT_ARRAY))
4381 r |= lvalue_type(type.t);
4382 memset(&ad, 0, sizeof(AttributeDef));
4383 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4384 } else {
4385 if (sizeof_caller) {
4386 vpush(&type);
4387 return;
4389 unary();
4390 gen_cast(&type);
4392 } else if (tok == '{') {
4393 if (const_wanted)
4394 tcc_error("expected constant");
4395 /* save all registers */
4396 if (!nocode_wanted)
4397 save_regs(0);
4398 /* statement expression: we do not accept break/continue
4399 inside as GCC does */
4400 block(NULL, NULL, 1);
4401 skip(')');
4402 } else {
4403 gexpr();
4404 skip(')');
4406 break;
4407 case '*':
4408 next();
4409 unary();
4410 indir();
4411 break;
4412 case '&':
4413 next();
4414 unary();
4415 /* function names must be treated as function pointers,
4416 except for unary '&' and sizeof. Since we consider that
4417 functions are not lvalues, we only have to handle it
4418 there and in function calls. */
4419 /* arrays can also be used although they are not lvalues */
4420 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4421 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4422 test_lvalue();
4423 mk_pointer(&vtop->type);
4424 gaddrof();
4425 break;
4426 case '!':
4427 next();
4428 unary();
4429 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4430 CType boolean;
4431 boolean.t = VT_BOOL;
4432 gen_cast(&boolean);
4433 vtop->c.i = !vtop->c.i;
4434 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4435 vtop->c.i ^= 1;
4436 else {
4437 save_regs(1);
4438 vseti(VT_JMP, gvtst(1, 0));
4440 break;
4441 case '~':
4442 next();
4443 unary();
4444 vpushi(-1);
4445 gen_op('^');
4446 break;
4447 case '+':
4448 next();
4449 unary();
4450 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4451 tcc_error("pointer not accepted for unary plus");
4452 /* In order to force cast, we add zero, except for floating point
4453 where we really need a no-op (otherwise -0.0 will be transformed
4454 into +0.0). */
4455 if (!is_float(vtop->type.t)) {
4456 vpushi(0);
4457 gen_op('+');
4459 break;
4460 case TOK_SIZEOF:
4461 case TOK_ALIGNOF1:
4462 case TOK_ALIGNOF2:
4463 t = tok;
4464 next();
4465 in_sizeof++;
4466 unary_type(&type); // calls unary(), which resets in_sizeof to 0
4467 size = type_size(&type, &align);
4468 if (t == TOK_SIZEOF) {
4469 if (!(type.t & VT_VLA)) {
4470 if (size < 0)
4471 tcc_error("sizeof applied to an incomplete type");
4472 vpushs(size);
4473 } else {
4474 vla_runtime_type_size(&type, &align);
4476 } else {
4477 vpushs(align);
4479 vtop->type.t |= VT_UNSIGNED;
4480 break;
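/* Example of the constant vs. VLA paths above (sizes assume a 4-byte
   int); either way the pushed result is made unsigned, i.e. size_t-like:

       int n = 10;
       int a[4], b[n];
       unsigned long s1 = sizeof a;   // compile-time constant: 16
       unsigned long s2 = sizeof b;   // computed at run time: n * 4
*/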
4482 case TOK_builtin_expect:
4484 /* __builtin_expect is a no-op for now */
4485 int saved_nocode_wanted;
4486 next();
4487 skip('(');
4488 expr_eq();
4489 skip(',');
4490 saved_nocode_wanted = nocode_wanted;
4491 nocode_wanted = 1;
4492 expr_lor_const();
4493 vpop();
4494 nocode_wanted = saved_nocode_wanted;
4495 skip(')');
4497 break;
4498 case TOK_builtin_types_compatible_p:
4500 CType type1, type2;
4501 next();
4502 skip('(');
4503 parse_type(&type1);
4504 skip(',');
4505 parse_type(&type2);
4506 skip(')');
4507 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4508 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4509 vpushi(is_compatible_types(&type1, &type2));
4511 break;
4512 case TOK_builtin_choose_expr:
4514 int saved_nocode_wanted;
4515 int64_t c;
4516 next();
4517 skip('(');
4518 c = expr_const64();
4519 skip(',');
4520 if (!c) {
4521 saved_nocode_wanted = nocode_wanted;
4522 nocode_wanted = 1;
4524 expr_eq();
4525 if (!c) {
4526 vpop();
4527 nocode_wanted = saved_nocode_wanted;
4529 skip(',');
4530 if (c) {
4531 saved_nocode_wanted = nocode_wanted;
4532 nocode_wanted = 1;
4534 expr_eq();
4535 if (c) {
4536 vpop();
4537 nocode_wanted = saved_nocode_wanted;
4539 skip(')');
4541 break;
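/* Usage sketch for the selection above: the controlling expression must
   be an integer constant, and only the chosen branch generates code; the
   other branch is parsed with nocode_wanted set and then popped.

       long v = __builtin_choose_expr(sizeof(long) == 8, 1L, 1);
*/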
4542 case TOK_builtin_constant_p:
4544 int saved_nocode_wanted, res;
4545 next();
4546 skip('(');
4547 saved_nocode_wanted = nocode_wanted;
4548 nocode_wanted = 1;
4549 gexpr();
4550 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4551 vpop();
4552 nocode_wanted = saved_nocode_wanted;
4553 skip(')');
4554 vpushi(res);
4556 break;
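/* Usage sketch: the argument is evaluated with code generation
   suppressed and the builtin folds to 1 only if the value ended up as a
   plain constant on the value stack.

       int a = __builtin_constant_p(3 * 7 + 1);            // 1
       int f(int x) { return __builtin_constant_p(x); }    // returns 0
*/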
4557 case TOK_builtin_frame_address:
4558 case TOK_builtin_return_address:
4560 int tok1 = tok;
4561 int level;
4562 CType type;
4563 next();
4564 skip('(');
4565 if (tok != TOK_CINT) {
4566 tcc_error("%s only takes positive integers",
4567 tok1 == TOK_builtin_return_address ?
4568 "__builtin_return_address" :
4569 "__builtin_frame_address");
4571 level = (uint32_t)tokc.i;
4572 next();
4573 skip(')');
4574 type.t = VT_VOID;
4575 mk_pointer(&type);
4576 vset(&type, VT_LOCAL, 0); /* local frame */
4577 while (level--) {
4578 mk_pointer(&vtop->type);
4579 indir(); /* -> parent frame */
4581 if (tok1 == TOK_builtin_return_address) {
4582 // assume return address is just above frame pointer on stack
4583 vpushi(PTR_SIZE);
4584 gen_op('+');
4585 mk_pointer(&vtop->type);
4586 indir();
4589 break;
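/* Sketch of what the loop above computes: level 0 is the current frame,
   each additional level dereferences one saved frame pointer, and for
   __builtin_return_address the word just above the frame pointer is read
   (which assumes frame pointers are kept in every frame walked).

       void *fp  = __builtin_frame_address(0);
       void *ret = __builtin_return_address(0);   // caller's resume address
*/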
4590 #ifdef TCC_TARGET_X86_64
4591 #ifdef TCC_TARGET_PE
4592 case TOK_builtin_va_start:
4594 next();
4595 skip('(');
4596 expr_eq();
4597 skip(',');
4598 expr_eq();
4599 skip(')');
4600 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4601 tcc_error("__builtin_va_start expects a local variable");
4602 vtop->r &= ~(VT_LVAL | VT_REF);
4603 vtop->type = char_pointer_type;
4604 vtop->c.i += 8;
4605 vstore();
4607 break;
4608 #else
4609 case TOK_builtin_va_arg_types:
4611 CType type;
4612 next();
4613 skip('(');
4614 parse_type(&type);
4615 skip(')');
4616 vpushi(classify_x86_64_va_arg(&type));
4618 break;
4619 #endif
4620 #endif
4622 #ifdef TCC_TARGET_ARM64
4623 case TOK___va_start: {
4624 if (nocode_wanted)
4625 tcc_error("statement in global scope");
4626 next();
4627 skip('(');
4628 expr_eq();
4629 skip(',');
4630 expr_eq();
4631 skip(')');
4632 //xx check types
4633 gen_va_start();
4634 vpushi(0);
4635 vtop->type.t = VT_VOID;
4636 break;
4638 case TOK___va_arg: {
4639 CType type;
4640 if (nocode_wanted)
4641 tcc_error("statement in global scope");
4642 next();
4643 skip('(');
4644 expr_eq();
4645 skip(',');
4646 parse_type(&type);
4647 skip(')');
4648 //xx check types
4649 gen_va_arg(&type);
4650 vtop->type = type;
4651 break;
4653 case TOK___arm64_clear_cache: {
4654 next();
4655 skip('(');
4656 expr_eq();
4657 skip(',');
4658 expr_eq();
4659 skip(')');
4660 gen_clear_cache();
4661 vpushi(0);
4662 vtop->type.t = VT_VOID;
4663 break;
4665 #endif
4666 /* pre operations */
4667 case TOK_INC:
4668 case TOK_DEC:
4669 t = tok;
4670 next();
4671 unary();
4672 inc(0, t);
4673 break;
4674 case '-':
4675 next();
4676 unary();
4677 t = vtop->type.t & VT_BTYPE;
4678 if (is_float(t)) {
4679 /* In IEEE negate(x) isn't subtract(0,x), but rather
4680 subtract(-0, x). */
4681 vpush(&vtop->type);
4682 if (t == VT_FLOAT)
4683 vtop->c.f = -0.0f;
4684 else if (t == VT_DOUBLE)
4685 vtop->c.d = -0.0;
4686 else
4687 vtop->c.ld = -0.0;
4688 } else
4689 vpushi(0);
4690 vswap();
4691 gen_op('-');
4692 break;
4693 case TOK_LAND:
4694 if (!gnu_ext)
4695 goto tok_identifier;
4696 next();
4697 /* allow taking the address of a label */
4698 if (tok < TOK_UIDENT)
4699 expect("label identifier");
4700 s = label_find(tok);
4701 if (!s) {
4702 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4703 } else {
4704 if (s->r == LABEL_DECLARED)
4705 s->r = LABEL_FORWARD;
4707 if (!s->type.t) {
4708 s->type.t = VT_VOID;
4709 mk_pointer(&s->type);
4710 s->type.t |= VT_STATIC;
4712 vpushsym(&s->type, s);
4713 next();
4714 break;
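/* GNU extension handled above: '&&label' pushes the label's address as a
   static 'void *', normally consumed later by a computed goto, e.g.

       void *tgt = &&out;
       goto *tgt;
   out:
       ;
*/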
4716 // special qNaN, sNaN and infinity values
4717 case TOK___NAN__:
4718 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4719 next();
4720 break;
4721 case TOK___SNAN__:
4722 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4723 next();
4724 break;
4725 case TOK___INF__:
4726 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4727 next();
4728 break;
4730 default:
4731 tok_identifier:
4732 t = tok;
4733 next();
4734 if (t < TOK_UIDENT)
4735 expect("identifier");
4736 s = sym_find(t);
4737 if (!s) {
4738 const char *name = get_tok_str(t, NULL);
4739 if (tok != '(')
4740 tcc_error("'%s' undeclared", name);
4741 /* for simple function calls, we tolerate an undeclared
4742 external reference to an int() function */
4743 if (tcc_state->warn_implicit_function_declaration
4744 #ifdef TCC_TARGET_PE
4745 /* people must be warned about using undeclared WINAPI functions
4746 (which usually start with an uppercase letter) */
4747 || (name[0] >= 'A' && name[0] <= 'Z')
4748 #endif
4750 tcc_warning("implicit declaration of function '%s'", name);
4751 s = external_global_sym(t, &func_old_type, 0);
4753 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4754 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4755 /* if referencing an inline function, then we generate a
4756 symbol for it if not already done. This has the
4757 effect of generating code for it at the end of the
4758 compilation unit. Inline functions are always
4759 generated in the text section. */
4760 if (!s->c && !nocode_wanted)
4761 put_extern_sym(s, text_section, 0, 0);
4762 r = VT_SYM | VT_CONST;
4763 } else {
4764 r = s->r;
4765 /* A symbol that has a register is a local register variable,
4766 which starts out as VT_LOCAL value. */
4767 if ((r & VT_VALMASK) < VT_CONST)
4768 r = (r & ~VT_VALMASK) | VT_LOCAL;
4770 vset(&s->type, r, s->c);
4771 /* Point to s as backpointer (even without r&VT_SYM).
4772 Will be used by at least the x86 inline asm parser for
4773 regvars. */
4774 vtop->sym = s;
4775 if (vtop->r & VT_SYM) {
4776 vtop->c.i = 0;
4778 break;
4781 /* post operations */
4782 while (1) {
4783 if (tok == TOK_INC || tok == TOK_DEC) {
4784 inc(1, tok);
4785 next();
4786 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4787 int qualifiers;
4788 /* field */
4789 if (tok == TOK_ARROW)
4790 indir();
4791 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4792 test_lvalue();
4793 gaddrof();
4794 /* expect pointer on structure */
4795 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4796 expect("struct or union");
4797 if (tok == TOK_CDOUBLE)
4798 expect("field name");
4799 next();
4800 if (tok == TOK_CINT || tok == TOK_CUINT)
4801 expect("field name");
4802 s = find_field(&vtop->type, tok);
4803 if (!s)
4804 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4805 /* add field offset to pointer */
4806 vtop->type = char_pointer_type; /* change type to 'char *' */
4807 vpushi(s->c);
4808 gen_op('+');
4809 /* change type to field type, and set to lvalue */
4810 vtop->type = s->type;
4811 vtop->type.t |= qualifiers;
4812 /* an array is never an lvalue */
4813 if (!(vtop->type.t & VT_ARRAY)) {
4814 vtop->r |= lvalue_type(vtop->type.t);
4815 #ifdef CONFIG_TCC_BCHECK
4816 /* if bound checking, the referenced pointer must be checked */
4817 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4818 vtop->r |= VT_MUSTBOUND;
4819 #endif
4821 next();
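/* What the member-access branch above does for 's.f' and 'p->f': take
   the address of the struct, retype it as 'char *', add the field offset
   (s->c), then retype the result as the field and mark it as an lvalue
   again. For example:

       struct P { int x; int y; } p;
       p.y = 1;   // address of p, plus offsetof(struct P, y), typed as int
*/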
4822 } else if (tok == '[') {
4823 next();
4824 gexpr();
4825 gen_op('+');
4826 indir();
4827 skip(']');
4828 } else if (tok == '(') {
4829 SValue ret;
4830 Sym *sa;
4831 int nb_args, ret_nregs, ret_align, regsize, variadic;
4833 /* function call */
4834 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4835 /* pointer test (no array accepted) */
4836 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4837 vtop->type = *pointed_type(&vtop->type);
4838 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4839 goto error_func;
4840 } else {
4841 error_func:
4842 expect("function pointer");
4844 } else {
4845 vtop->r &= ~VT_LVAL; /* no lvalue */
4847 /* get return type */
4848 s = vtop->type.ref;
4849 next();
4850 sa = s->next; /* first parameter */
4851 nb_args = 0;
4852 ret.r2 = VT_CONST;
4853 /* compute first implicit argument if a structure is returned */
4854 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4855 variadic = (s->c == FUNC_ELLIPSIS);
4856 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4857 &ret_align, &regsize);
4858 if (!ret_nregs) {
4859 /* get some space for the returned structure */
4860 size = type_size(&s->type, &align);
4861 #ifdef TCC_TARGET_ARM64
4862 /* On arm64, a small struct is returned in registers.
4863 It is much easier to write it to memory if we know
4864 that we are allowed to write some extra bytes, so
4865 round the allocated space up to a power of 2: */
4866 if (size < 16)
4867 while (size & (size - 1))
4868 size = (size | (size - 1)) + 1;
4869 #endif
4870 loc = (loc - size) & -align;
4871 ret.type = s->type;
4872 ret.r = VT_LOCAL | VT_LVAL;
4873 /* pass it as 'int' to avoid structure arg passing
4874 problems */
4875 vseti(VT_LOCAL, loc);
4876 ret.c = vtop->c;
4877 nb_args++;
4879 } else {
4880 ret_nregs = 1;
4881 ret.type = s->type;
4884 if (ret_nregs) {
4885 /* return in register */
4886 if (is_float(ret.type.t)) {
4887 ret.r = reg_fret(ret.type.t);
4888 #ifdef TCC_TARGET_X86_64
4889 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4890 ret.r2 = REG_QRET;
4891 #endif
4892 } else {
4893 #ifndef TCC_TARGET_ARM64
4894 #ifdef TCC_TARGET_X86_64
4895 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4896 #else
4897 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4898 #endif
4899 ret.r2 = REG_LRET;
4900 #endif
4901 ret.r = REG_IRET;
4903 ret.c.i = 0;
4905 if (tok != ')') {
4906 for(;;) {
4907 expr_eq();
4908 gfunc_param_typed(s, sa);
4909 nb_args++;
4910 if (sa)
4911 sa = sa->next;
4912 if (tok == ')')
4913 break;
4914 skip(',');
4917 if (sa)
4918 tcc_error("too few arguments to function");
4919 skip(')');
4920 if (!nocode_wanted) {
4921 gfunc_call(nb_args);
4922 } else {
4923 vtop -= (nb_args + 1);
4926 /* return value */
4927 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4928 vsetc(&ret.type, r, &ret.c);
4929 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4932 /* handle packed struct return */
4933 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4934 int addr, offset;
4936 size = type_size(&s->type, &align);
4937 /* We're often writing whole regs, so make sure there's enough
4938 space. Assume the register size is a power of 2. */
4939 if (regsize > align)
4940 align = regsize;
4941 loc = (loc - size) & -align;
4942 addr = loc;
4943 offset = 0;
4944 for (;;) {
4945 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4946 vswap();
4947 vstore();
4948 vtop--;
4949 if (--ret_nregs == 0)
4950 break;
4951 offset += regsize;
4953 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4955 } else {
4956 break;
4961 ST_FUNC void expr_prod(void)
4963 int t;
4965 unary();
4966 while (tok == '*' || tok == '/' || tok == '%') {
4967 t = tok;
4968 next();
4969 unary();
4970 gen_op(t);
4974 ST_FUNC void expr_sum(void)
4976 int t;
4978 expr_prod();
4979 while (tok == '+' || tok == '-') {
4980 t = tok;
4981 next();
4982 expr_prod();
4983 gen_op(t);
4987 static void expr_shift(void)
4989 int t;
4991 expr_sum();
4992 while (tok == TOK_SHL || tok == TOK_SAR) {
4993 t = tok;
4994 next();
4995 expr_sum();
4996 gen_op(t);
5000 static void expr_cmp(void)
5002 int t;
5004 expr_shift();
5005 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5006 tok == TOK_ULT || tok == TOK_UGE) {
5007 t = tok;
5008 next();
5009 expr_shift();
5010 gen_op(t);
5014 static void expr_cmpeq(void)
5016 int t;
5018 expr_cmp();
5019 while (tok == TOK_EQ || tok == TOK_NE) {
5020 t = tok;
5021 next();
5022 expr_cmp();
5023 gen_op(t);
5027 static void expr_and(void)
5029 expr_cmpeq();
5030 while (tok == '&') {
5031 next();
5032 expr_cmpeq();
5033 gen_op('&');
5037 static void expr_xor(void)
5039 expr_and();
5040 while (tok == '^') {
5041 next();
5042 expr_and();
5043 gen_op('^');
5047 static void expr_or(void)
5049 expr_xor();
5050 while (tok == '|') {
5051 next();
5052 expr_xor();
5053 gen_op('|');
5057 /* XXX: fix this mess */
5058 static void expr_land_const(void)
5060 expr_or();
5061 while (tok == TOK_LAND) {
5062 next();
5063 expr_or();
5064 gen_op(TOK_LAND);
5067 static void expr_lor_const(void)
5069 expr_land_const();
5070 while (tok == TOK_LOR) {
5071 next();
5072 expr_land_const();
5073 gen_op(TOK_LOR);
5077 static void expr_land(void)
5079 expr_or();
5080 if (tok == TOK_LAND) {
5081 int t = 0;
5082 for(;;) {
5083 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5084 CType ctb;
5085 ctb.t = VT_BOOL;
5086 gen_cast(&ctb);
5087 if (vtop->c.i) {
5088 vpop();
5089 } else {
5090 int saved_nocode_wanted = nocode_wanted;
5091 nocode_wanted = 1;
5092 while (tok == TOK_LAND) {
5093 next();
5094 expr_or();
5095 vpop();
5097 if (t)
5098 gsym(t);
5099 nocode_wanted = saved_nocode_wanted;
5100 gen_cast(&int_type);
5101 break;
5103 } else {
5104 if (!t)
5105 save_regs(1);
5106 t = gvtst(1, t);
5108 if (tok != TOK_LAND) {
5109 if (t)
5110 vseti(VT_JMPI, t);
5111 else
5112 vpushi(1);
5113 break;
5115 next();
5116 expr_or();
5121 static void expr_lor(void)
5123 expr_land();
5124 if (tok == TOK_LOR) {
5125 int t = 0;
5126 for(;;) {
5127 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5128 CType ctb;
5129 ctb.t = VT_BOOL;
5130 gen_cast(&ctb);
5131 if (!vtop->c.i) {
5132 vpop();
5133 } else {
5134 int saved_nocode_wanted = nocode_wanted;
5135 nocode_wanted = 1;
5136 while (tok == TOK_LOR) {
5137 next();
5138 expr_land();
5139 vpop();
5141 if (t)
5142 gsym(t);
5143 nocode_wanted = saved_nocode_wanted;
5144 gen_cast(&int_type);
5145 break;
5147 } else {
5148 if (!t)
5149 save_regs(1);
5150 t = gvtst(0, t);
5152 if (tok != TOK_LOR) {
5153 if (t)
5154 vseti(VT_JMP, t);
5155 else
5156 vpushi(0);
5157 break;
5159 next();
5160 expr_land();
5165 /* Assuming vtop is a value used in a conditional context
5166 (i.e. compared with zero) return 0 if it's false, 1 if
5167 true and -1 if it can't be statically determined. */
5168 static int condition_3way(void)
5170 int c = -1;
5171 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5172 (!(vtop->r & VT_SYM) ||
5173 !(vtop->sym->type.t & VT_WEAK))) {
5174 CType boolean;
5175 boolean.t = VT_BOOL;
5176 vdup();
5177 gen_cast(&boolean);
5178 c = vtop->c.i;
5179 vpop();
5181 return c;
5184 static void expr_cond(void)
5186 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv;
5187 int c;
5188 SValue sv;
5189 CType type, type1, type2;
5191 expr_lor();
5192 if (tok == '?') {
5193 next();
5194 c = condition_3way();
5195 if (c >= 0) {
5196 int saved_nocode_wanted = nocode_wanted;
5197 if (c) {
5198 if (tok != ':' || !gnu_ext) {
5199 vpop();
5200 gexpr();
5202 skip(':');
5203 nocode_wanted = 1;
5204 expr_cond();
5205 vpop();
5206 nocode_wanted = saved_nocode_wanted;
5207 } else {
5208 vpop();
5209 if (tok != ':' || !gnu_ext) {
5210 nocode_wanted = 1;
5211 gexpr();
5212 vpop();
5213 nocode_wanted = saved_nocode_wanted;
5215 skip(':');
5216 expr_cond();
5219 else {
5220 /* XXX This doesn't handle nocode_wanted correctly at all.
5221 It unconditionally calls gv/gvtst and friends. That's
5222 the case for many of the expr_ routines. Currently
5223 that should generate only useless code, but depending
5224 on other operand handling this might also generate
5225 pointer derefs for lvalue conversions whose result
5226 is useless, but nevertheless can lead to segfault.
5228 At some point we need to overhaul the whole nocode_wanted
5229 handling. */
5230 if (vtop != vstack) {
5231 /* needed to avoid having different registers saved in
5232 each branch */
5233 if (is_float(vtop->type.t)) {
5234 rc = RC_FLOAT;
5235 #ifdef TCC_TARGET_X86_64
5236 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5237 rc = RC_ST0;
5239 #endif
5241 else
5242 rc = RC_INT;
5243 gv(rc);
5244 save_regs(1);
5246 if (tok == ':' && gnu_ext) {
5247 gv_dup();
5248 tt = gvtst(1, 0);
5249 } else {
5250 tt = gvtst(1, 0);
5251 gexpr();
5253 type1 = vtop->type;
5254 sv = *vtop; /* save value to handle it later */
5255 vtop--; /* no vpop so that FP stack is not flushed */
5256 skip(':');
5257 u = gjmp(0);
5258 gsym(tt);
5259 expr_cond();
5260 type2 = vtop->type;
5262 t1 = type1.t;
5263 bt1 = t1 & VT_BTYPE;
5264 t2 = type2.t;
5265 bt2 = t2 & VT_BTYPE;
5266 /* cast operands to correct type according to ISOC rules */
5267 if (is_float(bt1) || is_float(bt2)) {
5268 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5269 type.t = VT_LDOUBLE;
5270 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5271 type.t = VT_DOUBLE;
5272 } else {
5273 type.t = VT_FLOAT;
5275 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5276 /* cast to biggest op */
5277 type.t = VT_LLONG;
5278 /* convert to unsigned if it does not fit in a long long */
5279 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5280 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5281 type.t |= VT_UNSIGNED;
5282 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5283 /* If one is a null ptr constant the result type
5284 is the other. */
5285 if (is_null_pointer (vtop))
5286 type = type1;
5287 else if (is_null_pointer (&sv))
5288 type = type2;
5289 /* XXX: test pointer compatibility, C99 has more elaborate
5290 rules here. */
5291 else
5292 type = type1;
5293 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5294 /* XXX: test function pointer compatibility */
5295 type = bt1 == VT_FUNC ? type1 : type2;
5296 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5297 /* XXX: test structure compatibility */
5298 type = bt1 == VT_STRUCT ? type1 : type2;
5299 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5300 /* NOTE: as an extension, we accept void on only one side */
5301 type.t = VT_VOID;
5302 } else {
5303 /* integer operations */
5304 type.t = VT_INT;
5305 /* convert to unsigned if it does not fit in an integer */
5306 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5307 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5308 type.t |= VT_UNSIGNED;
5310 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5311 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5312 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5314 /* now we convert second operand */
5315 gen_cast(&type);
5316 if (islv) {
5317 mk_pointer(&vtop->type);
5318 gaddrof();
5320 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5321 gaddrof();
5322 rc = RC_INT;
5323 if (is_float(type.t)) {
5324 rc = RC_FLOAT;
5325 #ifdef TCC_TARGET_X86_64
5326 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5327 rc = RC_ST0;
5329 #endif
5330 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5331 /* for long longs, we use fixed registers to avoid having
5332 to handle a complicated move */
5333 rc = RC_IRET;
5336 r2 = gv(rc);
5337 /* this is horrible, but we must also convert first
5338 operand */
5339 tt = gjmp(0);
5340 gsym(u);
5341 /* put again first value and cast it */
5342 *vtop = sv;
5343 gen_cast(&type);
5344 if (islv) {
5345 mk_pointer(&vtop->type);
5346 gaddrof();
5348 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5349 gaddrof();
5350 r1 = gv(rc);
5351 move_reg(r2, r1, type.t);
5352 vtop->r = r2;
5353 gsym(tt);
5354 if (islv)
5355 indir();
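/* Example of the lvalue-preserving rewrite used above: with struct
   operands, '(c ? a : b).m' is effectively compiled as
   '(*(c ? &a : &b)).m', so the member access still sees an lvalue.

       struct S { int m; } a, b;
       int c = 1;
       int v = (c ? a : b).m;
*/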
5360 static void expr_eq(void)
5362 int t;
5364 expr_cond();
5365 if (tok == '=' ||
5366 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5367 tok == TOK_A_XOR || tok == TOK_A_OR ||
5368 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5369 test_lvalue();
5370 t = tok;
5371 next();
5372 if (t == '=') {
5373 expr_eq();
5374 } else {
5375 vdup();
5376 expr_eq();
5377 gen_op(t & 0x7f);
5379 vstore();
5383 ST_FUNC void gexpr(void)
5385 while (1) {
5386 expr_eq();
5387 if (tok != ',')
5388 break;
5389 vpop();
5390 next();
5394 /* parse an expression and return its type without any side effect. */
5395 static void expr_type(CType *type)
5397 int saved_nocode_wanted;
5399 saved_nocode_wanted = nocode_wanted;
5400 nocode_wanted = 1;
5401 gexpr();
5402 *type = vtop->type;
5403 vpop();
5404 nocode_wanted = saved_nocode_wanted;
5407 /* parse a unary expression and return its type without any side
5408 effect. */
5409 static void unary_type(CType *type)
5411 int a;
5413 a = nocode_wanted;
5414 nocode_wanted = 1;
5415 unary();
5416 *type = vtop->type;
5417 vpop();
5418 nocode_wanted = a;
5421 /* parse a constant expression and return value in vtop. */
5422 static void expr_const1(void)
5424 int a;
5425 a = const_wanted;
5426 const_wanted = 1;
5427 expr_cond();
5428 const_wanted = a;
5431 /* parse an integer constant and return its value. */
5432 static inline int64_t expr_const64(void)
5434 int64_t c;
5435 expr_const1();
5436 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5437 expect("constant expression");
5438 c = vtop->c.i;
5439 vpop();
5440 return c;
5443 /* parse an integer constant and return its value.
5444 Complain if it doesn't fit 32bit (signed or unsigned). */
5445 ST_FUNC int expr_const(void)
5447 int c;
5448 int64_t wc = expr_const64();
5449 c = wc;
5450 if (c != wc && (unsigned)c != wc)
5451 tcc_error("constant exceeds 32 bit");
5452 return c;
5455 /* return the label token if current token is a label, otherwise
5456 return zero */
5457 static int is_label(void)
5459 int last_tok;
5461 /* fast test first */
5462 if (tok < TOK_UIDENT)
5463 return 0;
5464 /* no need to save tokc because tok is an identifier */
5465 last_tok = tok;
5466 next();
5467 if (tok == ':') {
5468 next();
5469 return last_tok;
5470 } else {
5471 unget_tok(last_tok);
5472 return 0;
5476 static void label_or_decl(int l)
5478 int last_tok;
5480 /* fast test first */
5481 if (tok >= TOK_UIDENT)
5483 /* no need to save tokc because tok is an identifier */
5484 last_tok = tok;
5485 next();
5486 if (tok == ':') {
5487 unget_tok(last_tok);
5488 return;
5490 unget_tok(last_tok);
5492 decl(l);
5495 static int case_cmp(const void *pa, const void *pb)
5497 int64_t a = (*(struct case_t**) pa)->v1;
5498 int64_t b = (*(struct case_t**) pb)->v1;
5499 return a < b ? -1 : a > b;
5502 static void gcase(struct case_t **base, int len, int *bsym)
5504 struct case_t *p;
5505 int e;
5506 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5507 gv(RC_INT);
5508 while (len > 4) {
5509 /* binary search */
5510 p = base[len/2];
5511 vdup();
5512 if (ll)
5513 vpushll(p->v2);
5514 else
5515 vpushi(p->v2);
5516 gen_op(TOK_LE);
5517 e = gtst(1, 0);
5518 vdup();
5519 if (ll)
5520 vpushll(p->v1);
5521 else
5522 vpushi(p->v1);
5523 gen_op(TOK_GE);
5524 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5525 /* x < v1 */
5526 gcase(base, len/2, bsym);
5527 if (cur_switch->def_sym)
5528 gjmp_addr(cur_switch->def_sym);
5529 else
5530 *bsym = gjmp(*bsym);
5531 /* x > v2 */
5532 gsym(e);
5533 e = len/2 + 1;
5534 base += e; len -= e;
5536 /* linear scan */
5537 while (len--) {
5538 p = *base++;
5539 vdup();
5540 if (ll)
5541 vpushll(p->v2);
5542 else
5543 vpushi(p->v2);
5544 if (p->v1 == p->v2) {
5545 gen_op(TOK_EQ);
5546 gtst_addr(0, p->sym);
5547 } else {
5548 gen_op(TOK_LE);
5549 e = gtst(1, 0);
5550 vdup();
5551 if (ll)
5552 vpushll(p->v1);
5553 else
5554 vpushi(p->v1);
5555 gen_op(TOK_GE);
5556 gtst_addr(0, p->sym);
5557 gsym(e);
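/* Illustration of the dispatch built above: case ranges are sorted,
   large groups are split by a binary search on v2, small tails are
   compared linearly, and a GNU case range is a single [v1, v2] entry.

       switch (ch) {
       case '0' ... '9': return 1;   // one case_t with v1 = '0', v2 = '9'
       case ' ':         return 2;   // v1 == v2
       default:          return 0;
       }
*/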
5562 static void block(int *bsym, int *csym, int is_expr)
5564 int a, b, c, d, cond;
5565 Sym *s;
5567 /* generate line number info */
5568 if (tcc_state->do_debug &&
5569 (last_line_num != file->line_num || last_ind != ind)) {
5570 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5571 last_ind = ind;
5572 last_line_num = file->line_num;
5575 if (is_expr) {
5576 /* default return value is (void) */
5577 vpushi(0);
5578 vtop->type.t = VT_VOID;
5581 if (tok == TOK_IF) {
5582 /* if test */
5583 int saved_nocode_wanted = nocode_wanted;
5584 next();
5585 skip('(');
5586 gexpr();
5587 skip(')');
5588 cond = condition_3way();
5589 if (cond == 0)
5590 nocode_wanted |= 2;
5591 a = gvtst(1, 0);
5592 block(bsym, csym, 0);
5593 if (cond != 1)
5594 nocode_wanted = saved_nocode_wanted;
5595 c = tok;
5596 if (c == TOK_ELSE) {
5597 next();
5598 if (cond == 1)
5599 nocode_wanted |= 2;
5600 d = gjmp(0);
5601 gsym(a);
5602 block(bsym, csym, 0);
5603 gsym(d); /* patch else jmp */
5604 if (cond != 0)
5605 nocode_wanted = saved_nocode_wanted;
5606 } else
5607 gsym(a);
5608 } else if (tok == TOK_WHILE) {
5609 int saved_nocode_wanted;
5610 nocode_wanted &= ~2;
5611 next();
5612 d = ind;
5613 vla_sp_restore();
5614 skip('(');
5615 gexpr();
5616 skip(')');
5617 a = gvtst(1, 0);
5618 b = 0;
5619 ++local_scope;
5620 saved_nocode_wanted = nocode_wanted;
5621 block(&a, &b, 0);
5622 nocode_wanted = saved_nocode_wanted;
5623 --local_scope;
5624 if(!nocode_wanted)
5625 gjmp_addr(d);
5626 gsym(a);
5627 gsym_addr(b, d);
5628 } else if (tok == '{') {
5629 Sym *llabel;
5630 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5632 next();
5633 /* record local declaration stack position */
5634 s = local_stack;
5635 llabel = local_label_stack;
5636 ++local_scope;
5638 /* handle local labels declarations */
5639 if (tok == TOK_LABEL) {
5640 next();
5641 for(;;) {
5642 if (tok < TOK_UIDENT)
5643 expect("label identifier");
5644 label_push(&local_label_stack, tok, LABEL_DECLARED);
5645 next();
5646 if (tok == ',') {
5647 next();
5648 } else {
5649 skip(';');
5650 break;
5654 while (tok != '}') {
5655 label_or_decl(VT_LOCAL);
5656 if (tok != '}') {
5657 if (is_expr)
5658 vpop();
5659 block(bsym, csym, is_expr);
5662 /* pop locally defined labels */
5663 label_pop(&local_label_stack, llabel);
5664 /* pop locally defined symbols */
5665 --local_scope;
5666 /* In the is_expr case (a statement expression is finished here),
5667 vtop might refer to symbols on the local_stack. Either via the
5668 type or via vtop->sym. We can't pop those nor any that in turn
5669 might be referred to. To make it easier we don't roll back
5670 any symbols in that case; some upper level call to block() will
5671 do that. We do have to remove such symbols from the lookup
5672 tables, though. sym_pop will do that. */
5673 sym_pop(&local_stack, s, is_expr);
5675 /* Pop VLA frames and restore stack pointer if required */
5676 if (vlas_in_scope > saved_vlas_in_scope) {
5677 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5678 vla_sp_restore();
5680 vlas_in_scope = saved_vlas_in_scope;
5682 next();
5683 } else if (tok == TOK_RETURN) {
5684 next();
5685 if (tok != ';') {
5686 gexpr();
5687 gen_assign_cast(&func_vt);
5688 #ifdef TCC_TARGET_ARM64
5689 // Perhaps it would be better to use this for all backends:
5690 greturn();
5691 #else
5692 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5693 CType type, ret_type;
5694 int ret_align, ret_nregs, regsize;
5695 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5696 &ret_align, &regsize);
5697 if (0 == ret_nregs) {
5698 /* if returning structure, must copy it to implicit
5699 first pointer arg location */
5700 type = func_vt;
5701 mk_pointer(&type);
5702 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5703 indir();
5704 vswap();
5705 /* copy structure value to pointer */
5706 vstore();
5707 } else {
5708 /* returning structure packed into registers */
5709 int r, size, addr, align;
5710 size = type_size(&func_vt,&align);
5711 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5712 (vtop->c.i & (ret_align-1)))
5713 && (align & (ret_align-1))) {
5714 loc = (loc - size) & -ret_align;
5715 addr = loc;
5716 type = func_vt;
5717 vset(&type, VT_LOCAL | VT_LVAL, addr);
5718 vswap();
5719 vstore();
5720 vpop();
5721 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5723 vtop->type = ret_type;
5724 if (is_float(ret_type.t))
5725 r = rc_fret(ret_type.t);
5726 else
5727 r = RC_IRET;
5729 if (ret_nregs == 1)
5730 gv(r);
5731 else {
5732 for (;;) {
5733 vdup();
5734 gv(r);
5735 vpop();
5736 if (--ret_nregs == 0)
5737 break;
5738 /* We assume that when a structure is returned in multiple
5739 registers, their classes are consecutive values of the
5740 sequence s(n) = 2^n */
5741 r <<= 1;
5742 vtop->c.i += regsize;
5746 } else if (is_float(func_vt.t)) {
5747 gv(rc_fret(func_vt.t));
5748 } else {
5749 gv(RC_IRET);
5751 #endif
5752 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5754 skip(';');
5755 /* jump unless last stmt in top-level block */
5756 if (tok != '}' || local_scope != 1)
5757 rsym = gjmp(rsym);
5758 nocode_wanted |= 2;
5759 } else if (tok == TOK_BREAK) {
5760 /* compute jump */
5761 if (!bsym)
5762 tcc_error("cannot break");
5763 *bsym = gjmp(*bsym);
5764 next();
5765 skip(';');
5766 nocode_wanted |= 2;
5767 } else if (tok == TOK_CONTINUE) {
5768 /* compute jump */
5769 if (!csym)
5770 tcc_error("cannot continue");
5771 vla_sp_restore_root();
5772 *csym = gjmp(*csym);
5773 next();
5774 skip(';');
5775 } else if (tok == TOK_FOR) {
5776 int e;
5777 int saved_nocode_wanted;
5778 nocode_wanted &= ~2;
5779 next();
5780 skip('(');
5781 s = local_stack;
5782 ++local_scope;
5783 if (tok != ';') {
5784 /* c99 for-loop init decl? */
5785 if (!decl0(VT_LOCAL, 1)) {
5786 /* no, regular for-loop init expr */
5787 gexpr();
5788 vpop();
5791 skip(';');
5792 d = ind;
5793 c = ind;
5794 vla_sp_restore();
5795 a = 0;
5796 b = 0;
5797 if (tok != ';') {
5798 gexpr();
5799 a = gvtst(1, 0);
5801 skip(';');
5802 if (tok != ')') {
5803 e = gjmp(0);
5804 c = ind;
5805 vla_sp_restore();
5806 gexpr();
5807 vpop();
5808 gjmp_addr(d);
5809 gsym(e);
5811 skip(')');
5812 saved_nocode_wanted = nocode_wanted;
5813 block(&a, &b, 0);
5814 nocode_wanted = saved_nocode_wanted;
5815 if(!nocode_wanted)
5816 gjmp_addr(c);
5817 gsym(a);
5818 gsym_addr(b, c);
5819 --local_scope;
5820 sym_pop(&local_stack, s, 0);
5822 } else
5823 if (tok == TOK_DO) {
5824 int saved_nocode_wanted;
5825 nocode_wanted &= ~2;
5826 next();
5827 a = 0;
5828 b = 0;
5829 d = ind;
5830 vla_sp_restore();
5831 saved_nocode_wanted = nocode_wanted;
5832 block(&a, &b, 0);
5833 nocode_wanted = saved_nocode_wanted;
5834 skip(TOK_WHILE);
5835 skip('(');
5836 gsym(b);
5837 gexpr();
5838 c = gvtst(0, 0);
5839 if (!nocode_wanted)
5840 gsym_addr(c, d);
5841 skip(')');
5842 gsym(a);
5843 skip(';');
5844 } else
5845 if (tok == TOK_SWITCH) {
5846 struct switch_t *saved, sw;
5847 int saved_nocode_wanted = nocode_wanted;
5848 SValue switchval;
5849 next();
5850 skip('(');
5851 gexpr();
5852 skip(')');
5853 switchval = *vtop--;
5854 a = 0;
5855 b = gjmp(0); /* jump to first case */
5856 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5857 saved = cur_switch;
5858 cur_switch = &sw;
5859 block(&a, csym, 0);
5860 nocode_wanted = saved_nocode_wanted;
5861 a = gjmp(a); /* add implicit break */
5862 /* case lookup */
5863 gsym(b);
5864 if (!nocode_wanted) {
5865 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5866 for (b = 1; b < sw.n; b++)
5867 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5868 tcc_error("duplicate case value");
5869 /* Our switch table sorting is signed, so the compared
5870 value needs to be as well when it's 64bit. */
5871 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5872 switchval.type.t &= ~VT_UNSIGNED;
5873 vpushv(&switchval);
5874 gcase(sw.p, sw.n, &a);
5875 vpop();
5876 if (sw.def_sym)
5877 gjmp_addr(sw.def_sym);
5879 dynarray_reset(&sw.p, &sw.n);
5880 cur_switch = saved;
5881 /* break label */
5882 gsym(a);
5883 } else
5884 if (tok == TOK_CASE) {
5885 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5886 if (!cur_switch)
5887 expect("switch");
5888 nocode_wanted &= ~2;
5889 next();
5890 cr->v1 = cr->v2 = expr_const64();
5891 if (gnu_ext && tok == TOK_DOTS) {
5892 next();
5893 cr->v2 = expr_const64();
5894 if (cr->v2 < cr->v1)
5895 tcc_warning("empty case range");
5897 cr->sym = ind;
5898 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5899 skip(':');
5900 is_expr = 0;
5901 goto block_after_label;
5902 } else
5903 if (tok == TOK_DEFAULT) {
5904 next();
5905 skip(':');
5906 if (!cur_switch)
5907 expect("switch");
5908 if (cur_switch->def_sym)
5909 tcc_error("too many 'default'");
5910 cur_switch->def_sym = ind;
5911 is_expr = 0;
5912 goto block_after_label;
5913 } else
5914 if (tok == TOK_GOTO) {
5915 next();
5916 if (tok == '*' && gnu_ext) {
5917 /* computed goto */
5918 next();
5919 gexpr();
5920 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5921 expect("pointer");
5922 if (!nocode_wanted)
5923 ggoto();
5924 else
5925 vtop--;
5926 } else if (tok >= TOK_UIDENT) {
5927 s = label_find(tok);
5928 /* put forward definition if needed */
5929 if (!s) {
5930 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5931 } else {
5932 if (s->r == LABEL_DECLARED)
5933 s->r = LABEL_FORWARD;
5935 vla_sp_restore_root();
5936 if (nocode_wanted)
5938 else if (s->r & LABEL_FORWARD)
5939 s->jnext = gjmp(s->jnext);
5940 else
5941 gjmp_addr(s->jnext);
5942 next();
5943 } else {
5944 expect("label identifier");
5946 skip(';');
5947 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5948 asm_instr();
5949 } else {
5950 b = is_label();
5951 if (b) {
5952 /* label case */
5953 s = label_find(b);
5954 if (s) {
5955 if (s->r == LABEL_DEFINED)
5956 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5957 gsym(s->jnext);
5958 s->r = LABEL_DEFINED;
5959 } else {
5960 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5962 s->jnext = ind;
5963 vla_sp_restore();
5964 /* we accept this, but it is a mistake */
5965 block_after_label:
5966 nocode_wanted &= ~2;
5967 if (tok == '}') {
5968 tcc_warning("deprecated use of label at end of compound statement");
5969 } else {
5970 if (is_expr)
5971 vpop();
5972 block(bsym, csym, is_expr);
5974 } else {
5975 /* expression case */
5976 if (tok != ';') {
5977 if (is_expr) {
5978 vpop();
5979 gexpr();
5980 } else {
5981 gexpr();
5982 vpop();
5985 skip(';');
5990 #define EXPR_CONST 1
5991 #define EXPR_ANY 2
5993 static void parse_init_elem(int expr_type)
5995 int saved_global_expr;
5996 switch(expr_type) {
5997 case EXPR_CONST:
5998 /* compound literals must be allocated globally in this case */
5999 saved_global_expr = global_expr;
6000 global_expr = 1;
6001 expr_const1();
6002 global_expr = saved_global_expr;
6003 /* NOTE: symbols are accepted */
6004 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
6005 tcc_error("initializer element is not constant");
6006 break;
6007 case EXPR_ANY:
6008 expr_eq();
6009 break;
6013 /* t is the array or struct type. c is the array or struct
6014 address. cur_field is the pointer to the current
6015 value, for arrays the 'c' member contains the current start
6016 index and the 'r' contains the end index (in case of range init).
6017 'size_only' is true if only size info is needed (only used
6018 in arrays) */
6019 static void decl_designator(CType *type, Section *sec, unsigned long c,
6020 Sym **cur_field, int size_only)
6022 Sym *s, *f;
6023 int notfirst, index, index_last, align, l, nb_elems, elem_size;
6024 CType type1;
6026 notfirst = 0;
6027 elem_size = 0;
6028 nb_elems = 1;
6029 if (gnu_ext && (l = is_label()) != 0)
6030 goto struct_field;
6031 while (tok == '[' || tok == '.') {
6032 if (tok == '[') {
6033 if (!(type->t & VT_ARRAY))
6034 expect("array type");
6035 s = type->ref;
6036 next();
6037 index = expr_const();
6038 if (index < 0 || (s->c >= 0 && index >= s->c))
6039 tcc_error("invalid index");
6040 if (tok == TOK_DOTS && gnu_ext) {
6041 next();
6042 index_last = expr_const();
6043 if (index_last < 0 ||
6044 (s->c >= 0 && index_last >= s->c) ||
6045 index_last < index)
6046 tcc_error("invalid index");
6047 } else {
6048 index_last = index;
6050 skip(']');
6051 if (!notfirst) {
6052 (*cur_field)->c = index;
6053 (*cur_field)->r = index_last;
6055 type = pointed_type(type);
6056 elem_size = type_size(type, &align);
6057 c += index * elem_size;
6058 /* NOTE: we only support ranges for the last designator */
6059 nb_elems = index_last - index + 1;
6060 if (nb_elems != 1) {
6061 notfirst = 1;
6062 break;
6064 } else {
6065 next();
6066 l = tok;
6067 next();
6068 struct_field:
6069 if ((type->t & VT_BTYPE) != VT_STRUCT)
6070 expect("struct/union type");
6071 f = find_field(type, l);
6072 if (!f)
6073 expect("field");
6074 if (!notfirst)
6075 *cur_field = f;
6076 /* XXX: fix this mess by using explicit storage field */
6077 type1 = f->type;
6078 type1.t |= (type->t & ~VT_TYPE);
6079 type = &type1;
6080 c += f->c;
6082 notfirst = 1;
6084 if (notfirst) {
6085 if (tok == '=') {
6086 next();
6087 } else {
6088 if (!gnu_ext)
6089 expect("=");
6091 } else {
6092 if (type->t & VT_ARRAY) {
6093 index = (*cur_field)->c;
6094 if (type->ref->c >= 0 && index >= type->ref->c)
6095 tcc_error("index too large");
6096 type = pointed_type(type);
6097 c += index * type_size(type, &align);
6098 } else {
6099 f = *cur_field;
6100 if (!f)
6101 tcc_error("too many field init");
6102 /* XXX: fix this mess by using explicit storage field */
6103 type1 = f->type;
6104 type1.t |= (type->t & ~VT_TYPE);
6105 type = &type1;
6106 c += f->c;
6109 decl_initializer(type, sec, c, 0, size_only);
6111 /* XXX: make it more general */
6112 if (!size_only && nb_elems > 1) {
6113 unsigned long c_end;
6114 uint8_t *src, *dst;
6115 int i;
6117 if (!sec) {
6118 vset(type, VT_LOCAL|VT_LVAL, c);
6119 for (i = 1; i < nb_elems; i++) {
6120 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6121 vswap();
6122 vstore();
6124 vpop();
6125 } else {
6126 c_end = c + nb_elems * elem_size;
6127 if (c_end > sec->data_allocated)
6128 section_realloc(sec, c_end);
6129 src = sec->data + c;
6130 dst = src;
6131 for(i = 1; i < nb_elems; i++) {
6132 dst += elem_size;
6133 memcpy(dst, src, elem_size);
6139 /* store a value or an expression directly in global data or in local array */
6140 static void init_putv(CType *type, Section *sec, unsigned long c)
6142 int bt, bit_pos, bit_size;
6143 void *ptr;
6144 unsigned long long bit_mask;
6145 CType dtype;
6147 dtype = *type;
6148 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6150 if (sec) {
6151 int size, align;
6152 /* XXX: not portable */
6153 /* XXX: generate error if incorrect relocation */
6154 gen_assign_cast(&dtype);
6155 bt = type->t & VT_BTYPE;
6156 size = type_size(type, &align);
6157 if (c + size > sec->data_allocated) {
6158 section_realloc(sec, c + size);
6160 ptr = sec->data + c;
6161 /* XXX: make code faster ? */
6162 if (!(type->t & VT_BITFIELD)) {
6163 bit_pos = 0;
6164 bit_size = PTR_SIZE * 8;
6165 bit_mask = -1LL;
6166 } else {
6167 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6168 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6169 bit_mask = (1LL << bit_size) - 1;
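/* Worked example of the masking above, assuming the usual layout where
   the first bit-field starts at bit 0: for
   'struct { int a:3; int b:5; } x = { 5, 9 };' the field b has
   bit_pos 3, bit_size 5 and bit_mask 0x1f, so the value stored is
   (5 & 0x7) | ((9 & 0x1f) << 3) == 0x4d. */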
6171 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6172 vtop->sym->v >= SYM_FIRST_ANOM &&
6173 /* XXX This rejects compound literals like
6174 '(void *){ptr}'. The problem is that '&sym' is
6175 represented the same way, which would be ruled out
6176 by the SYM_FIRST_ANOM check above, but also '"string"'
6177 in 'char *p = "string"' is represented the same
6178 with the type being VT_PTR and the symbol being an
6179 anonymous one. That is, there's no difference in vtop
6180 between '(void *){x}' and '&(void *){x}'. Ignore
6181 pointer typed entities here. Hopefully no real code
6182 will ever use compound literals with scalar type. */
6183 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6184 /* These come from compound literals, memcpy stuff over. */
6185 Section *ssec;
6186 ElfW(Sym) *esym;
6187 ElfW_Rel *rel;
6188 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6189 ssec = tcc_state->sections[esym->st_shndx];
6190 memmove (ptr, ssec->data + esym->st_value, size);
6191 if (ssec->reloc) {
6192 /* We need to copy over all memory contents, and that
6193 includes relocations. Use the fact that relocs are
6194 created in order, so look from the end of relocs
6195 until we hit one before the copied region. */
6196 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6197 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6198 while (num_relocs--) {
6199 rel--;
6200 if (rel->r_offset >= esym->st_value + size)
6201 continue;
6202 if (rel->r_offset < esym->st_value)
6203 break;
6204 /* Note: if the same fields are initialized multiple
6205 times (possible with designators) then we possibly
6206 add multiple relocations for the same offset here.
6207 That would lead to wrong code; the last reloc needs
6208 to win. We clean this up later after the whole
6209 initializer is parsed. */
6210 put_elf_reloca(symtab_section, sec,
6211 c + rel->r_offset - esym->st_value,
6212 ELFW(R_TYPE)(rel->r_info),
6213 ELFW(R_SYM)(rel->r_info),
6214 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6215 rel->r_addend
6216 #else
6218 #endif
6222 } else {
6223 if ((vtop->r & VT_SYM) &&
6224 (bt == VT_BYTE ||
6225 bt == VT_SHORT ||
6226 bt == VT_DOUBLE ||
6227 bt == VT_LDOUBLE ||
6228 #if PTR_SIZE == 8
6229 (bt == VT_LLONG && bit_size != 64) ||
6230 bt == VT_INT
6231 #else
6232 bt == VT_LLONG ||
6233 (bt == VT_INT && bit_size != 32)
6234 #endif
6236 tcc_error("initializer element is not computable at load time");
6237 switch(bt) {
6238 /* XXX: when cross-compiling we assume that each type has the
6239 same representation on host and target, which is likely to
6240 be wrong in the case of long double */
6241 case VT_BOOL:
6242 vtop->c.i = (vtop->c.i != 0);
6243 case VT_BYTE:
6244 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6245 break;
6246 case VT_SHORT:
6247 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6248 break;
6249 case VT_DOUBLE:
6250 *(double *)ptr = vtop->c.d;
6251 break;
6252 case VT_LDOUBLE:
6253 if (sizeof(long double) == LDOUBLE_SIZE)
6254 *(long double *)ptr = vtop->c.ld;
6255 else if (sizeof(double) == LDOUBLE_SIZE)
6256 *(double *)ptr = vtop->c.ld;
6257 else
6258 tcc_error("can't cross compile long double constants");
6259 break;
6260 #if PTR_SIZE != 8
6261 case VT_LLONG:
6262 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6263 break;
6264 #else
6265 case VT_LLONG:
6266 #endif
6267 case VT_PTR:
6269 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6270 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6271 if (vtop->r & VT_SYM)
6272 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6273 else
6274 *(addr_t *)ptr |= val;
6275 #else
6276 if (vtop->r & VT_SYM)
6277 greloc(sec, vtop->sym, c, R_DATA_PTR);
6278 *(addr_t *)ptr |= val;
6279 #endif
6280 break;
6282 default:
6284 int val = (vtop->c.i & bit_mask) << bit_pos;
6285 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6286 if (vtop->r & VT_SYM)
6287 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6288 else
6289 *(int *)ptr |= val;
6290 #else
6291 if (vtop->r & VT_SYM)
6292 greloc(sec, vtop->sym, c, R_DATA_PTR);
6293 *(int *)ptr |= val;
6294 #endif
6295 break;
6299 vtop--;
6300 } else {
6301 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6302 vswap();
6303 vstore();
6304 vpop();
6308 /* put zeros for variable based init */
6309 static void init_putz(Section *sec, unsigned long c, int size)
6311 if (sec) {
6312 /* nothing to do because globals are already set to zero */
6313 } else {
6314 vpush_global_sym(&func_old_type, TOK_memset);
6315 vseti(VT_LOCAL, c);
6316 #ifdef TCC_TARGET_ARM
6317 vpushs(size);
6318 vpushi(0);
6319 #else
6320 vpushi(0);
6321 vpushs(size);
6322 #endif
6323 gfunc_call(3);
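/* For a local object the call built above is the source-level
   equivalent of 'memset(local_base + c, 0, size)'; static storage
   (sec != NULL) needs nothing because it is already zero-filled. It
   fires, for example, for the uninitialized tail here:

       void f(void) {
           int a[8] = { 1, 2 };   // a[2..7] cleared with one memset at run time
       }
*/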
6327 /* 't' contains the type and storage info. 'c' is the offset of the
6328 object in section 'sec'. If 'sec' is NULL, it means stack based
6329 allocation. 'first' is true if array '{' must be read (multi
6330 dimension implicit array init handling). 'size_only' is true if
6331 size only evaluation is wanted (only for arrays). */
6332 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6333 int first, int size_only)
6335 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6336 int size1, align1;
6337 int have_elem;
6338 Sym *s, *f;
6339 Sym indexsym;
6340 CType *t1;
6342 /* If we currently are at an '}' or ',' we have read an initializer
6343 element in one of our callers, and not yet consumed it. */
6344 have_elem = tok == '}' || tok == ',';
6345 if (!have_elem && tok != '{' &&
6346 /* In case of strings we have special handling for arrays, so
6347 don't consume them as initializer value (which would commit them
6348 to some anonymous symbol). */
6349 tok != TOK_LSTR && tok != TOK_STR &&
6350 !size_only) {
6351 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6352 have_elem = 1;
6355 if (have_elem &&
6356 !(type->t & VT_ARRAY) &&
6357 /* Use i_c_parameter_t to strip toplevel qualifiers.
6358 The source type might have VT_CONSTANT set, which is
6359 of course assignable to non-const elements. */
6360 is_compatible_parameter_types(type, &vtop->type)) {
6361 init_putv(type, sec, c);
6362 } else if (type->t & VT_ARRAY) {
6363 s = type->ref;
6364 n = s->c;
6365 array_length = 0;
6366 t1 = pointed_type(type);
6367 size1 = type_size(t1, &align1);
6369 no_oblock = 1;
6370 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6371 tok == '{') {
6372 if (tok != '{')
6373 tcc_error("character array initializer must be a literal,"
6374 " optionally enclosed in braces");
6375 skip('{');
6376 no_oblock = 0;
6379 /* only parse strings here if correct type (otherwise: handle
6380 them as ((w)char *) expressions */
6381 if ((tok == TOK_LSTR &&
6382 #ifdef TCC_TARGET_PE
6383 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6384 #else
6385 (t1->t & VT_BTYPE) == VT_INT
6386 #endif
6387 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6388 while (tok == TOK_STR || tok == TOK_LSTR) {
6389 int cstr_len, ch;
6391 /* compute maximum number of chars wanted */
6392 if (tok == TOK_STR)
6393 cstr_len = tokc.str.size;
6394 else
6395 cstr_len = tokc.str.size / sizeof(nwchar_t);
6396 cstr_len--;
6397 nb = cstr_len;
6398 if (n >= 0 && nb > (n - array_length))
6399 nb = n - array_length;
6400 if (!size_only) {
6401 if (cstr_len > nb)
6402 tcc_warning("initializer-string for array is too long");
6403 /* in order to go faster in the common case (char
6404 string in a global variable), we handle it
6405 specially */
6406 if (sec && tok == TOK_STR && size1 == 1) {
6407 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6408 } else {
6409 for(i=0;i<nb;i++) {
6410 if (tok == TOK_STR)
6411 ch = ((unsigned char *)tokc.str.data)[i];
6412 else
6413 ch = ((nwchar_t *)tokc.str.data)[i];
6414 vpushi(ch);
6415 init_putv(t1, sec, c + (array_length + i) * size1);
6419 array_length += nb;
6420 next();
6422 /* only add trailing zero if enough storage (no
6423 warning in this case since it is standard) */
6424 if (n < 0 || array_length < n) {
6425 if (!size_only) {
6426 vpushi(0);
6427 init_putv(t1, sec, c + (array_length * size1));
6429 array_length++;
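/* Examples accepted by the string path above:

       char a[] = "abc";    // size patched to 4, trailing 0 stored
       char b[3] = "abc";   // fits exactly; no room left for the trailing 0
       char c[2] = "abc";   // warning: initializer-string for array is too long
*/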
6431 } else {
6432 indexsym.c = 0;
6433 indexsym.r = 0;
6434 f = &indexsym;
6436 do_init_list:
6437 while (tok != '}' || have_elem) {
6438 decl_designator(type, sec, c, &f, size_only);
6439 have_elem = 0;
6440 index = f->c;
6441 /* must put zero in holes (note that doing it that way
6442 ensures that it even works with designators) */
6443 if (!size_only && array_length < index) {
6444 init_putz(sec, c + array_length * size1,
6445 (index - array_length) * size1);
6447 if (type->t & VT_ARRAY) {
6448 index = indexsym.c = ++indexsym.r;
6449 } else {
6450 index = index + type_size(&f->type, &align1);
6451 if (s->type.t == TOK_UNION)
6452 f = NULL;
6453 else
6454 f = f->next;
6456 if (index > array_length)
6457 array_length = index;
6459 if (type->t & VT_ARRAY) {
6460 /* special test for multi dimensional arrays (may not
6461 be strictly correct if designators are used at the
6462 same time) */
6463 if (no_oblock && index >= n)
6464 break;
6465 } else {
6466 if (no_oblock && f == NULL)
6467 break;
6469 if (tok == '}')
6470 break;
6471 skip(',');
6474 /* put zeros at the end */
6475 if (!size_only && array_length < n) {
6476 init_putz(sec, c + array_length * size1,
6477 (n - array_length) * size1);
6479 if (!no_oblock)
6480 skip('}');
6481 /* patch type size if needed, which happens only for array types */
6482 if (n < 0)
6483 s->c = array_length;
6484 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6485 size1 = 1;
6486 no_oblock = 1;
6487 if (first || tok == '{') {
6488 skip('{');
6489 no_oblock = 0;
6491 s = type->ref;
6492 f = s->next;
6493 array_length = 0;
6494 n = s->c;
6495 goto do_init_list;
6496 } else if (tok == '{') {
6497 next();
6498 decl_initializer(type, sec, c, first, size_only);
6499 skip('}');
6500 } else if (size_only) {
6501 /* If we supported only ISO C we wouldn't have to accept calling
6502 this on anything than an array size_only==1 (and even then
6503 only on the outermost level, so no recursion would be needed),
6504 because initializing a flex array member isn't supported.
6505 But GNU C supports it, so we need to recurse even into
6506 subfields of structs and arrays when size_only is set. */
6507 /* just skip expression */
6508 parlevel = parlevel1 = 0;
6509 while ((parlevel > 0 || parlevel1 > 0 ||
6510 (tok != '}' && tok != ',')) && tok != -1) {
6511 if (tok == '(')
6512 parlevel++;
6513 else if (tok == ')') {
6514 if (parlevel == 0 && parlevel1 == 0)
6515 break;
6516 parlevel--;
6518 else if (tok == '{')
6519 parlevel1++;
6520 else if (tok == '}') {
6521 if (parlevel == 0 && parlevel1 == 0)
6522 break;
6523 parlevel1--;
6525 next();
6527 } else {
6528 if (!have_elem) {
6529 /* This should happen only when we haven't parsed
6530 the init element above for fear of committing a
6531 string constant to memory too early. */
6532 if (tok != TOK_STR && tok != TOK_LSTR)
6533 expect("string constant");
6534 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6536 init_putv(type, sec, c);
6540 /* parse an initializer for type 't' if 'has_init' is non zero, and
6541 allocate space in local or global data space ('r' is either
6542 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6543 variable 'v' of scope 'scope' is declared before initializers
6544 are parsed. If 'v' is zero, then a reference to the new object
6545 is put in the value stack. If 'has_init' is 2, a special parsing
6546 is done to handle string constants. */
6547 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6548 int has_init, int v, int scope)
6550 int size, align, addr, data_offset;
6551 int level;
6552 ParseState saved_parse_state = {0};
6553 TokenString *init_str = NULL;
6554 Section *sec;
6555 Sym *flexible_array;
6557 flexible_array = NULL;
6558 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6559 Sym *field = type->ref->next;
6560 if (field) {
6561 while (field->next)
6562 field = field->next;
6563 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6564 flexible_array = field;
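/* What the scan above detects: a trailing flexible array member whose
   initializer (a GNU extension) later enlarges the allocated object, e.g.

       struct msg { int len; char data[]; };
       static struct msg m = { 2, "hi" };   // size grows to hold "hi" plus the NUL
*/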
6568 size = type_size(type, &align);
6569 /* If unknown size, we must evaluate it before
6570 evaluating initializers because
6571 initializers can generate global data too
6572 (e.g. string pointers or ISOC99 compound
6573 literals). It also simplifies local
6574 initializer handling */
6575 if (size < 0 || (flexible_array && has_init)) {
6576 if (!has_init)
6577 tcc_error("unknown type size");
6578 /* get all init string */
6579 init_str = tok_str_alloc();
6580 if (has_init == 2) {
6581 /* only get strings */
6582 while (tok == TOK_STR || tok == TOK_LSTR) {
6583 tok_str_add_tok(init_str);
6584 next();
6586 } else {
6587 level = 0;
6588 while (level > 0 || (tok != ',' && tok != ';')) {
6589 if (tok < 0)
6590 tcc_error("unexpected end of file in initializer");
6591 tok_str_add_tok(init_str);
6592 if (tok == '{')
6593 level++;
6594 else if (tok == '}') {
6595 level--;
6596 if (level <= 0) {
6597 next();
6598 break;
6601 next();
6604 tok_str_add(init_str, -1);
6605 tok_str_add(init_str, 0);
6607 /* compute size */
6608 save_parse_state(&saved_parse_state);
6610 begin_macro(init_str, 1);
6611 next();
6612 decl_initializer(type, NULL, 0, 1, 1);
6613 /* prepare second initializer parsing */
6614 macro_ptr = init_str->str;
6615 next();
6617 /* if still unknown size, error */
6618 size = type_size(type, &align);
6619 if (size < 0)
6620 tcc_error("unknown type size");
6622 /* If there's a flex member and it was used in the initializer
6623 adjust size. */
6624 if (flexible_array &&
6625 flexible_array->type.ref->c > 0)
6626 size += flexible_array->type.ref->c
6627 * pointed_size(&flexible_array->type);
6628 /* take into account specified alignment if bigger */
6629 if (ad->a.aligned) {
6630 if (ad->a.aligned > align)
6631 align = ad->a.aligned;
6632 } else if (ad->a.packed) {
6633 align = 1;
6635 if ((r & VT_VALMASK) == VT_LOCAL) {
6636 sec = NULL;
6637 #ifdef CONFIG_TCC_BCHECK
6638 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6639 loc--;
6641 #endif
6642 loc = (loc - size) & -align;
6643 addr = loc;
6644 #ifdef CONFIG_TCC_BCHECK
6645 /* handles bounds */
6646 /* XXX: currently, since we do only one pass, we cannot track
6647 '&' operators, so we add only arrays */
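/* Illustrative example: for a local 'char buf[8]' with bounds checking
   enabled, one byte of padding is reserved between regions and an
   (address, size) pair, here the address of buf and 8, is appended to
   lbounds_section below. */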
6648 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6649 addr_t *bounds_ptr;
6650 /* add padding between regions */
6651 loc--;
6652 /* then add local bound info */
6653 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6654 bounds_ptr[0] = addr;
6655 bounds_ptr[1] = size;
6657 #endif
6658 if (v) {
6659 /* local variable */
6660 #ifdef CONFIG_TCC_ASM
6661 if (ad->asm_label) {
6662 int reg = asm_parse_regvar(ad->asm_label);
6663 if (reg >= 0)
6664 r = (r & ~VT_VALMASK) | reg;
6666 #endif
6667 sym_push(v, type, r, addr);
6668 } else {
6669 /* push local reference */
6670 vset(type, r, addr);
6672 } else {
6673 Sym *sym;
6675 sym = NULL;
6676 if (v && scope == VT_CONST) {
6677 /* see if the symbol was already defined */
6678 sym = sym_find(v);
6679 if (sym) {
6680 if (!is_compatible_types(&sym->type, type))
6681 tcc_error("incompatible types for redefinition of '%s'",
6682 get_tok_str(v, NULL));
6683 if (sym->type.t & VT_EXTERN) {
6684 /* if the variable is extern, it was not allocated */
6685 sym->type.t &= ~VT_EXTERN;
6686 /* set array size if it was omitted in extern
6687 declaration */
6688 if ((sym->type.t & VT_ARRAY) &&
6689 sym->type.ref->c < 0 &&
6690 type->ref->c >= 0)
6691 sym->type.ref->c = type->ref->c;
6692 } else {
6693 /* we accept several definitions of the same
6694 global variable. This is tricky, because we
6695 must play with the SHN_COMMON type of the symbol */
6696 /* XXX: should check if the variable was already
6697 initialized. It is incorrect to initialize it
6698 twice */
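/* Illustrative example: tentative definitions at file scope,
       int x;
       int x;
   are both accepted; the first is emitted as a common (SHN_COMMON) symbol
   and the second allocates nothing (no_alloc). */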
6699 /* no init data, we won't add more to the symbol */
6700 if (!has_init)
6701 goto no_alloc;
6706 /* allocate symbol in corresponding section */
6707 sec = ad->section;
6708 if (!sec) {
6709 if (has_init)
6710 sec = data_section;
6711 else if (tcc_state->nocommon)
6712 sec = bss_section;
6714 if (sec) {
6715 data_offset = sec->data_offset;
6716 data_offset = (data_offset + align - 1) & -align;
6717 addr = data_offset;
6718 /* very important to increment global pointer at this time
6719 because initializers themselves can create new initializers */
6720 data_offset += size;
6721 #ifdef CONFIG_TCC_BCHECK
6722 /* add padding if bound check */
6723 if (tcc_state->do_bounds_check)
6724 data_offset++;
6725 #endif
6726 sec->data_offset = data_offset;
6727 /* allocate section space to put the data */
6728 if (sec->sh_type != SHT_NOBITS &&
6729 data_offset > sec->data_allocated)
6730 section_realloc(sec, data_offset);
6731 /* align section if needed */
6732 if (align > sec->sh_addralign)
6733 sec->sh_addralign = align;
6734 } else {
6735 addr = 0; /* avoid warning */
6738 if (v) {
6739 if (scope != VT_CONST || !sym) {
6740 sym = sym_push(v, type, r | VT_SYM, 0);
6741 sym->asm_label = ad->asm_label;
6743 /* update symbol definition */
6744 if (sec) {
6745 put_extern_sym(sym, sec, addr, size);
6746 } else {
6747 ElfW(Sym) *esym;
6748 /* put a common area */
6749 put_extern_sym(sym, NULL, align, size);
6750 /* XXX: find a nicer way */
6751 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6752 esym->st_shndx = SHN_COMMON;
6754 } else {
6755 /* push global reference */
6756 sym = get_sym_ref(type, sec, addr, size);
6757 vpushsym(type, sym);
6759 /* patch symbol weakness */
6760 if (type->t & VT_WEAK)
6761 weaken_symbol(sym);
6762 apply_visibility(sym, type);
6763 #ifdef CONFIG_TCC_BCHECK
6764 /* handle bounds now because the symbol must be defined
6765 beforehand for the relocation */
6766 if (tcc_state->do_bounds_check) {
6767 addr_t *bounds_ptr;
6769 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6770 /* then add global bound info */
6771 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6772 bounds_ptr[0] = 0; /* relocated */
6773 bounds_ptr[1] = size;
6775 #endif
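/* Illustrative example: a variable length array such as
       void f(int n) { char buf[n]; }
   takes the branch below: the stack pointer is saved once per scope
   (vla_sp_root_loc), the runtime size is computed, and space is obtained
   by adjusting the stack pointer at run time. */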
6777 if (type->t & VT_VLA) {
6778 int a;
6780 /* save current stack pointer */
6781 if (vlas_in_scope == 0) {
6782 if (vla_sp_root_loc == -1)
6783 vla_sp_root_loc = (loc -= PTR_SIZE);
6784 gen_vla_sp_save(vla_sp_root_loc);
6787 vla_runtime_type_size(type, &a);
6788 gen_vla_alloc(type, a);
6789 gen_vla_sp_save(addr);
6790 vla_sp_loc = addr;
6791 vlas_in_scope++;
6792 } else if (has_init) {
6793 size_t oldreloc_offset = 0;
6794 if (sec && sec->reloc)
6795 oldreloc_offset = sec->reloc->data_offset;
6796 decl_initializer(type, sec, addr, 1, 0);
6797 if (sec && sec->reloc)
6798 squeeze_multi_relocs(sec, oldreloc_offset);
6799 /* patch flexible array member size back to -1, */
6800 /* for possible subsequent similar declarations */
6801 if (flexible_array)
6802 flexible_array->type.ref->c = -1;
6804 no_alloc: ;
6805 /* restore parse state if needed */
6806 if (init_str) {
6807 end_macro();
6808 restore_parse_state(&saved_parse_state);
6812 static void put_func_debug(Sym *sym)
6814 char buf[512];
6816 /* stabs info */
6817 /* XXX: we put here a dummy type */
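/* Illustrative example: for a function 'main' this emits a stab string
   like "main:F1" ("main:f1" if static), the trailing 1 being the dummy
   type number mentioned above. */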
6818 snprintf(buf, sizeof(buf), "%s:%c1",
6819 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6820 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6821 cur_text_section, sym->c);
6822 /* //gr gdb wants a line at the function */
6823 put_stabn(N_SLINE, 0, file->line_num, 0);
6824 last_ind = 0;
6825 last_line_num = 0;
6828 /* parse an old style function declaration list */
6829 /* XXX: check multiple parameters */
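/* Illustrative example (hypothetical names) of such a declaration list:
       int max(a, b)
       int a, b;
       {
           return a > b ? a : b;
       }
*/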
6830 static void func_decl_list(Sym *func_sym)
6832 AttributeDef ad;
6833 int v;
6834 Sym *s;
6835 CType btype, type;
6837 /* parse each declaration */
6838 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6839 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6840 if (!parse_btype(&btype, &ad))
6841 expect("declaration list");
6842 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6843 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6844 tok == ';') {
6845 /* we accept a struct/enum declaration with no variable after it */
6846 } else {
6847 for(;;) {
6848 type = btype;
6849 type_decl(&type, &ad, &v, TYPE_DIRECT);
6850 /* find parameter in function parameter list */
6851 s = func_sym->next;
6852 while (s != NULL) {
6853 if ((s->v & ~SYM_FIELD) == v)
6854 goto found;
6855 s = s->next;
6857 tcc_error("declaration for parameter '%s' but no such parameter",
6858 get_tok_str(v, NULL));
6859 found:
6860 /* check that no storage specifier except 'register' was given */
6861 if (type.t & VT_STORAGE)
6862 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6863 convert_parameter_type(&type);
6864 /* we can add the type (NOTE: it could be local to the function) */
6865 s->type = type;
6866 /* accept other parameters */
6867 if (tok == ',')
6868 next();
6869 else
6870 break;
6873 skip(';');
6877 /* parse a function defined by symbol 'sym' and generate its code in
6878 'cur_text_section' */
6879 static void gen_function(Sym *sym)
6881 int saved_nocode_wanted = nocode_wanted;
6883 nocode_wanted = 0;
6884 ind = cur_text_section->data_offset;
6885 /* NOTE: we patch the symbol size later */
6886 put_extern_sym(sym, cur_text_section, ind, 0);
6887 funcname = get_tok_str(sym->v, NULL);
6888 func_ind = ind;
6889 /* Initialize VLA state */
6890 vla_sp_loc = -1;
6891 vla_sp_root_loc = -1;
6892 /* put debug symbol */
6893 if (tcc_state->do_debug)
6894 put_func_debug(sym);
6896 /* push a dummy symbol to enable local sym storage */
6897 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6898 local_scope = 1; /* for function parameters */
6899 gfunc_prolog(&sym->type);
6900 local_scope = 0;
6902 rsym = 0;
6903 block(NULL, NULL, 0);
6904 gsym(rsym);
6905 gfunc_epilog();
6906 cur_text_section->data_offset = ind;
6907 label_pop(&global_label_stack, NULL);
6908 /* reset local stack */
6909 local_scope = 0;
6910 sym_pop(&local_stack, NULL, 0);
6911 /* end of function */
6912 /* patch symbol size */
6913 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6914 ind - func_ind;
6915 /* patch symbol weakness (this definition overrules any prototype) */
6916 if (sym->type.t & VT_WEAK)
6917 weaken_symbol(sym);
6918 apply_visibility(sym, &sym->type);
6919 if (tcc_state->do_debug) {
6920 put_stabn(N_FUN, 0, 0, ind - func_ind);
6922 /* It's better to crash than to generate wrong code */
6923 cur_text_section = NULL;
6924 funcname = ""; /* for safety */
6925 func_vt.t = VT_VOID; /* for safety */
6926 func_var = 0; /* for safety */
6927 ind = 0; /* for safety */
6928 nocode_wanted = saved_nocode_wanted;
6929 check_vstack();
6932 static void gen_inline_functions(TCCState *s)
6934 Sym *sym;
6935 int inline_generated, i, ln;
6936 struct InlineFunc *fn;
6938 ln = file->line_num;
6939 /* iterate while inline functions are referenced */
6940 for(;;) {
6941 inline_generated = 0;
6942 for (i = 0; i < s->nb_inline_fns; ++i) {
6943 fn = s->inline_fns[i];
6944 sym = fn->sym;
6945 if (sym && sym->c) {
6946 /* the function was used: generate its code and
6947 convert it to a normal function */
6948 fn->sym = NULL;
6949 if (file)
6950 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6951 sym->r = VT_SYM | VT_CONST;
6952 sym->type.t &= ~VT_INLINE;
6954 begin_macro(fn->func_str, 1);
6955 next();
6956 cur_text_section = text_section;
6957 gen_function(sym);
6958 end_macro();
6960 inline_generated = 1;
6963 if (!inline_generated)
6964 break;
6966 file->line_num = ln;
6969 ST_FUNC void free_inline_functions(TCCState *s)
6971 int i;
6972 /* free tokens of unused inline functions */
6973 for (i = 0; i < s->nb_inline_fns; ++i) {
6974 struct InlineFunc *fn = s->inline_fns[i];
6975 if (fn->sym)
6976 tok_str_free(fn->func_str);
6978 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6981 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6982 static int decl0(int l, int is_for_loop_init)
6984 int v, has_init, r;
6985 CType type, btype;
6986 Sym *sym;
6987 AttributeDef ad;
6989 while (1) {
6990 if (!parse_btype(&btype, &ad)) {
6991 if (is_for_loop_init)
6992 return 0;
6993 /* skip redundant ';' */
6994 /* XXX: find a more elegant solution */
6995 if (tok == ';') {
6996 next();
6997 continue;
6999 if (l == VT_CONST &&
7000 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7001 /* global asm block */
7002 asm_global_instr();
7003 continue;
7005 /* special test for old K&R protos without explicit int
7006 type. Only accepted when defining global data */
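/* Illustrative example: old K&R code like
       main() { return 0; }
   gives no explicit type; at file scope it defaults to int below. */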
7007 if (l == VT_LOCAL || tok < TOK_UIDENT)
7008 break;
7009 btype.t = VT_INT;
7011 if (((btype.t & VT_BTYPE) == VT_ENUM ||
7012 (btype.t & VT_BTYPE) == VT_STRUCT) &&
7013 tok == ';') {
7014 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7015 int v = btype.ref->v;
7016 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7017 tcc_warning("unnamed struct/union that defines no instances");
7019 next();
7020 continue;
7022 while (1) { /* iterate through each declaration */
7023 type = btype;
7024 /* If the base type itself was an array type of unspecified
7025 size (like in 'typedef int arr[]; arr x = {1};') then
7026 we will overwrite the unknown size by the real one for
7027 this decl. We need to unshare the ref symbol holding
7028 that size. */
7029 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7030 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7032 type_decl(&type, &ad, &v, TYPE_DIRECT);
7033 #if 0
7035 char buf[500];
7036 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
7037 printf("type = '%s'\n", buf);
7039 #endif
7040 if ((type.t & VT_BTYPE) == VT_FUNC) {
7041 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7042 tcc_error("function without file scope cannot be static");
7044 /* if old style function prototype, we accept a
7045 declaration list */
7046 sym = type.ref;
7047 if (sym->c == FUNC_OLD)
7048 func_decl_list(sym);
7051 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7052 ad.asm_label = asm_label_instr();
7053 /* parse one last attribute list, after asm label */
7054 parse_attribute(&ad);
7055 if (tok == '{')
7056 expect(";");
7059 if (ad.a.weak)
7060 type.t |= VT_WEAK;
7061 #ifdef TCC_TARGET_PE
7062 if (ad.a.func_import)
7063 type.t |= VT_IMPORT;
7064 if (ad.a.func_export)
7065 type.t |= VT_EXPORT;
7066 #endif
7067 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7069 if (tok == '{') {
7070 if (l == VT_LOCAL)
7071 tcc_error("cannot use local functions");
7072 if ((type.t & VT_BTYPE) != VT_FUNC)
7073 expect("function definition");
7075 /* reject abstract declarators in function definition */
7076 sym = type.ref;
7077 while ((sym = sym->next) != NULL)
7078 if (!(sym->v & ~SYM_FIELD))
7079 expect("identifier");
7081 /* XXX: cannot do better now: convert extern inline to static inline */
7082 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7083 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
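/* Illustrative example (hypothetical name):
       extern inline int one(void) { return 1; }
   is treated as 'static inline' by the conversion above. */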
7085 sym = sym_find(v);
7086 if (sym) {
7087 Sym *ref;
7088 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7089 goto func_error1;
7091 ref = sym->type.ref;
7092 if (0 == ref->a.func_proto)
7093 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7095 /* use func_call from prototype if not defined */
7096 if (ref->a.func_call != FUNC_CDECL
7097 && type.ref->a.func_call == FUNC_CDECL)
7098 type.ref->a.func_call = ref->a.func_call;
7100 /* use export from prototype */
7101 if (ref->a.func_export)
7102 type.ref->a.func_export = 1;
7104 /* use static from prototype */
7105 if (sym->type.t & VT_STATIC)
7106 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7108 /* If the definition has no visibility, use the
7109 one from the prototype. */
7110 if (! (type.t & VT_VIS_MASK))
7111 type.t |= sym->type.t & VT_VIS_MASK;
7113 if (!is_compatible_types(&sym->type, &type)) {
7114 func_error1:
7115 tcc_error("incompatible types for redefinition of '%s'",
7116 get_tok_str(v, NULL));
7118 type.ref->a.func_proto = 0;
7119 /* if symbol is already defined, then put complete type */
7120 sym->type = type;
7121 } else {
7122 /* put function symbol */
7123 sym = global_identifier_push(v, type.t, 0);
7124 sym->type.ref = type.ref;
7127 /* static inline functions are just recorded as a kind
7128 of macro. Their code will be emitted at the end of
7129 the compilation unit only if they are used */
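/* Illustrative example (hypothetical name):
       static inline int twice(int x) { return x + x; }
   is only tokenized here; gen_inline_functions() emits its code later,
   and only if the function is actually referenced. */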
7130 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7131 (VT_INLINE | VT_STATIC)) {
7132 int block_level;
7133 struct InlineFunc *fn;
7134 const char *filename;
7136 filename = file ? file->filename : "";
7137 fn = tcc_malloc(sizeof *fn + strlen(filename));
7138 strcpy(fn->filename, filename);
7139 fn->sym = sym;
7140 fn->func_str = tok_str_alloc();
7142 block_level = 0;
7143 for(;;) {
7144 int t;
7145 if (tok == TOK_EOF)
7146 tcc_error("unexpected end of file");
7147 tok_str_add_tok(fn->func_str);
7148 t = tok;
7149 next();
7150 if (t == '{') {
7151 block_level++;
7152 } else if (t == '}') {
7153 block_level--;
7154 if (block_level == 0)
7155 break;
7158 tok_str_add(fn->func_str, -1);
7159 tok_str_add(fn->func_str, 0);
7160 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7162 } else {
7163 /* compute text section */
7164 cur_text_section = ad.section;
7165 if (!cur_text_section)
7166 cur_text_section = text_section;
7167 sym->r = VT_SYM | VT_CONST;
7168 gen_function(sym);
7170 break;
7171 } else {
7172 if (btype.t & VT_TYPEDEF) {
7173 /* save typedefed type */
7174 /* XXX: test storage specifiers ? */
7175 sym = sym_find(v);
7176 if (sym && sym->scope == local_scope) {
7177 if (!is_compatible_types(&sym->type, &type)
7178 || !(sym->type.t & VT_TYPEDEF))
7179 tcc_error("incompatible redefinition of '%s'",
7180 get_tok_str(v, NULL));
7181 sym->type = type;
7182 } else {
7183 sym = sym_push(v, &type, 0, 0);
7185 sym->a = ad.a;
7186 sym->type.t |= VT_TYPEDEF;
7187 } else {
7188 r = 0;
7189 if ((type.t & VT_BTYPE) == VT_FUNC) {
7190 /* external function definition */
7191 /* specific case for func_call attribute */
7192 ad.a.func_proto = 1;
7193 type.ref->a = ad.a;
7194 } else if (!(type.t & VT_ARRAY)) {
7195 /* not lvalue if array */
7196 r |= lvalue_type(type.t);
7198 has_init = (tok == '=');
7199 if (has_init && (type.t & VT_VLA))
7200 tcc_error("variable length array cannot be initialized");
7201 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7202 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7203 !has_init && l == VT_CONST && type.ref->c < 0)) {
7204 /* external variable or function */
7205 /* NOTE: as with GCC, uninitialized global static
7206 arrays of null size are considered
7207 extern */
7208 sym = external_sym(v, &type, r);
7209 sym->asm_label = ad.asm_label;
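/* Illustrative example (hypothetical names) for the alias attribute
   handled just below; note the target must already be defined:
       int real_impl(int x) { return x; }
       int wrapper(int x) __attribute__((alias("real_impl")));
*/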
7211 if (ad.alias_target) {
7212 Section tsec;
7213 ElfW(Sym) *esym;
7214 Sym *alias_target;
7216 alias_target = sym_find(ad.alias_target);
7217 if (!alias_target || !alias_target->c)
7218 tcc_error("unsupported forward __alias__ attribute");
7219 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7220 tsec.sh_num = esym->st_shndx;
7221 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7223 } else {
7224 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7225 if (type.t & VT_STATIC)
7226 r |= VT_CONST;
7227 else
7228 r |= l;
7229 if (has_init)
7230 next();
7231 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7234 if (tok != ',') {
7235 if (is_for_loop_init)
7236 return 1;
7237 skip(';');
7238 break;
7240 next();
7242 ad.a.aligned = 0;
7245 return 0;
7248 ST_FUNC void decl(int l)
7250 decl0(l, 0);
7253 /* ------------------------------------------------------------------------- */