struct-layout: Allow lowering of member alignment
[tinycc.git] / tccgen.c
blob a411e8f84df5119baff42d72de7c41b20343bda7
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack location of the slot that holds the saved stack pointer, used when the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
72 static void gen_cast(CType *type);
73 static inline CType *pointed_type(CType *type);
74 static int is_compatible_types(CType *type1, CType *type2);
75 static int parse_btype(CType *type, AttributeDef *ad);
76 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
77 static void parse_expr_type(CType *type);
78 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
79 static void block(int *bsym, int *csym, int is_expr);
80 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
81 static int decl0(int l, int is_for_loop_init);
82 static void expr_eq(void);
83 static void expr_lor_const(void);
84 static void unary_type(CType *type);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static void expr_type(CType *type);
90 static inline int64_t expr_const64(void);
91 ST_FUNC void vpush64(int ty, unsigned long long v);
92 ST_FUNC void vpush(CType *type);
93 ST_FUNC int gvtst(int inv, int t);
94 ST_FUNC int is_btype_size(int bt);
95 static void gen_inline_functions(TCCState *s);
97 ST_INLN int is_float(int t)
99 int bt;
100 bt = t & VT_BTYPE;
101 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
104 /* we use our own 'finite' function to avoid potential problems with
105 non standard math libs */
106 /* XXX: endianness dependent */
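/* How the check below works: on a little-endian layout p[1] holds the high
   32 bits of the double, with the 11 exponent bits at positions 20..30.
   OR-ing with 0x800fffff sets every bit except the exponent bits, so the
   "+ 1" carries out through bit 31 only when all exponent bits are 1
   (infinity or NaN).  The final ">> 31" therefore yields 1 for finite
   values and 0 otherwise. */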
107 ST_FUNC int ieee_finite(double d)
109 int p[4];
110 memcpy(p, &d, sizeof(double));
111 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
114 ST_FUNC void test_lvalue(void)
116 if (!(vtop->r & VT_LVAL))
117 expect("lvalue");
120 ST_FUNC void check_vstack(void)
122 if (pvtop != vtop)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
129 #if 0
130 void pv (const char *lbl, int a, int b)
132 int i;
133 for (i = a; i < a + b; ++i) {
134 SValue *p = &vtop[-i];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
139 #endif
141 /* ------------------------------------------------------------------------- */
142 ST_FUNC void tccgen_start(TCCState *s1)
144 cur_text_section = NULL;
145 funcname = "";
146 anon_sym = SYM_FIRST_ANOM;
147 section_sym = 0;
148 nocode_wanted = 1;
150 /* define some often used types */
151 int_type.t = VT_INT;
152 char_pointer_type.t = VT_BYTE;
153 mk_pointer(&char_pointer_type);
154 #if PTR_SIZE == 4
155 size_type.t = VT_INT;
156 #else
157 size_type.t = VT_LLONG;
158 #endif
159 func_old_type.t = VT_FUNC;
160 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
162 if (s1->do_debug) {
163 char buf[512];
165 /* file info: full path + filename */
166 section_sym = put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
168 text_section->sh_num, NULL);
169 getcwd(buf, sizeof(buf));
170 #ifdef _WIN32
171 normalize_slashes(buf);
172 #endif
173 pstrcat(buf, sizeof(buf), "/");
174 put_stabs_r(buf, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
176 put_stabs_r(file->filename, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
179 /* an ELF symbol of type STT_FILE must be emitted so that STB_LOCAL
180 symbols can be safely used */
181 put_elf_sym(symtab_section, 0, 0,
182 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
183 SHN_ABS, file->filename);
185 #ifdef TCC_TARGET_ARM
186 arm_init(s1);
187 #endif
190 ST_FUNC void tccgen_end(TCCState *s1)
192 gen_inline_functions(s1);
193 check_vstack();
194 /* end of translation unit info */
195 if (s1->do_debug) {
196 put_stabs_r(NULL, N_SO, 0, 0,
197 text_section->data_offset, text_section, section_sym);
201 /* ------------------------------------------------------------------------- */
202 /* update sym->c so that it points to an external symbol in section
203 'section' with value 'value' */
205 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
206 addr_t value, unsigned long size,
207 int can_add_underscore)
209 int sym_type, sym_bind, sh_num, info, other;
210 ElfW(Sym) *esym;
211 const char *name;
212 char buf1[256];
214 #ifdef CONFIG_TCC_BCHECK
215 char buf[32];
216 #endif
218 if (section == NULL)
219 sh_num = SHN_UNDEF;
220 else if (section == SECTION_ABS)
221 sh_num = SHN_ABS;
222 else
223 sh_num = section->sh_num;
225 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
226 sym_type = STT_FUNC;
227 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
228 sym_type = STT_NOTYPE;
229 } else {
230 sym_type = STT_OBJECT;
233 if (sym->type.t & VT_STATIC)
234 sym_bind = STB_LOCAL;
235 else {
236 if (sym->type.t & VT_WEAK)
237 sym_bind = STB_WEAK;
238 else
239 sym_bind = STB_GLOBAL;
242 if (!sym->c) {
243 name = get_tok_str(sym->v, NULL);
244 #ifdef CONFIG_TCC_BCHECK
245 if (tcc_state->do_bounds_check) {
246 /* XXX: avoid doing that for statics ? */
247 /* if bounds checking is activated, we change some function
248 names by prepending the "__bound_" prefix */
249 switch(sym->v) {
250 #ifdef TCC_TARGET_PE
251 /* XXX: we rely only on malloc hooks */
252 case TOK_malloc:
253 case TOK_free:
254 case TOK_realloc:
255 case TOK_memalign:
256 case TOK_calloc:
257 #endif
258 case TOK_memcpy:
259 case TOK_memmove:
260 case TOK_memset:
261 case TOK_strlen:
262 case TOK_strcpy:
263 case TOK_alloca:
264 strcpy(buf, "__bound_");
265 strcat(buf, name);
266 name = buf;
267 break;
270 #endif
271 other = 0;
273 #ifdef TCC_TARGET_PE
274 if (sym->type.t & VT_EXPORT)
275 other |= ST_PE_EXPORT;
276 if (sym_type == STT_FUNC && sym->type.ref) {
277 Sym *ref = sym->type.ref;
278 if (ref->a.func_export)
279 other |= ST_PE_EXPORT;
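/* PE stdcall name decoration: "_name@N", where N is the size of the
   arguments in bytes (approximated below as argument count * PTR_SIZE) */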
280 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
281 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
282 name = buf1;
283 other |= ST_PE_STDCALL;
284 can_add_underscore = 0;
286 } else {
287 if (find_elf_sym(tcc_state->dynsymtab_section, name))
288 other |= ST_PE_IMPORT;
289 if (sym->type.t & VT_IMPORT)
290 other |= ST_PE_IMPORT;
292 #else
293 if (! (sym->type.t & VT_STATIC))
294 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
295 #endif
296 if (tcc_state->leading_underscore && can_add_underscore) {
297 buf1[0] = '_';
298 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
299 name = buf1;
301 if (sym->asm_label) {
302 name = get_tok_str(sym->asm_label, NULL);
304 info = ELFW(ST_INFO)(sym_bind, sym_type);
305 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
306 } else {
307 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
308 esym->st_value = value;
309 esym->st_size = size;
310 esym->st_shndx = sh_num;
314 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
315 addr_t value, unsigned long size)
317 put_extern_sym2(sym, section, value, size, 1);
320 /* add a new relocation entry to symbol 'sym' in section 's' */
321 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
322 addr_t addend)
324 int c = 0;
325 if (sym) {
326 if (0 == sym->c)
327 put_extern_sym(sym, NULL, 0, 0);
328 c = sym->c;
330 /* now we can add ELF relocation info */
331 put_elf_reloca(symtab_section, s, offset, type, c, addend);
334 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
336 greloca(s, sym, offset, type, 0);
339 /* ------------------------------------------------------------------------- */
340 /* symbol allocator */
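/* Symbols are allocated from pools of SYM_POOL_NB entries.  __sym_malloc
   carves out a new pool, threads every entry onto the free list and returns
   the new list head; sym_malloc below pops entries from that list. */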
341 static Sym *__sym_malloc(void)
343 Sym *sym_pool, *sym, *last_sym;
344 int i;
346 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
347 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
349 last_sym = sym_free_first;
350 sym = sym_pool;
351 for(i = 0; i < SYM_POOL_NB; i++) {
352 sym->next = last_sym;
353 last_sym = sym;
354 sym++;
356 sym_free_first = last_sym;
357 return last_sym;
360 static inline Sym *sym_malloc(void)
362 Sym *sym;
363 #ifndef SYM_DEBUG
364 sym = sym_free_first;
365 if (!sym)
366 sym = __sym_malloc();
367 sym_free_first = sym->next;
368 return sym;
369 #else
370 sym = tcc_malloc(sizeof(Sym));
371 return sym;
372 #endif
375 ST_INLN void sym_free(Sym *sym)
377 #ifndef SYM_DEBUG
378 sym->next = sym_free_first;
379 sym_free_first = sym;
380 #else
381 tcc_free(sym);
382 #endif
385 /* push, without hashing */
386 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
388 Sym *s;
390 s = sym_malloc();
391 s->scope = 0;
392 s->v = v;
393 s->type.t = t;
394 s->type.ref = NULL;
395 #ifdef _WIN64
396 s->d = NULL;
397 #endif
398 s->c = c;
399 s->next = NULL;
400 /* add in stack */
401 s->prev = *ps;
402 *ps = s;
403 return s;
406 /* find a symbol and return its associated structure. 's' is the top
407 of the symbol stack */
408 ST_FUNC Sym *sym_find2(Sym *s, int v)
410 while (s) {
411 if (s->v == v)
412 return s;
413 else if (s->v == -1)
414 return NULL;
415 s = s->prev;
417 return NULL;
420 /* structure lookup */
421 ST_INLN Sym *struct_find(int v)
423 v -= TOK_IDENT;
424 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
425 return NULL;
426 return table_ident[v]->sym_struct;
429 /* find an identifier */
430 ST_INLN Sym *sym_find(int v)
432 v -= TOK_IDENT;
433 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
434 return NULL;
435 return table_ident[v]->sym_identifier;
438 /* push a given symbol on the symbol stack */
439 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
441 Sym *s, **ps;
442 TokenSym *ts;
444 if (local_stack)
445 ps = &local_stack;
446 else
447 ps = &global_stack;
448 s = sym_push2(ps, v, type->t, c);
449 s->type.ref = type->ref;
450 s->r = r;
451 /* don't record fields or anonymous symbols */
452 /* XXX: simplify */
453 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
454 /* record symbol in token array */
455 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
456 if (v & SYM_STRUCT)
457 ps = &ts->sym_struct;
458 else
459 ps = &ts->sym_identifier;
460 s->prev_tok = *ps;
461 *ps = s;
462 s->scope = local_scope;
463 if (s->prev_tok && s->prev_tok->scope == s->scope)
464 tcc_error("redeclaration of '%s'",
465 get_tok_str(v & ~SYM_STRUCT, NULL));
467 return s;
470 /* push a global identifier */
471 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
473 Sym *s, **ps;
474 s = sym_push2(&global_stack, v, t, c);
475 /* don't record anonymous symbol */
476 if (v < SYM_FIRST_ANOM) {
477 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
478 /* modify the top most local identifier, so that
479 sym_identifier will point to 's' when popped */
480 while (*ps != NULL)
481 ps = &(*ps)->prev_tok;
482 s->prev_tok = NULL;
483 *ps = s;
485 return s;
488 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
489 pop them yet from the list, but do remove them from the token array. */
490 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
492 Sym *s, *ss, **ps;
493 TokenSym *ts;
494 int v;
496 s = *ptop;
497 while(s != b) {
498 ss = s->prev;
499 v = s->v;
500 /* remove symbol in token array */
501 /* XXX: simplify */
502 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
503 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
504 if (v & SYM_STRUCT)
505 ps = &ts->sym_struct;
506 else
507 ps = &ts->sym_identifier;
508 *ps = s->prev_tok;
510 if (!keep)
511 sym_free(s);
512 s = ss;
514 if (!keep)
515 *ptop = b;
518 static void weaken_symbol(Sym *sym)
520 sym->type.t |= VT_WEAK;
521 if (sym->c > 0) {
522 int esym_type;
523 ElfW(Sym) *esym;
525 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
526 esym_type = ELFW(ST_TYPE)(esym->st_info);
527 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
531 static void apply_visibility(Sym *sym, CType *type)
533 int vis = sym->type.t & VT_VIS_MASK;
534 int vis2 = type->t & VT_VIS_MASK;
535 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
536 vis = vis2;
537 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
539 else
540 vis = (vis < vis2) ? vis : vis2;
541 sym->type.t &= ~VT_VIS_MASK;
542 sym->type.t |= vis;
544 if (sym->c > 0) {
545 ElfW(Sym) *esym;
547 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
548 vis >>= VT_VIS_SHIFT;
549 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
553 /* ------------------------------------------------------------------------- */
555 ST_FUNC void swap(int *p, int *q)
557 int t;
558 t = *p;
559 *p = *q;
560 *q = t;
563 static void vsetc(CType *type, int r, CValue *vc)
565 int v;
567 if (vtop >= vstack + (VSTACK_SIZE - 1))
568 tcc_error("memory full (vstack)");
569 /* cannot leave cpu flags set if other instructions are generated. Also
570 avoid leaving VT_JMP anywhere except on the top of the stack
571 because it would complicate the code generator. */
572 if (vtop >= vstack) {
573 v = vtop->r & VT_VALMASK;
574 if (v == VT_CMP || (v & ~1) == VT_JMP)
575 gv(RC_INT);
577 vtop++;
578 vtop->type = *type;
579 vtop->r = r;
580 vtop->r2 = VT_CONST;
581 vtop->c = *vc;
582 vtop->sym = NULL;
585 /* push a constant of type "type" with an unspecified value */
586 ST_FUNC void vpush(CType *type)
588 CValue cval;
589 vsetc(type, VT_CONST, &cval);
592 /* push integer constant */
593 ST_FUNC void vpushi(int v)
595 CValue cval;
596 cval.i = v;
597 vsetc(&int_type, VT_CONST, &cval);
600 /* push a pointer sized constant */
601 static void vpushs(addr_t v)
603 CValue cval;
604 cval.i = v;
605 vsetc(&size_type, VT_CONST, &cval);
608 /* push arbitrary 64bit constant */
609 ST_FUNC void vpush64(int ty, unsigned long long v)
611 CValue cval;
612 CType ctype;
613 ctype.t = ty;
614 ctype.ref = NULL;
615 cval.i = v;
616 vsetc(&ctype, VT_CONST, &cval);
619 /* push long long constant */
620 static inline void vpushll(long long v)
622 vpush64(VT_LLONG, v);
625 /* push a symbol value of TYPE */
626 static inline void vpushsym(CType *type, Sym *sym)
628 CValue cval;
629 cval.i = 0;
630 vsetc(type, VT_CONST | VT_SYM, &cval);
631 vtop->sym = sym;
634 /* Return a static symbol pointing to a section */
635 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
637 int v;
638 Sym *sym;
640 v = anon_sym++;
641 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
642 sym->type.ref = type->ref;
643 sym->r = VT_CONST | VT_SYM;
644 put_extern_sym(sym, sec, offset, size);
645 return sym;
648 /* push a reference to a section offset by adding a dummy symbol */
649 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
651 vpushsym(type, get_sym_ref(type, sec, offset, size));
654 /* define a new external reference to a symbol 'v' of type 'u' */
655 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
657 Sym *s;
659 s = sym_find(v);
660 if (!s) {
661 /* push forward reference */
662 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
663 s->type.ref = type->ref;
664 s->r = r | VT_CONST | VT_SYM;
666 return s;
669 /* define a new external reference to a symbol 'v' */
670 static Sym *external_sym(int v, CType *type, int r)
672 Sym *s;
674 s = sym_find(v);
675 if (!s) {
676 /* push forward reference */
677 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
678 s->type.t |= VT_EXTERN;
679 } else if (s->type.ref == func_old_type.ref) {
680 s->type.ref = type->ref;
681 s->r = r | VT_CONST | VT_SYM;
682 s->type.t |= VT_EXTERN;
683 } else if (!is_compatible_types(&s->type, type)) {
684 tcc_error("incompatible types for redefinition of '%s'",
685 get_tok_str(v, NULL));
687 /* Merge some storage attributes. */
688 if (type->t & VT_WEAK)
689 weaken_symbol(s);
691 if (type->t & VT_VIS_MASK)
692 apply_visibility(s, type);
694 return s;
697 /* push a reference to global symbol v */
698 ST_FUNC void vpush_global_sym(CType *type, int v)
700 vpushsym(type, external_global_sym(v, type, 0));
703 ST_FUNC void vset(CType *type, int r, long v)
705 CValue cval;
707 cval.i = v;
708 vsetc(type, r, &cval);
711 static void vseti(int r, int v)
713 CType type;
714 type.t = VT_INT;
715 type.ref = 0;
716 vset(&type, r, v);
719 ST_FUNC void vswap(void)
721 SValue tmp;
722 /* cannot leave cpu flags set if other instructions are generated. Also
723 avoid leaving VT_JMP anywhere except on the top of the stack
724 because it would complicate the code generator. */
725 if (vtop >= vstack) {
726 int v = vtop->r & VT_VALMASK;
727 if (v == VT_CMP || (v & ~1) == VT_JMP)
728 gv(RC_INT);
730 tmp = vtop[0];
731 vtop[0] = vtop[-1];
732 vtop[-1] = tmp;
734 /* XXX: +2% overall speed possible with optimized memswap
736 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
740 ST_FUNC void vpushv(SValue *v)
742 if (vtop >= vstack + (VSTACK_SIZE - 1))
743 tcc_error("memory full (vstack)");
744 vtop++;
745 *vtop = *v;
748 static void vdup(void)
750 vpushv(vtop);
753 /* save registers up to (vtop - n) stack entry */
754 ST_FUNC void save_regs(int n)
756 SValue *p, *p1;
757 for(p = vstack, p1 = vtop - n; p <= p1; p++)
758 save_reg(p->r);
761 /* save r to the memory stack, and mark it as being free */
762 ST_FUNC void save_reg(int r)
764 save_reg_upstack(r, 0);
767 /* save r to the memory stack, and mark it as being free,
768 if seen up to (vtop - n) stack entry */
769 ST_FUNC void save_reg_upstack(int r, int n)
771 int l, saved, size, align;
772 SValue *p, *p1, sv;
773 CType *type;
775 if ((r &= VT_VALMASK) >= VT_CONST)
776 return;
778 /* modify all stack values */
779 saved = 0;
780 l = 0;
781 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
782 if ((p->r & VT_VALMASK) == r ||
783 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
784 /* must save value on stack if not already done */
785 if (!saved) {
786 /* NOTE: must reload 'r' because r might be equal to r2 */
787 r = p->r & VT_VALMASK;
788 /* store register in the stack */
789 type = &p->type;
790 if ((p->r & VT_LVAL) ||
791 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
792 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
793 type = &char_pointer_type;
794 #else
795 type = &int_type;
796 #endif
797 size = type_size(type, &align);
798 loc = (loc - size) & -align;
799 sv.type.t = type->t;
800 sv.r = VT_LOCAL | VT_LVAL;
801 sv.c.i = loc;
802 store(r, &sv);
803 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
804 /* x86 specific: need to pop fp register ST0 if saved */
805 if (r == TREG_ST0) {
806 o(0xd8dd); /* fstp %st(0) */
808 #endif
809 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
810 /* special long long case */
811 if ((type->t & VT_BTYPE) == VT_LLONG) {
812 sv.c.i += 4;
813 store(p->r2, &sv);
815 #endif
816 l = loc;
817 saved = 1;
819 /* mark that stack entry as being saved on the stack */
820 if (p->r & VT_LVAL) {
821 /* also clear the bounded flag because the
822 relocation address of the function was stored in
823 p->c.i */
824 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
825 } else {
826 p->r = lvalue_type(p->type.t) | VT_LOCAL;
828 p->r2 = VT_CONST;
829 p->c.i = l;
834 #ifdef TCC_TARGET_ARM
835 /* find a register of class 'rc2' with at most one reference on stack.
836 * If none, call get_reg(rc) */
837 ST_FUNC int get_reg_ex(int rc, int rc2)
839 int r;
840 SValue *p;
842 for(r=0;r<NB_REGS;r++) {
843 if (reg_classes[r] & rc2) {
844 int n;
845 n=0;
846 for(p = vstack; p <= vtop; p++) {
847 if ((p->r & VT_VALMASK) == r ||
848 (p->r2 & VT_VALMASK) == r)
849 n++;
851 if (n <= 1)
852 return r;
855 return get_reg(rc);
857 #endif
859 /* find a free register of class 'rc'. If none, save one register */
860 ST_FUNC int get_reg(int rc)
862 int r;
863 SValue *p;
865 /* find a free register */
866 for(r=0;r<NB_REGS;r++) {
867 if (reg_classes[r] & rc) {
868 for(p=vstack;p<=vtop;p++) {
869 if ((p->r & VT_VALMASK) == r ||
870 (p->r2 & VT_VALMASK) == r)
871 goto notfound;
873 return r;
875 notfound: ;
878 /* no register left : free the first one on the stack (VERY
879 IMPORTANT to start from the bottom to ensure that we don't
880 spill registers used in gen_opi()) */
881 for(p=vstack;p<=vtop;p++) {
882 /* look at second register (if long long) */
883 r = p->r2 & VT_VALMASK;
884 if (r < VT_CONST && (reg_classes[r] & rc))
885 goto save_found;
886 r = p->r & VT_VALMASK;
887 if (r < VT_CONST && (reg_classes[r] & rc)) {
888 save_found:
889 save_reg(r);
890 return r;
893 /* Should never come here */
894 return -1;
897 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
898 if needed */
899 static void move_reg(int r, int s, int t)
901 SValue sv;
903 if (r != s) {
904 save_reg(r);
905 sv.type.t = t;
906 sv.type.ref = NULL;
907 sv.r = s;
908 sv.c.i = 0;
909 load(r, &sv);
913 /* get address of vtop (vtop MUST BE an lvalue) */
914 ST_FUNC void gaddrof(void)
916 if (vtop->r & VT_REF && !nocode_wanted)
917 gv(RC_INT);
918 vtop->r &= ~VT_LVAL;
919 /* tricky: if saved lvalue, then we can go back to lvalue */
920 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
921 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
926 #ifdef CONFIG_TCC_BCHECK
927 /* generate lvalue bound code */
928 static void gbound(void)
930 int lval_type;
931 CType type1;
933 vtop->r &= ~VT_MUSTBOUND;
934 /* if lvalue, then use checking code before dereferencing */
935 if (vtop->r & VT_LVAL) {
936 /* if not VT_BOUNDED value, then make one */
937 if (!(vtop->r & VT_BOUNDED)) {
938 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
939 /* must save type because we must set it to int to get pointer */
940 type1 = vtop->type;
941 vtop->type.t = VT_PTR;
942 gaddrof();
943 vpushi(0);
944 gen_bounded_ptr_add();
945 vtop->r |= lval_type;
946 vtop->type = type1;
948 /* then check for dereferencing */
949 gen_bounded_ptr_deref();
952 #endif
954 /* store vtop in a register belonging to class 'rc'. lvalues are
955 converted to values. Cannot be used if the value cannot be
956 converted to a register value (such as structures). */
957 ST_FUNC int gv(int rc)
959 int r, bit_pos, bit_size, size, align, i;
960 int rc2;
962 /* NOTE: get_reg can modify vstack[] */
963 if (vtop->type.t & VT_BITFIELD) {
964 CType type;
965 int bits = 32;
966 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
967 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
968 /* remove bit field info to avoid loops */
969 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
970 /* cast to int to propagate signedness in following ops */
971 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
972 type.t = VT_LLONG;
973 bits = 64;
974 } else
975 type.t = VT_INT;
976 if((vtop->type.t & VT_UNSIGNED) ||
977 (vtop->type.t & VT_BTYPE) == VT_BOOL)
978 type.t |= VT_UNSIGNED;
979 gen_cast(&type);
980 /* generate shifts */
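/* Bitfield extraction trick: shift left by (bits - (bit_pos + bit_size))
   so that the field's top bit becomes the word's sign bit, then shift
   right by (bits - bit_size); TOK_SAR sign-extends the field, and is
   transformed into a logical shift when the value is unsigned. */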
981 vpushi(bits - (bit_pos + bit_size));
982 gen_op(TOK_SHL);
983 vpushi(bits - bit_size);
984 /* NOTE: transformed to SHR if unsigned */
985 gen_op(TOK_SAR);
986 r = gv(rc);
987 } else {
988 if (is_float(vtop->type.t) &&
989 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
990 Sym *sym;
991 int *ptr;
992 unsigned long offset;
993 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
994 CValue check;
995 #endif
997 /* XXX: unify with initializers handling ? */
998 /* CPUs usually cannot use float constants, so we store them
999 generically in data segment */
1000 size = type_size(&vtop->type, &align);
1001 offset = (data_section->data_offset + align - 1) & -align;
1002 data_section->data_offset = offset;
1003 /* XXX: not portable yet */
1004 #if defined(__i386__) || defined(__x86_64__)
1005 /* Zero pad x87 tenbyte long doubles */
1006 if (size == LDOUBLE_SIZE) {
1007 vtop->c.tab[2] &= 0xffff;
1008 #if LDOUBLE_SIZE == 16
1009 vtop->c.tab[3] = 0;
1010 #endif
1012 #endif
1013 ptr = section_ptr_add(data_section, size);
1014 size = size >> 2;
1015 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1016 check.d = 1;
1017 if(check.tab[0])
1018 for(i=0;i<size;i++)
1019 ptr[i] = vtop->c.tab[size-1-i];
1020 else
1021 #endif
1022 for(i=0;i<size;i++)
1023 ptr[i] = vtop->c.tab[i];
1024 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1025 vtop->r |= VT_LVAL | VT_SYM;
1026 vtop->sym = sym;
1027 vtop->c.i = 0;
1029 #ifdef CONFIG_TCC_BCHECK
1030 if (vtop->r & VT_MUSTBOUND)
1031 gbound();
1032 #endif
1034 r = vtop->r & VT_VALMASK;
1035 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1036 #ifndef TCC_TARGET_ARM64
1037 if (rc == RC_IRET)
1038 rc2 = RC_LRET;
1039 #ifdef TCC_TARGET_X86_64
1040 else if (rc == RC_FRET)
1041 rc2 = RC_QRET;
1042 #endif
1043 #endif
1045 /* need to reload if:
1046 - constant
1047 - lvalue (need to dereference pointer)
1048 - already a register, but not in the right class */
1049 if (r >= VT_CONST
1050 || (vtop->r & VT_LVAL)
1051 || !(reg_classes[r] & rc)
1052 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1053 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1054 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1055 #else
1056 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1057 #endif
1060 r = get_reg(rc);
1061 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1062 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1063 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1064 #else
1065 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1066 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1067 unsigned long long ll;
1068 #endif
1069 int r2, original_type;
1070 original_type = vtop->type.t;
1071 /* two register type load : expand to two words
1072 temporarily */
1073 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1074 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1075 /* load constant */
1076 ll = vtop->c.i;
1077 vtop->c.i = ll; /* first word */
1078 load(r, vtop);
1079 vtop->r = r; /* save register value */
1080 vpushi(ll >> 32); /* second word */
1081 } else
1082 #endif
1083 if (vtop->r & VT_LVAL) {
1084 /* We do not want to modify the long long
1085 pointer here, so the safest (and least
1086 efficient) approach is to save all the other registers
1087 on the stack. XXX: totally inefficient. */
1088 #if 0
1089 save_regs(1);
1090 #else
1091 /* lvalue_save: save only if used further down the stack */
1092 save_reg_upstack(vtop->r, 1);
1093 #endif
1094 /* load from memory */
1095 vtop->type.t = load_type;
1096 load(r, vtop);
1097 vdup();
1098 vtop[-1].r = r; /* save register value */
1099 /* increment pointer to get second word */
1100 vtop->type.t = addr_type;
1101 gaddrof();
1102 vpushi(load_size);
1103 gen_op('+');
1104 vtop->r |= VT_LVAL;
1105 vtop->type.t = load_type;
1106 } else {
1107 /* move registers */
1108 load(r, vtop);
1109 vdup();
1110 vtop[-1].r = r; /* save register value */
1111 vtop->r = vtop[-1].r2;
1113 /* Allocate second register. Here we rely on the fact that
1114 get_reg() tries first to free r2 of an SValue. */
1115 r2 = get_reg(rc2);
1116 load(r2, vtop);
1117 vpop();
1118 /* write second register */
1119 vtop->r2 = r2;
1120 vtop->type.t = original_type;
1121 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1122 int t1, t;
1123 /* lvalue of scalar type : need to use lvalue type
1124 because of possible cast */
1125 t = vtop->type.t;
1126 t1 = t;
1127 /* compute memory access type */
1128 if (vtop->r & VT_REF)
1129 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1130 t = VT_PTR;
1131 #else
1132 t = VT_INT;
1133 #endif
1134 else if (vtop->r & VT_LVAL_BYTE)
1135 t = VT_BYTE;
1136 else if (vtop->r & VT_LVAL_SHORT)
1137 t = VT_SHORT;
1138 if (vtop->r & VT_LVAL_UNSIGNED)
1139 t |= VT_UNSIGNED;
1140 vtop->type.t = t;
1141 load(r, vtop);
1142 /* restore wanted type */
1143 vtop->type.t = t1;
1144 } else {
1145 /* one register type load */
1146 load(r, vtop);
1149 vtop->r = r;
1150 #ifdef TCC_TARGET_C67
1151 /* uses register pairs for doubles */
1152 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1153 vtop->r2 = r+1;
1154 #endif
1156 return r;
1159 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1160 ST_FUNC void gv2(int rc1, int rc2)
1162 int v;
1164 /* generate more generic register first. But VT_JMP or VT_CMP
1165 values must be generated first in all cases to avoid possible
1166 reload errors */
1167 v = vtop[0].r & VT_VALMASK;
1168 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1169 vswap();
1170 gv(rc1);
1171 vswap();
1172 gv(rc2);
1173 /* test if reload is needed for first register */
1174 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1175 vswap();
1176 gv(rc1);
1177 vswap();
1179 } else {
1180 gv(rc2);
1181 vswap();
1182 gv(rc1);
1183 vswap();
1184 /* test if reload is needed for first register */
1185 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1186 gv(rc2);
1191 #ifndef TCC_TARGET_ARM64
1192 /* wrapper around RC_FRET to return a register by type */
1193 static int rc_fret(int t)
1195 #ifdef TCC_TARGET_X86_64
1196 if (t == VT_LDOUBLE) {
1197 return RC_ST0;
1199 #endif
1200 return RC_FRET;
1202 #endif
1204 /* wrapper around REG_FRET to return a register by type */
1205 static int reg_fret(int t)
1207 #ifdef TCC_TARGET_X86_64
1208 if (t == VT_LDOUBLE) {
1209 return TREG_ST0;
1211 #endif
1212 return REG_FRET;
1215 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1216 /* expand 64bit on stack in two ints */
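/* Three cases: a constant is duplicated and its upper 32 bits shifted
   down; an lvalue in memory is duplicated with the address offset by 4
   to reach the high word; anything else is loaded into a register pair
   which is then split across two stack entries. */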
1217 static void lexpand(void)
1219 int u, v;
1220 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1221 v = vtop->r & (VT_VALMASK | VT_LVAL);
1222 if (v == VT_CONST) {
1223 vdup();
1224 vtop[0].c.i >>= 32;
1225 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1226 vdup();
1227 vtop[0].c.i += 4;
1228 } else {
1229 gv(RC_INT);
1230 vdup();
1231 vtop[0].r = vtop[-1].r2;
1232 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1234 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1236 #endif
1238 #ifdef TCC_TARGET_ARM
1239 /* expand long long on stack */
1240 ST_FUNC void lexpand_nr(void)
1242 int u,v;
1244 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1245 vdup();
1246 vtop->r2 = VT_CONST;
1247 vtop->type.t = VT_INT | u;
1248 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1249 if (v == VT_CONST) {
1250 vtop[-1].c.i = vtop->c.i;
1251 vtop->c.i = vtop->c.i >> 32;
1252 vtop->r = VT_CONST;
1253 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1254 vtop->c.i += 4;
1255 vtop->r = vtop[-1].r;
1256 } else if (v > VT_CONST) {
1257 vtop--;
1258 lexpand();
1259 } else
1260 vtop->r = vtop[-1].r2;
1261 vtop[-1].r2 = VT_CONST;
1262 vtop[-1].type.t = VT_INT | u;
1264 #endif
1266 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1267 /* build a long long from two ints */
1268 static void lbuild(int t)
1270 gv2(RC_INT, RC_INT);
1271 vtop[-1].r2 = vtop[0].r;
1272 vtop[-1].type.t = t;
1273 vpop();
1275 #endif
1277 /* rotate n first stack elements to the bottom
1278 I1 ... In -> I2 ... In I1 [top is right]
1280 ST_FUNC void vrotb(int n)
1282 int i;
1283 SValue tmp;
1285 tmp = vtop[-n + 1];
1286 for(i=-n+1;i!=0;i++)
1287 vtop[i] = vtop[i+1];
1288 vtop[0] = tmp;
1291 /* rotate the n elements before entry e towards the top
1292 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1294 ST_FUNC void vrote(SValue *e, int n)
1296 int i;
1297 SValue tmp;
1299 tmp = *e;
1300 for(i = 0;i < n - 1; i++)
1301 e[-i] = e[-i - 1];
1302 e[-n + 1] = tmp;
1305 /* rotate n first stack elements to the top
1306 I1 ... In -> In I1 ... I(n-1) [top is right]
1308 ST_FUNC void vrott(int n)
1310 vrote(vtop, n);
1313 /* pop stack value */
1314 ST_FUNC void vpop(void)
1316 int v;
1317 v = vtop->r & VT_VALMASK;
1318 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1319 /* for x86, we need to pop the FP stack */
1320 if (v == TREG_ST0 && !nocode_wanted) {
1321 o(0xd8dd); /* fstp %st(0) */
1322 } else
1323 #endif
1324 if (v == VT_JMP || v == VT_JMPI) {
1325 /* need to resolve the pending jump if && or || was used without a test */
1326 gsym(vtop->c.i);
1328 vtop--;
1331 /* convert stack entry to register and duplicate its value in another
1332 register */
1333 static void gv_dup(void)
1335 int rc, t, r, r1;
1336 SValue sv;
1338 t = vtop->type.t;
1339 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1340 if ((t & VT_BTYPE) == VT_LLONG) {
1341 lexpand();
1342 gv_dup();
1343 vswap();
1344 vrotb(3);
1345 gv_dup();
1346 vrotb(4);
1347 /* stack: H L L1 H1 */
1348 lbuild(t);
1349 vrotb(3);
1350 vrotb(3);
1351 vswap();
1352 lbuild(t);
1353 vswap();
1354 } else
1355 #endif
1357 /* duplicate value */
1358 rc = RC_INT;
1359 sv.type.t = VT_INT;
1360 if (is_float(t)) {
1361 rc = RC_FLOAT;
1362 #ifdef TCC_TARGET_X86_64
1363 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1364 rc = RC_ST0;
1366 #endif
1367 sv.type.t = t;
1369 r = gv(rc);
1370 r1 = get_reg(rc);
1371 sv.r = r;
1372 sv.c.i = 0;
1373 load(r1, &sv); /* move r to r1 */
1374 vdup();
1375 /* duplicates value */
1376 if (r != r1)
1377 vtop->r = r1;
1381 /* Generate value test
1383 * Generate a test for any value (jump, comparison and integers) */
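/* Plain values are first compared against 0 with TOK_NE; if the result is
   a compile-time constant the test degenerates into either an unconditional
   jump or a fall-through, otherwise gtst() emits the real conditional jump. */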
1384 ST_FUNC int gvtst(int inv, int t)
1386 int v = vtop->r & VT_VALMASK;
1387 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1388 vpushi(0);
1389 gen_op(TOK_NE);
1391 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1392 /* constant jmp optimization */
1393 if ((vtop->c.i != 0) != inv)
1394 t = gjmp(t);
1395 vtop--;
1396 return t;
1398 return gtst(inv, t);
1401 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1402 /* generate CPU independent (unsigned) long long operations */
1403 static void gen_opl(int op)
1405 int t, a, b, op1, c, i;
1406 int func;
1407 unsigned short reg_iret = REG_IRET;
1408 unsigned short reg_lret = REG_LRET;
1409 SValue tmp;
1411 switch(op) {
1412 case '/':
1413 case TOK_PDIV:
1414 func = TOK___divdi3;
1415 goto gen_func;
1416 case TOK_UDIV:
1417 func = TOK___udivdi3;
1418 goto gen_func;
1419 case '%':
1420 func = TOK___moddi3;
1421 goto gen_mod_func;
1422 case TOK_UMOD:
1423 func = TOK___umoddi3;
1424 gen_mod_func:
1425 #ifdef TCC_ARM_EABI
1426 reg_iret = TREG_R2;
1427 reg_lret = TREG_R3;
1428 #endif
1429 gen_func:
1430 /* call generic long long function */
1431 vpush_global_sym(&func_old_type, func);
1432 vrott(3);
1433 gfunc_call(2);
1434 vpushi(0);
1435 vtop->r = reg_iret;
1436 vtop->r2 = reg_lret;
1437 break;
1438 case '^':
1439 case '&':
1440 case '|':
1441 case '*':
1442 case '+':
1443 case '-':
1444 //pv("gen_opl A",0,2);
1445 t = vtop->type.t;
1446 vswap();
1447 lexpand();
1448 vrotb(3);
1449 lexpand();
1450 /* stack: L1 H1 L2 H2 */
1451 tmp = vtop[0];
1452 vtop[0] = vtop[-3];
1453 vtop[-3] = tmp;
1454 tmp = vtop[-2];
1455 vtop[-2] = vtop[-3];
1456 vtop[-3] = tmp;
1457 vswap();
1458 /* stack: H1 H2 L1 L2 */
1459 //pv("gen_opl B",0,4);
1460 if (op == '*') {
1461 vpushv(vtop - 1);
1462 vpushv(vtop - 1);
1463 gen_op(TOK_UMULL);
1464 lexpand();
1465 /* stack: H1 H2 L1 L2 ML MH */
1466 for(i=0;i<4;i++)
1467 vrotb(6);
1468 /* stack: ML MH H1 H2 L1 L2 */
1469 tmp = vtop[0];
1470 vtop[0] = vtop[-2];
1471 vtop[-2] = tmp;
1472 /* stack: ML MH H1 L2 H2 L1 */
1473 gen_op('*');
1474 vrotb(3);
1475 vrotb(3);
1476 gen_op('*');
1477 /* stack: ML MH M1 M2 */
1478 gen_op('+');
1479 gen_op('+');
1480 } else if (op == '+' || op == '-') {
1481 /* XXX: add non carry method too (for MIPS or alpha) */
1482 if (op == '+')
1483 op1 = TOK_ADDC1;
1484 else
1485 op1 = TOK_SUBC1;
1486 gen_op(op1);
1487 /* stack: H1 H2 (L1 op L2) */
1488 vrotb(3);
1489 vrotb(3);
1490 gen_op(op1 + 1); /* TOK_xxxC2 */
1491 } else {
1492 gen_op(op);
1493 /* stack: H1 H2 (L1 op L2) */
1494 vrotb(3);
1495 vrotb(3);
1496 /* stack: (L1 op L2) H1 H2 */
1497 gen_op(op);
1498 /* stack: (L1 op L2) (H1 op H2) */
1500 /* stack: L H */
1501 lbuild(t);
1502 break;
1503 case TOK_SAR:
1504 case TOK_SHR:
1505 case TOK_SHL:
1506 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1507 t = vtop[-1].type.t;
1508 vswap();
1509 lexpand();
1510 vrotb(3);
1511 /* stack: L H shift */
1512 c = (int)vtop->c.i;
1513 /* constant: simpler */
1514 /* NOTE: all comments are for SHL. The other cases are
1515 done by swapping words */
1516 vpop();
1517 if (op != TOK_SHL)
1518 vswap();
1519 if (c >= 32) {
1520 /* stack: L H */
1521 vpop();
1522 if (c > 32) {
1523 vpushi(c - 32);
1524 gen_op(op);
1526 if (op != TOK_SAR) {
1527 vpushi(0);
1528 } else {
1529 gv_dup();
1530 vpushi(31);
1531 gen_op(TOK_SAR);
1533 vswap();
1534 } else {
1535 vswap();
1536 gv_dup();
1537 /* stack: H L L */
1538 vpushi(c);
1539 gen_op(op);
1540 vswap();
1541 vpushi(32 - c);
1542 if (op == TOK_SHL)
1543 gen_op(TOK_SHR);
1544 else
1545 gen_op(TOK_SHL);
1546 vrotb(3);
1547 /* stack: L L H */
1548 vpushi(c);
1549 if (op == TOK_SHL)
1550 gen_op(TOK_SHL);
1551 else
1552 gen_op(TOK_SHR);
1553 gen_op('|');
1555 if (op != TOK_SHL)
1556 vswap();
1557 lbuild(t);
1558 } else {
1559 /* XXX: should provide a faster fallback on x86 ? */
1560 switch(op) {
1561 case TOK_SAR:
1562 func = TOK___ashrdi3;
1563 goto gen_func;
1564 case TOK_SHR:
1565 func = TOK___lshrdi3;
1566 goto gen_func;
1567 case TOK_SHL:
1568 func = TOK___ashldi3;
1569 goto gen_func;
1572 break;
1573 default:
1574 /* compare operations */
1575 t = vtop->type.t;
1576 vswap();
1577 lexpand();
1578 vrotb(3);
1579 lexpand();
1580 /* stack: L1 H1 L2 H2 */
1581 tmp = vtop[-1];
1582 vtop[-1] = vtop[-2];
1583 vtop[-2] = tmp;
1584 /* stack: L1 L2 H1 H2 */
1585 /* compare high */
1586 op1 = op;
1587 /* when values are equal, we need to compare low words. since
1588 the jump is inverted, we invert the test too. */
1589 if (op1 == TOK_LT)
1590 op1 = TOK_LE;
1591 else if (op1 == TOK_GT)
1592 op1 = TOK_GE;
1593 else if (op1 == TOK_ULT)
1594 op1 = TOK_ULE;
1595 else if (op1 == TOK_UGT)
1596 op1 = TOK_UGE;
1597 a = 0;
1598 b = 0;
1599 gen_op(op1);
1600 if (op1 != TOK_NE) {
1601 a = gvtst(1, 0);
1603 if (op != TOK_EQ) {
1604 /* generate non equal test */
1605 /* XXX: NOT PORTABLE yet */
1606 if (a == 0) {
1607 b = gvtst(0, 0);
1608 } else {
1609 #if defined(TCC_TARGET_I386)
1610 b = psym(0x850f, 0);
1611 #elif defined(TCC_TARGET_ARM)
1612 b = ind;
1613 o(0x1A000000 | encbranch(ind, 0, 1));
1614 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1615 tcc_error("not implemented");
1616 #else
1617 #error not supported
1618 #endif
1621 /* compare low. Always unsigned */
1622 op1 = op;
1623 if (op1 == TOK_LT)
1624 op1 = TOK_ULT;
1625 else if (op1 == TOK_LE)
1626 op1 = TOK_ULE;
1627 else if (op1 == TOK_GT)
1628 op1 = TOK_UGT;
1629 else if (op1 == TOK_GE)
1630 op1 = TOK_UGE;
1631 gen_op(op1);
1632 a = gvtst(1, a);
1633 gsym(b);
1634 vseti(VT_JMPI, a);
1635 break;
1638 #endif
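/* Constant-folding helper for signed 64-bit division on unsigned operands:
   divide the magnitudes, then negate the quotient when the operand signs
   differ (i.e. (a ^ b) has its top bit set). */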
1640 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1642 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1643 return (a ^ b) >> 63 ? -x : x;
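/* Signed "less than" on unsigned operands: flipping the sign bit of both
   values maps the signed ordering onto the unsigned one. */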
1646 static int gen_opic_lt(uint64_t a, uint64_t b)
1648 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1651 /* handle integer constant optimizations and various
1652 machine-independent optimizations */
1653 static void gen_opic(int op)
1655 SValue *v1 = vtop - 1;
1656 SValue *v2 = vtop;
1657 int t1 = v1->type.t & VT_BTYPE;
1658 int t2 = v2->type.t & VT_BTYPE;
1659 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1660 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1661 uint64_t l1 = c1 ? v1->c.i : 0;
1662 uint64_t l2 = c2 ? v2->c.i : 0;
1663 int shm = (t1 == VT_LLONG) ? 63 : 31;
1665 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1666 l1 = ((uint32_t)l1 |
1667 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1668 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1669 l2 = ((uint32_t)l2 |
1670 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1672 if (c1 && c2) {
1673 switch(op) {
1674 case '+': l1 += l2; break;
1675 case '-': l1 -= l2; break;
1676 case '&': l1 &= l2; break;
1677 case '^': l1 ^= l2; break;
1678 case '|': l1 |= l2; break;
1679 case '*': l1 *= l2; break;
1681 case TOK_PDIV:
1682 case '/':
1683 case '%':
1684 case TOK_UDIV:
1685 case TOK_UMOD:
1686 /* if division by zero, generate explicit division */
1687 if (l2 == 0) {
1688 if (const_wanted)
1689 tcc_error("division by zero in constant");
1690 goto general_case;
1692 switch(op) {
1693 default: l1 = gen_opic_sdiv(l1, l2); break;
1694 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1695 case TOK_UDIV: l1 = l1 / l2; break;
1696 case TOK_UMOD: l1 = l1 % l2; break;
1698 break;
1699 case TOK_SHL: l1 <<= (l2 & shm); break;
1700 case TOK_SHR: l1 >>= (l2 & shm); break;
1701 case TOK_SAR:
1702 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1703 break;
1704 /* tests */
1705 case TOK_ULT: l1 = l1 < l2; break;
1706 case TOK_UGE: l1 = l1 >= l2; break;
1707 case TOK_EQ: l1 = l1 == l2; break;
1708 case TOK_NE: l1 = l1 != l2; break;
1709 case TOK_ULE: l1 = l1 <= l2; break;
1710 case TOK_UGT: l1 = l1 > l2; break;
1711 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1712 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1713 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1714 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1715 /* logical */
1716 case TOK_LAND: l1 = l1 && l2; break;
1717 case TOK_LOR: l1 = l1 || l2; break;
1718 default:
1719 goto general_case;
1721 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1722 l1 = ((uint32_t)l1 |
1723 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1724 v1->c.i = l1;
1725 vtop--;
1726 } else {
1727 /* if commutative ops, put c2 as constant */
1728 if (c1 && (op == '+' || op == '&' || op == '^' ||
1729 op == '|' || op == '*')) {
1730 vswap();
1731 c2 = c1; //c = c1, c1 = c2, c2 = c;
1732 l2 = l1; //l = l1, l1 = l2, l2 = l;
1734 if (!const_wanted &&
1735 c1 && ((l1 == 0 &&
1736 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1737 (l1 == -1 && op == TOK_SAR))) {
1738 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1739 vtop--;
1740 } else if (!const_wanted &&
1741 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1742 (l2 == -1 && op == '|') ||
1743 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1744 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1745 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1746 if (l2 == 1)
1747 vtop->c.i = 0;
1748 vswap();
1749 vtop--;
1750 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1751 op == TOK_PDIV) &&
1752 l2 == 1) ||
1753 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1754 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1755 l2 == 0) ||
1756 (op == '&' &&
1757 l2 == -1))) {
1758 /* filter out NOP operations like x*1, x-0, x&-1... */
1759 vtop--;
1760 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1761 /* try to use shifts instead of muls or divs */
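/* (l2 & (l2 - 1)) == 0 tests for a power of two; the loop below computes
   log2(l2) so that '*' becomes a left shift, TOK_PDIV an arithmetic right
   shift and TOK_UDIV a logical right shift. */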
1762 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1763 int n = -1;
1764 while (l2) {
1765 l2 >>= 1;
1766 n++;
1768 vtop->c.i = n;
1769 if (op == '*')
1770 op = TOK_SHL;
1771 else if (op == TOK_PDIV)
1772 op = TOK_SAR;
1773 else
1774 op = TOK_SHR;
1776 goto general_case;
1777 } else if (c2 && (op == '+' || op == '-') &&
1778 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1779 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1780 /* symbol + constant case */
1781 if (op == '-')
1782 l2 = -l2;
1783 l2 += vtop[-1].c.i;
1784 /* The backends can't always deal with addends to symbols
1785 larger than +-1<<31. Don't construct such. */
1786 if ((int)l2 != l2)
1787 goto general_case;
1788 vtop--;
1789 vtop->c.i = l2;
1790 } else {
1791 general_case:
1792 if (!nocode_wanted) {
1793 /* call low level op generator */
1794 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1795 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1796 gen_opl(op);
1797 else
1798 gen_opi(op);
1799 } else {
1800 vtop--;
1801 /* Ensure vtop isn't marked VT_CONST in case something
1802 up our callchain is interested in const-ness of the
1803 expression. Also make it a non-LVAL if it was,
1804 so that further code can't accidentally generate
1805 a deref (this happens only for buggy uses of e.g.
1806 gv() under nocode_wanted). */
1807 vtop->r &= ~(VT_VALMASK | VT_LVAL);
1813 /* generate a floating point operation with constant propagation */
1814 static void gen_opif(int op)
1816 int c1, c2;
1817 SValue *v1, *v2;
1818 long double f1, f2;
1820 v1 = vtop - 1;
1821 v2 = vtop;
1822 /* currently, we cannot do computations with forward symbols */
1823 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1824 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 if (c1 && c2) {
1826 if (v1->type.t == VT_FLOAT) {
1827 f1 = v1->c.f;
1828 f2 = v2->c.f;
1829 } else if (v1->type.t == VT_DOUBLE) {
1830 f1 = v1->c.d;
1831 f2 = v2->c.d;
1832 } else {
1833 f1 = v1->c.ld;
1834 f2 = v2->c.ld;
1837 /* NOTE: we only do constant propagation if the numbers are finite (not
1838 NaN or infinity) (ANSI spec) */
1839 if (!ieee_finite(f1) || !ieee_finite(f2))
1840 goto general_case;
1842 switch(op) {
1843 case '+': f1 += f2; break;
1844 case '-': f1 -= f2; break;
1845 case '*': f1 *= f2; break;
1846 case '/':
1847 if (f2 == 0.0) {
1848 if (const_wanted)
1849 tcc_error("division by zero in constant");
1850 goto general_case;
1852 f1 /= f2;
1853 break;
1854 /* XXX: also handles tests ? */
1855 default:
1856 goto general_case;
1858 /* XXX: overflow test ? */
1859 if (v1->type.t == VT_FLOAT) {
1860 v1->c.f = f1;
1861 } else if (v1->type.t == VT_DOUBLE) {
1862 v1->c.d = f1;
1863 } else {
1864 v1->c.ld = f1;
1866 vtop--;
1867 } else {
1868 general_case:
1869 if (!nocode_wanted) {
1870 gen_opf(op);
1871 } else {
1872 vtop--;
1877 static int pointed_size(CType *type)
1879 int align;
1880 return type_size(pointed_type(type), &align);
1883 static void vla_runtime_pointed_size(CType *type)
1885 int align;
1886 vla_runtime_type_size(pointed_type(type), &align);
1889 static inline int is_null_pointer(SValue *p)
1891 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1892 return 0;
1893 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1894 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1895 ((p->type.t & VT_BTYPE) == VT_PTR &&
1896 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1899 static inline int is_integer_btype(int bt)
1901 return (bt == VT_BYTE || bt == VT_SHORT ||
1902 bt == VT_INT || bt == VT_LLONG);
1905 /* check types for comparison or subtraction of pointers */
1906 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1908 CType *type1, *type2, tmp_type1, tmp_type2;
1909 int bt1, bt2;
1911 /* null pointers are accepted for all comparisons, as in gcc */
1912 if (is_null_pointer(p1) || is_null_pointer(p2))
1913 return;
1914 type1 = &p1->type;
1915 type2 = &p2->type;
1916 bt1 = type1->t & VT_BTYPE;
1917 bt2 = type2->t & VT_BTYPE;
1918 /* accept comparison between pointer and integer with a warning */
1919 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1920 if (op != TOK_LOR && op != TOK_LAND )
1921 tcc_warning("comparison between pointer and integer");
1922 return;
1925 /* both must be pointers or implicit function pointers */
1926 if (bt1 == VT_PTR) {
1927 type1 = pointed_type(type1);
1928 } else if (bt1 != VT_FUNC)
1929 goto invalid_operands;
1931 if (bt2 == VT_PTR) {
1932 type2 = pointed_type(type2);
1933 } else if (bt2 != VT_FUNC) {
1934 invalid_operands:
1935 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1937 if ((type1->t & VT_BTYPE) == VT_VOID ||
1938 (type2->t & VT_BTYPE) == VT_VOID)
1939 return;
1940 tmp_type1 = *type1;
1941 tmp_type2 = *type2;
1942 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1943 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1944 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1945 /* gcc-like error if '-' is used */
1946 if (op == '-')
1947 goto invalid_operands;
1948 else
1949 tcc_warning("comparison of distinct pointer types lacks a cast");
1953 /* generic gen_op: handles type problems */
1954 ST_FUNC void gen_op(int op)
1956 int u, t1, t2, bt1, bt2, t;
1957 CType type1;
1959 redo:
1960 t1 = vtop[-1].type.t;
1961 t2 = vtop[0].type.t;
1962 bt1 = t1 & VT_BTYPE;
1963 bt2 = t2 & VT_BTYPE;
1965 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1966 tcc_error("operation on a struct");
1967 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1968 if (bt2 == VT_FUNC) {
1969 mk_pointer(&vtop->type);
1970 gaddrof();
1972 if (bt1 == VT_FUNC) {
1973 vswap();
1974 mk_pointer(&vtop->type);
1975 gaddrof();
1976 vswap();
1978 goto redo;
1979 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1980 /* at least one operand is a pointer */
1981 /* relational op: both operands must be pointers */
1982 if (op >= TOK_ULT && op <= TOK_LOR) {
1983 check_comparison_pointer_types(vtop - 1, vtop, op);
1984 /* pointers are handled as unsigned */
1985 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1986 t = VT_LLONG | VT_UNSIGNED;
1987 #else
1988 t = VT_INT | VT_UNSIGNED;
1989 #endif
1990 goto std_op;
1992 /* if both pointers, then it must be the '-' op */
1993 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1994 if (op != '-')
1995 tcc_error("cannot use pointers here");
1996 check_comparison_pointer_types(vtop - 1, vtop, op);
1997 /* XXX: check that types are compatible */
1998 if (vtop[-1].type.t & VT_VLA) {
1999 vla_runtime_pointed_size(&vtop[-1].type);
2000 } else {
2001 vpushi(pointed_size(&vtop[-1].type));
2003 vrott(3);
2004 gen_opic(op);
2005 /* set to integer type */
2006 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2007 vtop->type.t = VT_LLONG;
2008 #else
2009 vtop->type.t = VT_INT;
2010 #endif
2011 vswap();
2012 gen_op(TOK_PDIV);
2013 } else {
2014 /* exactly one pointer : must be '+' or '-'. */
2015 if (op != '-' && op != '+')
2016 tcc_error("cannot use pointers here");
2017 /* Put pointer as first operand */
2018 if (bt2 == VT_PTR) {
2019 vswap();
2020 swap(&t1, &t2);
2022 #if PTR_SIZE == 4
2023 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2024 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2025 gen_cast(&int_type);
2026 #endif
2027 type1 = vtop[-1].type;
2028 type1.t &= ~VT_ARRAY;
2029 if (vtop[-1].type.t & VT_VLA)
2030 vla_runtime_pointed_size(&vtop[-1].type);
2031 else {
2032 u = pointed_size(&vtop[-1].type);
2033 if (u < 0)
2034 tcc_error("unknown array element size");
2035 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2036 vpushll(u);
2037 #else
2038 /* XXX: cast to int ? (long long case) */
2039 vpushi(u);
2040 #endif
2042 gen_op('*');
2043 #if 0
2044 /* #ifdef CONFIG_TCC_BCHECK
2045 The main reason for removing this code:
2046 #include <stdio.h>
2047 int main ()
2049 int v[10];
2050 int i = 10;
2051 int j = 9;
2052 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2053 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2055 When this code is enabled, the output looks like
2056 v+i-j = 0xfffffffe
2057 v+(i-j) = 0xbff84000
2059 /* if evaluating constant expression, no code should be
2060 generated, so no bound check */
2061 if (tcc_state->do_bounds_check && !const_wanted) {
2062 /* if bounded pointers, we generate a special code to
2063 test bounds */
2064 if (op == '-') {
2065 vpushi(0);
2066 vswap();
2067 gen_op('-');
2069 gen_bounded_ptr_add();
2070 } else
2071 #endif
2073 gen_opic(op);
2075 /* restore the type in case gen_opic() swapped operands */
2076 vtop->type = type1;
2078 } else if (is_float(bt1) || is_float(bt2)) {
2079 /* compute bigger type and do implicit casts */
2080 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2081 t = VT_LDOUBLE;
2082 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2083 t = VT_DOUBLE;
2084 } else {
2085 t = VT_FLOAT;
2087 /* floats can only be used for a few operations */
2088 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2089 (op < TOK_ULT || op > TOK_GT))
2090 tcc_error("invalid operands for binary operation");
2091 goto std_op;
2092 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2093 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2094 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2095 t |= VT_UNSIGNED;
2096 goto std_op;
2097 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2098 /* cast to biggest op */
2099 t = VT_LLONG;
2100 /* convert to unsigned if it does not fit in a long long */
2101 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2102 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2103 t |= VT_UNSIGNED;
2104 goto std_op;
2105 } else {
2106 /* integer operations */
2107 t = VT_INT;
2108 /* convert to unsigned if it does not fit in an integer */
2109 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2110 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2111 t |= VT_UNSIGNED;
2112 std_op:
2113 /* XXX: currently, some unsigned operations are explicit, so
2114 we modify them here */
2115 if (t & VT_UNSIGNED) {
2116 if (op == TOK_SAR)
2117 op = TOK_SHR;
2118 else if (op == '/')
2119 op = TOK_UDIV;
2120 else if (op == '%')
2121 op = TOK_UMOD;
2122 else if (op == TOK_LT)
2123 op = TOK_ULT;
2124 else if (op == TOK_GT)
2125 op = TOK_UGT;
2126 else if (op == TOK_LE)
2127 op = TOK_ULE;
2128 else if (op == TOK_GE)
2129 op = TOK_UGE;
2131 vswap();
2132 type1.t = t;
2133 gen_cast(&type1);
2134 vswap();
2135 /* special case for shifts and long long: we keep the shift as
2136 an integer */
2137 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2138 type1.t = VT_INT;
2139 gen_cast(&type1);
2140 if (is_float(t))
2141 gen_opif(op);
2142 else
2143 gen_opic(op);
2144 if (op >= TOK_ULT && op <= TOK_GT) {
2145 /* relational op: the result is an int */
2146 vtop->type.t = VT_INT;
2147 } else {
2148 vtop->type.t = t;
2151 // Make sure that we have converted to an rvalue:
2152 if (vtop->r & VT_LVAL && !nocode_wanted)
2153 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2156 #ifndef TCC_TARGET_ARM
2157 /* generic itof for unsigned long long case */
2158 static void gen_cvt_itof1(int t)
2160 #ifdef TCC_TARGET_ARM64
2161 gen_cvt_itof(t);
2162 #else
2163 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2164 (VT_LLONG | VT_UNSIGNED)) {
2166 if (t == VT_FLOAT)
2167 vpush_global_sym(&func_old_type, TOK___floatundisf);
2168 #if LDOUBLE_SIZE != 8
2169 else if (t == VT_LDOUBLE)
2170 vpush_global_sym(&func_old_type, TOK___floatundixf);
2171 #endif
2172 else
2173 vpush_global_sym(&func_old_type, TOK___floatundidf);
2174 vrott(2);
2175 gfunc_call(1);
2176 vpushi(0);
2177 vtop->r = reg_fret(t);
2178 } else {
2179 gen_cvt_itof(t);
2181 #endif
2183 #endif
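/* Sketch of why the helper above is needed (assuming a target without
   native unsigned 64-bit conversion): for "unsigned long long x =
   1ULL << 63;" the value does not fit in a signed long long, so
   "(double)x" is compiled as a call to __floatundidf instead of the
   plain signed itof path, which would produce a negative result. */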
2185 /* generic ftoi for unsigned long long case */
2186 static void gen_cvt_ftoi1(int t)
2188 #ifdef TCC_TARGET_ARM64
2189 gen_cvt_ftoi(t);
2190 #else
2191 int st;
2193 if (t == (VT_LLONG | VT_UNSIGNED)) {
2194 /* not handled natively */
2195 st = vtop->type.t & VT_BTYPE;
2196 if (st == VT_FLOAT)
2197 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2198 #if LDOUBLE_SIZE != 8
2199 else if (st == VT_LDOUBLE)
2200 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2201 #endif
2202 else
2203 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2204 vrott(2);
2205 gfunc_call(1);
2206 vpushi(0);
2207 vtop->r = REG_IRET;
2208 vtop->r2 = REG_LRET;
2209 } else {
2210 gen_cvt_ftoi(t);
2212 #endif
2215 /* force char or short cast */
2216 static void force_charshort_cast(int t)
2218 int bits, dbt;
2219 dbt = t & VT_BTYPE;
2220 /* XXX: add optimization if lvalue : just change type and offset */
2221 if (dbt == VT_BYTE)
2222 bits = 8;
2223 else
2224 bits = 16;
2225 if (t & VT_UNSIGNED) {
2226 vpushi((1 << bits) - 1);
2227 gen_op('&');
2228 } else {
2229 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2230 bits = 64 - bits;
2231 else
2232 bits = 32 - bits;
2233 vpushi(bits);
2234 gen_op(TOK_SHL);
2235 /* result must be signed or the SAR is converted to an SHR.
2236 This was not the case when "t" was a signed short
2237 and the last value on the stack was an unsigned int */
2238 vtop->type.t &= ~VT_UNSIGNED;
2239 vpushi(bits);
2240 gen_op(TOK_SAR);
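/* Worked example for the signed path above (illustrative): truncating
   the int value 0x1FF to signed char uses bits = 32 - 8 = 24, so the
   result is (0x1FF << 24) >> 24 == -1, while the unsigned path simply
   masks with 0xff and yields 255. */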
2244 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2245 static void gen_cast(CType *type)
2247 int sbt, dbt, sf, df, c, p;
2249 /* special delayed cast for char/short */
2250 /* XXX: in some cases (multiple cascaded casts), it may still
2251 be incorrect */
2252 if (vtop->r & VT_MUSTCAST) {
2253 vtop->r &= ~VT_MUSTCAST;
2254 force_charshort_cast(vtop->type.t);
2257 /* bitfields first get cast to ints */
2258 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2259 gv(RC_INT);
2262 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2263 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2265 if (sbt != dbt) {
2266 sf = is_float(sbt);
2267 df = is_float(dbt);
2268 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2269 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2270 if (c) {
2271 /* constant case: we can do it now */
2272 /* XXX: in ISOC, cannot do it if error in convert */
2273 if (sbt == VT_FLOAT)
2274 vtop->c.ld = vtop->c.f;
2275 else if (sbt == VT_DOUBLE)
2276 vtop->c.ld = vtop->c.d;
2278 if (df) {
2279 if ((sbt & VT_BTYPE) == VT_LLONG) {
2280 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2281 vtop->c.ld = vtop->c.i;
2282 else
2283 vtop->c.ld = -(long double)-vtop->c.i;
2284 } else if(!sf) {
2285 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2286 vtop->c.ld = (uint32_t)vtop->c.i;
2287 else
2288 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2291 if (dbt == VT_FLOAT)
2292 vtop->c.f = (float)vtop->c.ld;
2293 else if (dbt == VT_DOUBLE)
2294 vtop->c.d = (double)vtop->c.ld;
2295 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2296 vtop->c.i = vtop->c.ld;
2297 } else if (sf && dbt == VT_BOOL) {
2298 vtop->c.i = (vtop->c.ld != 0);
2299 } else {
2300 if(sf)
2301 vtop->c.i = vtop->c.ld;
2302 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2304 else if (sbt & VT_UNSIGNED)
2305 vtop->c.i = (uint32_t)vtop->c.i;
2306 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2307 else if (sbt == VT_PTR)
2309 #endif
2310 else if (sbt != VT_LLONG)
2311 vtop->c.i = ((uint32_t)vtop->c.i |
2312 -(vtop->c.i & 0x80000000));
2314 if (dbt == (VT_LLONG|VT_UNSIGNED))
2316 else if (dbt == VT_BOOL)
2317 vtop->c.i = (vtop->c.i != 0);
2318 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2319 else if (dbt == VT_PTR)
2321 #endif
2322 else if (dbt != VT_LLONG) {
2323 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2324 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2325 0xffffffff);
2326 vtop->c.i &= m;
2327 if (!(dbt & VT_UNSIGNED))
2328 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2331 } else if (p && dbt == VT_BOOL) {
2332 vtop->r = VT_CONST;
2333 vtop->c.i = 1;
2334 } else if (!nocode_wanted) {
2335 /* non constant case: generate code */
2336 if (sf && df) {
2337 /* convert from fp to fp */
2338 gen_cvt_ftof(dbt);
2339 } else if (df) {
2340 /* convert int to fp */
2341 gen_cvt_itof1(dbt);
2342 } else if (sf) {
2343 /* convert fp to int */
2344 if (dbt == VT_BOOL) {
2345 vpushi(0);
2346 gen_op(TOK_NE);
2347 } else {
2348 /* we handle char/short/etc... with generic code */
2349 if (dbt != (VT_INT | VT_UNSIGNED) &&
2350 dbt != (VT_LLONG | VT_UNSIGNED) &&
2351 dbt != VT_LLONG)
2352 dbt = VT_INT;
2353 gen_cvt_ftoi1(dbt);
2354 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2355 /* additional cast for char/short... */
2356 vtop->type.t = dbt;
2357 gen_cast(type);
2360 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2361 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2362 if ((sbt & VT_BTYPE) != VT_LLONG) {
2363 /* scalar to long long */
2364 /* machine independent conversion */
2365 gv(RC_INT);
2366 /* generate high word */
2367 if (sbt == (VT_INT | VT_UNSIGNED)) {
2368 vpushi(0);
2369 gv(RC_INT);
2370 } else {
2371 if (sbt == VT_PTR) {
2372 /* cast from pointer to int before we apply
2373 shift operation, which pointers don't support */
2374 gen_cast(&int_type);
2376 gv_dup();
2377 vpushi(31);
2378 gen_op(TOK_SAR);
2380 /* patch second register */
2381 vtop[-1].r2 = vtop->r;
2382 vpop();
2384 #else
2385 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2386 (dbt & VT_BTYPE) == VT_PTR ||
2387 (dbt & VT_BTYPE) == VT_FUNC) {
2388 if ((sbt & VT_BTYPE) != VT_LLONG &&
2389 (sbt & VT_BTYPE) != VT_PTR &&
2390 (sbt & VT_BTYPE) != VT_FUNC) {
2391 /* need to convert from 32bit to 64bit */
2392 gv(RC_INT);
2393 if (sbt != (VT_INT | VT_UNSIGNED)) {
2394 #if defined(TCC_TARGET_ARM64)
2395 gen_cvt_sxtw();
2396 #elif defined(TCC_TARGET_X86_64)
2397 int r = gv(RC_INT);
2398 /* x86_64 specific: movslq */
2399 o(0x6348);
2400 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2401 #else
2402 #error
2403 #endif
2406 #endif
2407 } else if (dbt == VT_BOOL) {
2408 /* scalar to bool */
2409 vpushi(0);
2410 gen_op(TOK_NE);
2411 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2412 (dbt & VT_BTYPE) == VT_SHORT) {
2413 if (sbt == VT_PTR) {
2414 vtop->type.t = VT_INT;
2415 tcc_warning("nonportable conversion from pointer to char/short");
2417 force_charshort_cast(dbt);
2418 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2419 } else if ((dbt & VT_BTYPE) == VT_INT) {
2420 /* scalar to int */
2421 if ((sbt & VT_BTYPE) == VT_LLONG) {
2422 /* from long long: just take low order word */
2423 lexpand();
2424 vpop();
2426 /* if lvalue and single word type, nothing to do because
2427 the lvalue already contains the real type size (see
2428 VT_LVAL_xxx constants) */
2429 #endif
2432 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2433 /* if we are casting between pointer types,
2434 we must update the VT_LVAL_xxx size */
2435 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2436 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2438 vtop->type = *type;
2441 /* return type size as known at compile time. Put alignment at 'a' */
2442 ST_FUNC int type_size(CType *type, int *a)
2444 Sym *s;
2445 int bt;
2447 bt = type->t & VT_BTYPE;
2448 if (bt == VT_STRUCT) {
2449 /* struct/union */
2450 s = type->ref;
2451 *a = s->r;
2452 return s->c;
2453 } else if (bt == VT_PTR) {
2454 if (type->t & VT_ARRAY) {
2455 int ts;
2457 s = type->ref;
2458 ts = type_size(&s->type, a);
2460 if (ts < 0 && s->c < 0)
2461 ts = -ts;
2463 return ts * s->c;
2464 } else {
2465 *a = PTR_SIZE;
2466 return PTR_SIZE;
2468 } else if (bt == VT_LDOUBLE) {
2469 *a = LDOUBLE_ALIGN;
2470 return LDOUBLE_SIZE;
2471 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2472 #ifdef TCC_TARGET_I386
2473 #ifdef TCC_TARGET_PE
2474 *a = 8;
2475 #else
2476 *a = 4;
2477 #endif
2478 #elif defined(TCC_TARGET_ARM)
2479 #ifdef TCC_ARM_EABI
2480 *a = 8;
2481 #else
2482 *a = 4;
2483 #endif
2484 #else
2485 *a = 8;
2486 #endif
2487 return 8;
2488 } else if (bt == VT_INT || bt == VT_FLOAT) {
2489 *a = 4;
2490 return 4;
2491 } else if (bt == VT_SHORT) {
2492 *a = 2;
2493 return 2;
2494 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2495 *a = 8;
2496 return 16;
2497 } else if (bt == VT_ENUM) {
2498 *a = 4;
2499 /* Enums might be incomplete, so don't just return '4' here. */
2500 return type->ref->c;
2501 } else {
2502 /* char, void, function, _Bool */
2503 *a = 1;
2504 return 1;
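/* Example of the returned values (sketch, target dependent): on i386
   Linux type_size() reports size 8 with *a == 4 for double and long
   long, whereas on x86-64 the same types get *a == 8; a struct simply
   returns the size and alignment computed by struct_layout() (stored
   in ref->c and ref->r). */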
2508 /* push type size as known at run time on top of value stack. Put
2509 alignment at 'a' */
2510 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2512 if (type->t & VT_VLA) {
2513 type_size(&type->ref->type, a);
2514 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2515 } else {
2516 vpushi(type_size(type, a));
2520 static void vla_sp_restore(void) {
2521 if (vlas_in_scope) {
2522 gen_vla_sp_restore(vla_sp_loc);
2526 static void vla_sp_restore_root(void) {
2527 if (vlas_in_scope) {
2528 gen_vla_sp_restore(vla_sp_root_loc);
2532 /* return the pointed type of t */
2533 static inline CType *pointed_type(CType *type)
2535 return &type->ref->type;
2538 /* modify type so that it is a pointer to type. */
2539 ST_FUNC void mk_pointer(CType *type)
2541 Sym *s;
2542 s = sym_push(SYM_FIELD, type, 0, -1);
2543 type->t = VT_PTR | (type->t & ~VT_TYPE);
2544 type->ref = s;
2547 /* compare function types. OLD functions match any new functions */
2548 static int is_compatible_func(CType *type1, CType *type2)
2550 Sym *s1, *s2;
2552 s1 = type1->ref;
2553 s2 = type2->ref;
2554 if (!is_compatible_types(&s1->type, &s2->type))
2555 return 0;
2556 /* check func_call */
2557 if (s1->a.func_call != s2->a.func_call)
2558 return 0;
2559 /* XXX: not complete */
2560 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2561 return 1;
2562 if (s1->c != s2->c)
2563 return 0;
2564 while (s1 != NULL) {
2565 if (s2 == NULL)
2566 return 0;
2567 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2568 return 0;
2569 s1 = s1->next;
2570 s2 = s2->next;
2572 if (s2)
2573 return 0;
2574 return 1;
2577 /* return true if type1 and type2 are the same. If unqualified is
2578 true, qualifiers on the types are ignored.
2580 - enums are not checked as gcc __builtin_types_compatible_p ()
2582 static int compare_types(CType *type1, CType *type2, int unqualified)
2584 int bt1, t1, t2;
2586 t1 = type1->t & VT_TYPE;
2587 t2 = type2->t & VT_TYPE;
2588 if (unqualified) {
2589 /* strip qualifiers before comparing */
2590 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2591 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2593 /* Default vs. explicit signedness only matters for char */
2594 if ((t1 & VT_BTYPE) != VT_BYTE) {
2595 t1 &= ~VT_DEFSIGN;
2596 t2 &= ~VT_DEFSIGN;
2598 /* An enum is compatible with (unsigned) int. Ideally we would
2599 store the enums signedness in type->ref.a.<some_bit> and
2600 only accept unsigned enums with unsigned int and vice versa.
2601 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2602 from pointer target types, so we can't add it here either. */
2603 if ((t1 & VT_BTYPE) == VT_ENUM) {
2604 t1 = VT_INT;
2605 if (type1->ref->a.unsigned_enum)
2606 t1 |= VT_UNSIGNED;
2608 if ((t2 & VT_BTYPE) == VT_ENUM) {
2609 t2 = VT_INT;
2610 if (type2->ref->a.unsigned_enum)
2611 t2 |= VT_UNSIGNED;
2613 /* XXX: bitfields ? */
2614 if (t1 != t2)
2615 return 0;
2616 /* test more complicated cases */
2617 bt1 = t1 & VT_BTYPE;
2618 if (bt1 == VT_PTR) {
2619 type1 = pointed_type(type1);
2620 type2 = pointed_type(type2);
2621 return is_compatible_types(type1, type2);
2622 } else if (bt1 == VT_STRUCT) {
2623 return (type1->ref == type2->ref);
2624 } else if (bt1 == VT_FUNC) {
2625 return is_compatible_func(type1, type2);
2626 } else {
2627 return 1;
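/* Illustration of the enum handling above (sketch): with
   "enum E { A, B };" (all enumerators non-negative, so treated as
   unsigned here) compare_types() considers "enum E" and "unsigned int"
   the same, while "enum E" and "double" differ. */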
2631 /* return true if type1 and type2 are exactly the same (including
2632 qualifiers).
2634 static int is_compatible_types(CType *type1, CType *type2)
2636 return compare_types(type1,type2,0);
2639 /* return true if type1 and type2 are the same (ignoring qualifiers).
2641 static int is_compatible_parameter_types(CType *type1, CType *type2)
2643 return compare_types(type1,type2,1);
2646 /* print a type. If 'varstr' is not NULL, then the variable is also
2647 printed in the type */
2648 /* XXX: union */
2649 /* XXX: add array and function pointers */
2650 static void type_to_str(char *buf, int buf_size,
2651 CType *type, const char *varstr)
2653 int bt, v, t;
2654 Sym *s, *sa;
2655 char buf1[256];
2656 const char *tstr;
2658 t = type->t & VT_TYPE;
2659 bt = t & VT_BTYPE;
2660 buf[0] = '\0';
2661 if (t & VT_CONSTANT)
2662 pstrcat(buf, buf_size, "const ");
2663 if (t & VT_VOLATILE)
2664 pstrcat(buf, buf_size, "volatile ");
2665 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2666 pstrcat(buf, buf_size, "unsigned ");
2667 else if (t & VT_DEFSIGN)
2668 pstrcat(buf, buf_size, "signed ");
2669 switch(bt) {
2670 case VT_VOID:
2671 tstr = "void";
2672 goto add_tstr;
2673 case VT_BOOL:
2674 tstr = "_Bool";
2675 goto add_tstr;
2676 case VT_BYTE:
2677 tstr = "char";
2678 goto add_tstr;
2679 case VT_SHORT:
2680 tstr = "short";
2681 goto add_tstr;
2682 case VT_INT:
2683 tstr = "int";
2684 goto add_tstr;
2685 case VT_LONG:
2686 tstr = "long";
2687 goto add_tstr;
2688 case VT_LLONG:
2689 tstr = "long long";
2690 goto add_tstr;
2691 case VT_FLOAT:
2692 tstr = "float";
2693 goto add_tstr;
2694 case VT_DOUBLE:
2695 tstr = "double";
2696 goto add_tstr;
2697 case VT_LDOUBLE:
2698 tstr = "long double";
2699 add_tstr:
2700 pstrcat(buf, buf_size, tstr);
2701 break;
2702 case VT_ENUM:
2703 case VT_STRUCT:
2704 if (bt == VT_STRUCT)
2705 tstr = "struct ";
2706 else
2707 tstr = "enum ";
2708 pstrcat(buf, buf_size, tstr);
2709 v = type->ref->v & ~SYM_STRUCT;
2710 if (v >= SYM_FIRST_ANOM)
2711 pstrcat(buf, buf_size, "<anonymous>");
2712 else
2713 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2714 break;
2715 case VT_FUNC:
2716 s = type->ref;
2717 type_to_str(buf, buf_size, &s->type, varstr);
2718 pstrcat(buf, buf_size, "(");
2719 sa = s->next;
2720 while (sa != NULL) {
2721 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2722 pstrcat(buf, buf_size, buf1);
2723 sa = sa->next;
2724 if (sa)
2725 pstrcat(buf, buf_size, ", ");
2727 pstrcat(buf, buf_size, ")");
2728 goto no_var;
2729 case VT_PTR:
2730 s = type->ref;
2731 if (t & VT_ARRAY) {
2732 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2733 type_to_str(buf, buf_size, &s->type, buf1);
2734 goto no_var;
2736 pstrcpy(buf1, sizeof(buf1), "*");
2737 if (t & VT_CONSTANT)
2738 pstrcat(buf1, buf_size, "const ");
2739 if (t & VT_VOLATILE)
2740 pstrcat(buf1, buf_size, "volatile ");
2741 if (varstr)
2742 pstrcat(buf1, sizeof(buf1), varstr);
2743 type_to_str(buf, buf_size, &s->type, buf1);
2744 goto no_var;
2746 if (varstr) {
2747 pstrcat(buf, buf_size, " ");
2748 pstrcat(buf, buf_size, varstr);
2750 no_var: ;
2753 /* verify type compatibility to store vtop in 'dt' type, and generate
2754 casts if needed. */
2755 static void gen_assign_cast(CType *dt)
2757 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2758 char buf1[256], buf2[256];
2759 int dbt, sbt;
2761 st = &vtop->type; /* source type */
2762 dbt = dt->t & VT_BTYPE;
2763 sbt = st->t & VT_BTYPE;
2764 if (sbt == VT_VOID || dbt == VT_VOID) {
2765 if (sbt == VT_VOID && dbt == VT_VOID)
2766 ; /*
2767 It is Ok if both are void
2768 A test program:
2769 void func1() {}
2770 void func2() {
2771 return func1();
2773 gcc accepts this program
2775 else
2776 tcc_error("cannot cast from/to void");
2778 if (dt->t & VT_CONSTANT)
2779 tcc_warning("assignment of read-only location");
2780 switch(dbt) {
2781 case VT_PTR:
2782 /* special cases for pointers */
2783 /* '0' can also be a pointer */
2784 if (is_null_pointer(vtop))
2785 goto type_ok;
2786 /* accept implicit integer to pointer conversion with a warning */
2787 if (is_integer_btype(sbt)) {
2788 tcc_warning("assignment makes pointer from integer without a cast");
2789 goto type_ok;
2791 type1 = pointed_type(dt);
2792 /* a function is implicitly a function pointer */
2793 if (sbt == VT_FUNC) {
2794 if ((type1->t & VT_BTYPE) != VT_VOID &&
2795 !is_compatible_types(pointed_type(dt), st))
2796 tcc_warning("assignment from incompatible pointer type");
2797 goto type_ok;
2799 if (sbt != VT_PTR)
2800 goto error;
2801 type2 = pointed_type(st);
2802 if ((type1->t & VT_BTYPE) == VT_VOID ||
2803 (type2->t & VT_BTYPE) == VT_VOID) {
2804 /* void * can match anything */
2805 } else {
2806 /* exact type match, except for qualifiers */
2807 tmp_type1 = *type1;
2808 tmp_type2 = *type2;
2809 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2810 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2811 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2812 /* Like GCC, don't warn by default for mere changes
2813 in pointer target signedness. Do warn for different
2814 base types, though, in particular for unsigned enums
2815 and signed int targets. */
2816 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2817 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2818 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2820 else
2821 tcc_warning("assignment from incompatible pointer type");
2824 /* check const and volatile */
2825 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2826 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2827 tcc_warning("assignment discards qualifiers from pointer target type");
2828 break;
2829 case VT_BYTE:
2830 case VT_SHORT:
2831 case VT_INT:
2832 case VT_LLONG:
2833 if (sbt == VT_PTR || sbt == VT_FUNC) {
2834 tcc_warning("assignment makes integer from pointer without a cast");
2835 } else if (sbt == VT_STRUCT) {
2836 goto case_VT_STRUCT;
2838 /* XXX: more tests */
2839 break;
2840 case VT_STRUCT:
2841 case_VT_STRUCT:
2842 tmp_type1 = *dt;
2843 tmp_type2 = *st;
2844 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2846 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2847 error:
2848 type_to_str(buf1, sizeof(buf1), st, NULL);
2849 type_to_str(buf2, sizeof(buf2), dt, NULL);
2850 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2852 break;
2854 type_ok:
2855 gen_cast(dt);
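/* A few cases the checks above accept or warn about (illustrative
   sketch, not exhaustive):
     int *p; int n;
     p = n;                    warns: makes pointer from integer
     unsigned int *u; u = p;   no warning: only signedness differs
     char *c; c = p;           warns: incompatible pointer type
     p = 0;                    accepted: null pointer constant */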
2858 /* store vtop in lvalue pushed on stack */
2859 ST_FUNC void vstore(void)
2861 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2863 ft = vtop[-1].type.t;
2864 sbt = vtop->type.t & VT_BTYPE;
2865 dbt = ft & VT_BTYPE;
2866 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2867 (sbt == VT_INT && dbt == VT_SHORT))
2868 && !(vtop->type.t & VT_BITFIELD)) {
2869 /* optimize char/short casts */
2870 delayed_cast = VT_MUSTCAST;
2871 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2872 ((1 << VT_STRUCT_SHIFT) - 1));
2873 /* XXX: factorize */
2874 if (ft & VT_CONSTANT)
2875 tcc_warning("assignment of read-only location");
2876 } else {
2877 delayed_cast = 0;
2878 if (!(ft & VT_BITFIELD))
2879 gen_assign_cast(&vtop[-1].type);
2882 if (sbt == VT_STRUCT) {
2883 /* if structure, only generate pointer */
2884 /* structure assignment : generate memcpy */
2885 /* XXX: optimize if small size */
2886 if (!nocode_wanted) {
2887 size = type_size(&vtop->type, &align);
2889 /* destination */
2890 vswap();
2891 vtop->type.t = VT_PTR;
2892 gaddrof();
2894 /* address of memcpy() */
2895 #ifdef TCC_ARM_EABI
2896 if(!(align & 7))
2897 vpush_global_sym(&func_old_type, TOK_memcpy8);
2898 else if(!(align & 3))
2899 vpush_global_sym(&func_old_type, TOK_memcpy4);
2900 else
2901 #endif
2902 /* Use memmove, rather than memcpy, as dest and src may be same: */
2903 vpush_global_sym(&func_old_type, TOK_memmove);
2905 vswap();
2906 /* source */
2907 vpushv(vtop - 2);
2908 vtop->type.t = VT_PTR;
2909 gaddrof();
2910 /* type size */
2911 vpushi(size);
2912 gfunc_call(3);
2913 } else {
2914 vswap();
2915 vpop();
2917 /* leave source on stack */
2918 } else if (ft & VT_BITFIELD) {
2919 /* bitfield store handling */
2921 /* save lvalue as expression result (example: s.b = s.a = n;) */
2922 vdup(), vtop[-1] = vtop[-2];
2924 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2925 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2926 /* remove bit field info to avoid loops */
2927 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2929 if((ft & VT_BTYPE) == VT_BOOL) {
2930 gen_cast(&vtop[-1].type);
2931 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2934 /* duplicate destination */
2935 vdup();
2936 vtop[-1] = vtop[-2];
2938 /* mask and shift source */
2939 if((ft & VT_BTYPE) != VT_BOOL) {
2940 if((ft & VT_BTYPE) == VT_LLONG) {
2941 vpushll((1ULL << bit_size) - 1ULL);
2942 } else {
2943 vpushi((1 << bit_size) - 1);
2945 gen_op('&');
2947 vpushi(bit_pos);
2948 gen_op(TOK_SHL);
2949 /* load destination, mask and or with source */
2950 vswap();
2951 if((ft & VT_BTYPE) == VT_LLONG) {
2952 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2953 } else {
2954 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2956 gen_op('&');
2957 gen_op('|');
2958 /* store result */
2959 vstore();
2960 /* ... and discard */
2961 vpop();
2963 } else {
2964 if (!nocode_wanted) {
2965 #ifdef CONFIG_TCC_BCHECK
2966 /* bound check case */
2967 if (vtop[-1].r & VT_MUSTBOUND) {
2968 vswap();
2969 gbound();
2970 vswap();
2972 #endif
2973 rc = RC_INT;
2974 if (is_float(ft)) {
2975 rc = RC_FLOAT;
2976 #ifdef TCC_TARGET_X86_64
2977 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2978 rc = RC_ST0;
2979 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2980 rc = RC_FRET;
2982 #endif
2984 r = gv(rc); /* generate value */
2985 /* if lvalue was saved on stack, must read it */
2986 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2987 SValue sv;
2988 t = get_reg(RC_INT);
2989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2990 sv.type.t = VT_PTR;
2991 #else
2992 sv.type.t = VT_INT;
2993 #endif
2994 sv.r = VT_LOCAL | VT_LVAL;
2995 sv.c.i = vtop[-1].c.i;
2996 load(t, &sv);
2997 vtop[-1].r = t | VT_LVAL;
2999 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3000 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3001 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3002 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3003 #else
3004 if ((ft & VT_BTYPE) == VT_LLONG) {
3005 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3006 #endif
3007 vtop[-1].type.t = load_type;
3008 store(r, vtop - 1);
3009 vswap();
3010 /* convert to int to increment easily */
3011 vtop->type.t = addr_type;
3012 gaddrof();
3013 vpushi(load_size);
3014 gen_op('+');
3015 vtop->r |= VT_LVAL;
3016 vswap();
3017 vtop[-1].type.t = load_type;
3018 /* XXX: it works because r2 is spilled last ! */
3019 store(vtop->r2, vtop - 1);
3020 } else {
3021 store(r, vtop - 1);
3024 vswap();
3025 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3026 vtop->r |= delayed_cast;
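/* Sketch of the bit-field store sequence above: for
     struct { unsigned int f : 3; } s;   s.f = v;
   the generated code is roughly
     word = (word & ~(0x7 << pos)) | ((v & 0x7) << pos);
   i.e. mask and shift the source, then read-modify-write the
   containing word. */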
3030 /* handle pre/post increment and decrement; 'post' selects post vs. pre, c is the token ++ or -- */
3031 ST_FUNC void inc(int post, int c)
3033 test_lvalue();
3034 vdup(); /* save lvalue */
3035 if (post) {
3036 if (!nocode_wanted)
3037 gv_dup(); /* duplicate value */
3038 else
3039 vdup(); /* duplicate value */
3040 vrotb(3);
3041 vrotb(3);
3043 /* add constant */
3044 vpushi(c - TOK_MID);
3045 gen_op('+');
3046 vstore(); /* store value */
3047 if (post)
3048 vpop(); /* if post op, return saved value */
3051 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3053 /* read the string */
3054 if (tok != TOK_STR)
3055 expect(msg);
3056 cstr_new(astr);
3057 while (tok == TOK_STR) {
3058 /* XXX: add \0 handling too ? */
3059 cstr_cat(astr, tokc.str.data, -1);
3060 next();
3062 cstr_ccat(astr, '\0');
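/* e.g. (sketch): adjacent string literals are concatenated here, so
   __attribute__((section(".rodata" ".str"))) yields the single section
   name ".rodata.str". */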
3065 /* If I is >= 1 and a power of two, returns log2(i)+1.
3066 If I is 0 returns 0. */
3067 static int exact_log2p1(int i)
3069 int ret;
3070 if (!i)
3071 return 0;
3072 for (ret = 1; i >= 1 << 8; ret += 8)
3073 i >>= 8;
3074 if (i >= 1 << 4)
3075 ret += 4, i >>= 4;
3076 if (i >= 1 << 2)
3077 ret += 2, i >>= 2;
3078 if (i >= 1 << 1)
3079 ret++;
3080 return ret;
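/* Worked values (illustrative): exact_log2p1(0) == 0,
   exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(4096) == 13;
   e.g. __attribute__((aligned(16))) stores 5 in ad->a.aligned below. */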
3083 /* Parse GNUC __attribute__ extension. Currently, the following
3084 extensions are recognized:
3085 - aligned(n) : set data/function alignment.
3086 - packed : force data alignment to 1
3087 - section(x) : generate data/code in this section.
3088 - unused : currently ignored, but may be used someday.
3089 - regparm(n) : pass function parameters in registers (i386 only)
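   A usage sketch of the forms accepted below (illustrative, not from
   this file):
     int buf[64] __attribute__((aligned(16), section(".mydata")));
     struct hdr { char tag; int len; } __attribute__((packed));
     int sum3(int a, int b, int c) __attribute__((regparm(3)));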
3091 static void parse_attribute(AttributeDef *ad)
3093 int t, n;
3094 CString astr;
3096 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3097 next();
3098 skip('(');
3099 skip('(');
3100 while (tok != ')') {
3101 if (tok < TOK_IDENT)
3102 expect("attribute name");
3103 t = tok;
3104 next();
3105 switch(t) {
3106 case TOK_SECTION1:
3107 case TOK_SECTION2:
3108 skip('(');
3109 parse_mult_str(&astr, "section name");
3110 ad->section = find_section(tcc_state, (char *)astr.data);
3111 skip(')');
3112 cstr_free(&astr);
3113 break;
3114 case TOK_ALIAS1:
3115 case TOK_ALIAS2:
3116 skip('(');
3117 parse_mult_str(&astr, "alias(\"target\")");
3118 ad->alias_target = /* save string as token, for later */
3119 tok_alloc((char*)astr.data, astr.size-1)->tok;
3120 skip(')');
3121 cstr_free(&astr);
3122 break;
3123 case TOK_VISIBILITY1:
3124 case TOK_VISIBILITY2:
3125 skip('(');
3126 parse_mult_str(&astr,
3127 "visibility(\"default|hidden|internal|protected\")");
3128 if (!strcmp (astr.data, "default"))
3129 ad->a.visibility = STV_DEFAULT;
3130 else if (!strcmp (astr.data, "hidden"))
3131 ad->a.visibility = STV_HIDDEN;
3132 else if (!strcmp (astr.data, "internal"))
3133 ad->a.visibility = STV_INTERNAL;
3134 else if (!strcmp (astr.data, "protected"))
3135 ad->a.visibility = STV_PROTECTED;
3136 else
3137 expect("visibility(\"default|hidden|internal|protected\")");
3138 skip(')');
3139 cstr_free(&astr);
3140 break;
3141 case TOK_ALIGNED1:
3142 case TOK_ALIGNED2:
3143 if (tok == '(') {
3144 next();
3145 n = expr_const();
3146 if (n <= 0 || (n & (n - 1)) != 0)
3147 tcc_error("alignment must be a positive power of two");
3148 skip(')');
3149 } else {
3150 n = MAX_ALIGN;
3152 ad->a.aligned = exact_log2p1(n);
3153 if (n != 1 << (ad->a.aligned - 1))
3154 tcc_error("alignment of %d is larger than implemented", n);
3155 break;
3156 case TOK_PACKED1:
3157 case TOK_PACKED2:
3158 ad->a.packed = 1;
3159 break;
3160 case TOK_WEAK1:
3161 case TOK_WEAK2:
3162 ad->a.weak = 1;
3163 break;
3164 case TOK_UNUSED1:
3165 case TOK_UNUSED2:
3166 /* currently, no need to handle it because tcc does not
3167 track unused objects */
3168 break;
3169 case TOK_NORETURN1:
3170 case TOK_NORETURN2:
3171 /* currently, no need to handle it because tcc does not
3172 track unused objects */
3173 break;
3174 case TOK_CDECL1:
3175 case TOK_CDECL2:
3176 case TOK_CDECL3:
3177 ad->a.func_call = FUNC_CDECL;
3178 break;
3179 case TOK_STDCALL1:
3180 case TOK_STDCALL2:
3181 case TOK_STDCALL3:
3182 ad->a.func_call = FUNC_STDCALL;
3183 break;
3184 #ifdef TCC_TARGET_I386
3185 case TOK_REGPARM1:
3186 case TOK_REGPARM2:
3187 skip('(');
3188 n = expr_const();
3189 if (n > 3)
3190 n = 3;
3191 else if (n < 0)
3192 n = 0;
3193 if (n > 0)
3194 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3195 skip(')');
3196 break;
3197 case TOK_FASTCALL1:
3198 case TOK_FASTCALL2:
3199 case TOK_FASTCALL3:
3200 ad->a.func_call = FUNC_FASTCALLW;
3201 break;
3202 #endif
3203 case TOK_MODE:
3204 skip('(');
3205 switch(tok) {
3206 case TOK_MODE_DI:
3207 ad->a.mode = VT_LLONG + 1;
3208 break;
3209 case TOK_MODE_QI:
3210 ad->a.mode = VT_BYTE + 1;
3211 break;
3212 case TOK_MODE_HI:
3213 ad->a.mode = VT_SHORT + 1;
3214 break;
3215 case TOK_MODE_SI:
3216 case TOK_MODE_word:
3217 ad->a.mode = VT_INT + 1;
3218 break;
3219 default:
3220 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3221 break;
3223 next();
3224 skip(')');
3225 break;
3226 case TOK_DLLEXPORT:
3227 ad->a.func_export = 1;
3228 break;
3229 case TOK_DLLIMPORT:
3230 ad->a.func_import = 1;
3231 break;
3232 default:
3233 if (tcc_state->warn_unsupported)
3234 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3235 /* skip parameters */
3236 if (tok == '(') {
3237 int parenthesis = 0;
3238 do {
3239 if (tok == '(')
3240 parenthesis++;
3241 else if (tok == ')')
3242 parenthesis--;
3243 next();
3244 } while (parenthesis && tok != -1);
3246 break;
3248 if (tok != ',')
3249 break;
3250 next();
3252 skip(')');
3253 skip(')');
3257 static Sym * find_field (CType *type, int v)
3259 Sym *s = type->ref;
3260 v |= SYM_FIELD;
3261 while ((s = s->next) != NULL) {
3262 if ((s->v & SYM_FIELD) &&
3263 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3264 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3265 Sym *ret = find_field (&s->type, v);
3266 if (ret)
3267 return ret;
3269 if (s->v == v)
3270 break;
3272 return s;
3275 static void struct_add_offset (Sym *s, int offset)
3277 while ((s = s->next) != NULL) {
3278 if ((s->v & SYM_FIELD) &&
3279 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3280 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3281 struct_add_offset(s->type.ref, offset);
3282 } else
3283 s->c += offset;
3287 static void struct_layout(CType *type, AttributeDef *ad)
3289 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3290 int pcc = !tcc_state->ms_bitfields;
3291 Sym *f;
3292 if (ad->a.aligned)
3293 maxalign = 1 << (ad->a.aligned - 1);
3294 else
3295 maxalign = 1;
3296 offset = 0;
3297 c = 0;
3298 bit_pos = 0;
3299 prevbt = VT_STRUCT; /* make it never match */
3300 prev_bit_size = 0;
3301 for (f = type->ref->next; f; f = f->next) {
3302 int typealign, bit_size;
3303 int size = type_size(&f->type, &typealign);
3304 if (f->type.t & VT_BITFIELD)
3305 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3306 else
3307 bit_size = -1;
3308 if (bit_size == 0 && pcc) {
3309 /* Zero-width bit-fields in PCC mode aren't affected
3310 by any packing (attribute or pragma). */
3311 align = typealign;
3312 } else if (f->r > 1) {
3313 align = f->r;
3314 } else if (ad->a.packed || f->r == 1) {
3315 align = 1;
3316 /* Packed fields or packed records don't let the base type
3317 influence the record type's alignment. */
3318 typealign = 1;
3319 } else {
3320 align = typealign;
3322 if (type->ref->type.t != TOK_STRUCT) {
3323 if (pcc && bit_size >= 0)
3324 size = (bit_size + 7) >> 3;
3325 /* Bit position is already zero from our caller. */
3326 offset = 0;
3327 if (size > c)
3328 c = size;
3329 } else if (bit_size < 0) {
3330 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3331 prevbt = VT_STRUCT;
3332 prev_bit_size = 0;
3333 c = (c + addbytes + align - 1) & -align;
3334 offset = c;
3335 if (size > 0)
3336 c += size;
3337 bit_pos = 0;
3338 } else {
3339 /* A bit-field. Layout is more complicated. There are two
3340 options TCC implements: PCC compatible and MS compatible
3341 (PCC compatible is what GCC uses for almost all targets).
3342 In PCC layout the overall size of the struct (in c) is
3343 _excluding_ the current run of bit-fields (that is,
3344 there are at least bit_pos additional bits after c). In
3345 MS layout c does include the current run of bit-fields.
3347 This matters for calculating the natural alignment buckets
3348 in PCC mode. */
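/* A small illustration of the difference (sketch):
     struct S { short a : 9; char b : 5; };
   In PCC mode 'b' is packed right after 'a' (bits 9..13), so
   sizeof(struct S) == 2; in MS mode the change of base type starts a
   new storage unit, so the short run takes 2 bytes, the char run 1
   byte, and the struct is padded to its alignment of 2, giving
   sizeof(struct S) == 4. */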
3350 /* 'align' will be used to influence the record's alignment,
3351 so it's the max of specified and type alignment, except
3352 in certain cases that depend on the mode. */
3353 if (align < typealign)
3354 align = typealign;
3355 if (pcc) {
3356 /* In PCC layout a non-packed bit-field is placed adjacent
3357 to the preceding bit-fields, except if it would overflow
3358 its container (depending on base type) or it's a zero-width
3359 bit-field. Packed non-zero-width bit-fields always are
3360 placed adjacent. */
3361 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3362 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3363 if (bit_size == 0 ||
3364 (typealign != 1 &&
3365 (ofs2 / (typealign * 8)) > (size/typealign))) {
3366 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3367 bit_pos = 0;
3369 offset = c;
3370 /* In PCC layout named bit-fields influence the alignment
3371 of the containing struct using the base type's alignment,
3372 except for packed fields (which here have correct
3373 align/typealign). */
3374 if ((f->v & SYM_FIRST_ANOM))
3375 align = 1;
3376 } else {
3377 bt = f->type.t & VT_BTYPE;
3378 if ((bit_pos + bit_size > size * 8) ||
3379 (bit_size > 0) == (bt != prevbt)) {
3380 c = (c + typealign - 1) & -typealign;
3381 offset = c;
3382 bit_pos = 0;
3383 /* In MS bitfield mode a bit-field run always uses
3384 at least as many bits as the underlying type.
3385 To start a new run it's also required that this
3386 or the last bit-field had non-zero width. */
3387 if (bit_size || prev_bit_size)
3388 c += size;
3390 /* In MS layout the record's alignment is normally
3391 influenced by the field, except for a zero-width
3392 field at the start of a run (but by further zero-width
3393 fields it is again). */
3394 if (bit_size == 0 && prevbt != bt)
3395 align = 1;
3396 prevbt = bt;
3397 prev_bit_size = bit_size;
3399 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3400 | (bit_pos << VT_STRUCT_SHIFT);
3401 bit_pos += bit_size;
3402 if (pcc && bit_pos >= size * 8) {
3403 c += size;
3404 bit_pos -= size * 8;
3407 if (align > maxalign)
3408 maxalign = align;
3409 #if 0
3410 printf("set field %s offset=%d c=%d",
3411 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3412 if (f->type.t & VT_BITFIELD) {
3413 printf(" pos=%d size=%d",
3414 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3415 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3417 printf("\n");
3418 #endif
3420 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3421 Sym *ass;
3422 /* An anonymous struct/union. Adjust member offsets
3423 to reflect the real offset of our containing struct.
3424 Also set the offset of this anon member inside
3425 the outer struct to be zero. This way it
3426 works when accessing the field offset directly
3427 (from base object), as well as when recursing
3428 members in initializer handling. */
3429 int v2 = f->type.ref->v;
3430 if (!(v2 & SYM_FIELD) &&
3431 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3432 Sym **pps;
3433 /* This happens only with MS extensions. The
3434 anon member has a named struct type, so it
3435 potentially is shared with other references.
3436 We need to unshare members so we can modify
3437 them. */
3438 ass = f->type.ref;
3439 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3440 &f->type.ref->type, 0,
3441 f->type.ref->c);
3442 pps = &f->type.ref->next;
3443 while ((ass = ass->next) != NULL) {
3444 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3445 pps = &((*pps)->next);
3447 *pps = NULL;
3449 struct_add_offset(f->type.ref, offset);
3450 f->c = 0;
3451 } else {
3452 f->c = offset;
3455 f->r = 0;
3457 /* store size and alignment */
3458 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3459 + maxalign - 1) & -maxalign;
3460 type->ref->r = maxalign;
3463 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3464 static void struct_decl(CType *type, AttributeDef *ad, int u)
3466 int a, v, size, align, flexible, alignoverride;
3467 long c;
3468 int bit_size, bsize, bt;
3469 Sym *s, *ss, **ps;
3470 AttributeDef ad1;
3471 CType type1, btype;
3473 a = tok; /* save decl type */
3474 next();
3475 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3476 parse_attribute(ad);
3477 if (tok != '{') {
3478 v = tok;
3479 next();
3480 /* struct already defined ? return it */
3481 if (v < TOK_IDENT)
3482 expect("struct/union/enum name");
3483 s = struct_find(v);
3484 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3485 if (s->type.t != a)
3486 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3487 goto do_decl;
3489 } else {
3490 v = anon_sym++;
3492 /* Record the original enum/struct/union token. */
3493 type1.t = a;
3494 type1.ref = NULL;
3495 /* we put an undefined size for struct/union */
3496 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3497 s->r = 0; /* default alignment is zero, as in gcc */
3498 /* put struct/union/enum name in type */
3499 do_decl:
3500 type->t = u;
3501 type->ref = s;
3503 if (tok == '{') {
3504 next();
3505 if (s->c != -1)
3506 tcc_error("struct/union/enum already defined");
3507 /* cannot be empty */
3508 c = 0;
3509 /* empty enums are not allowed */
3510 if (a == TOK_ENUM) {
3511 int seen_neg = 0;
3512 int seen_wide = 0;
3513 for(;;) {
3514 CType *t = &int_type;
3515 v = tok;
3516 if (v < TOK_UIDENT)
3517 expect("identifier");
3518 ss = sym_find(v);
3519 if (ss && !local_stack)
3520 tcc_error("redefinition of enumerator '%s'",
3521 get_tok_str(v, NULL));
3522 next();
3523 if (tok == '=') {
3524 next();
3525 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3526 c = expr_const64();
3527 #else
3528 /* We really want to support long long enums
3529 on i386 as well, but the Sym structure only
3530 holds a 'long' for associated constants,
3531 and enlarging it would bump its size (no
3532 available padding). So punt for now. */
3533 c = expr_const();
3534 #endif
3536 if (c < 0)
3537 seen_neg = 1;
3538 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3539 seen_wide = 1, t = &size_type;
3540 /* enum symbols have static storage */
3541 ss = sym_push(v, t, VT_CONST, c);
3542 ss->type.t |= VT_STATIC;
3543 if (tok != ',')
3544 break;
3545 next();
3546 c++;
3547 /* NOTE: we accept a trailing comma */
3548 if (tok == '}')
3549 break;
3551 if (!seen_neg)
3552 s->a.unsigned_enum = 1;
3553 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3554 skip('}');
3555 } else {
3556 ps = &s->next;
3557 flexible = 0;
3558 while (tok != '}') {
3559 if (!parse_btype(&btype, &ad1)) {
3560 skip(';');
3561 continue;
3563 while (1) {
3564 if (flexible)
3565 tcc_error("flexible array member '%s' not at the end of struct",
3566 get_tok_str(v, NULL));
3567 bit_size = -1;
3568 v = 0;
3569 type1 = btype;
3570 if (tok != ':') {
3571 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3572 if (v == 0) {
3573 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3574 expect("identifier");
3575 else {
3576 int v = btype.ref->v;
3577 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3578 if (tcc_state->ms_extensions == 0)
3579 expect("identifier");
3583 if (type_size(&type1, &align) < 0) {
3584 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3585 flexible = 1;
3586 else
3587 tcc_error("field '%s' has incomplete type",
3588 get_tok_str(v, NULL));
3590 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3591 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3592 tcc_error("invalid type for '%s'",
3593 get_tok_str(v, NULL));
3595 if (tok == ':') {
3596 next();
3597 bit_size = expr_const();
3598 /* XXX: handle v = 0 case for messages */
3599 if (bit_size < 0)
3600 tcc_error("negative width in bit-field '%s'",
3601 get_tok_str(v, NULL));
3602 if (v && bit_size == 0)
3603 tcc_error("zero width for bit-field '%s'",
3604 get_tok_str(v, NULL));
3605 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3606 parse_attribute(&ad1);
3608 size = type_size(&type1, &align);
3609 /* Only remember non-default alignment. */
3610 alignoverride = 0;
3611 if (ad1.a.aligned) {
3612 int speca = 1 << (ad1.a.aligned - 1);
3613 alignoverride = speca;
3614 } else if (ad1.a.packed || ad->a.packed) {
3615 alignoverride = 1;
3616 } else if (*tcc_state->pack_stack_ptr) {
3617 if (align > *tcc_state->pack_stack_ptr)
3618 alignoverride = *tcc_state->pack_stack_ptr;
3620 if (bit_size >= 0) {
3621 bt = type1.t & VT_BTYPE;
3622 if (bt != VT_INT &&
3623 bt != VT_BYTE &&
3624 bt != VT_SHORT &&
3625 bt != VT_BOOL &&
3626 bt != VT_ENUM &&
3627 bt != VT_LLONG)
3628 tcc_error("bitfields must have scalar type");
3629 bsize = size * 8;
3630 if (bit_size > bsize) {
3631 tcc_error("width of '%s' exceeds its type",
3632 get_tok_str(v, NULL));
3633 } else if (bit_size == bsize) {
3634 /* no need for bit fields */
3636 } else {
3637 type1.t |= VT_BITFIELD |
3638 (0 << VT_STRUCT_SHIFT) |
3639 (bit_size << (VT_STRUCT_SHIFT + 6));
3642 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3643 /* Remember we've seen a real field to check
3644 for placement of flexible array member. */
3645 c = 1;
3647 /* If member is a struct or bit-field, enforce
3648 placing into the struct (as anonymous). */
3649 if (v == 0 &&
3650 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3651 bit_size >= 0)) {
3652 v = anon_sym++;
3654 if (v) {
3655 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3656 *ps = ss;
3657 ps = &ss->next;
3659 if (tok == ';' || tok == TOK_EOF)
3660 break;
3661 skip(',');
3663 skip(';');
3665 skip('}');
3666 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3667 parse_attribute(ad);
3668 struct_layout(type, ad);
3673 /* return 1 if the basic type is a size modifier (short, long, long long) */
3674 ST_FUNC int is_btype_size(int bt)
3676 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3679 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3680 are added to the element type, copied because it could be a typedef. */
3681 static void parse_btype_qualify(CType *type, int qualifiers)
3683 while (type->t & VT_ARRAY) {
3684 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3685 type = &type->ref->type;
3687 type->t |= qualifiers;
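/* e.g. (sketch): with "typedef int A[4];" the declaration "const A x;"
   must make the elements const; the loop above copies the element type
   so the qualifier lands there without modifying the typedef itself. */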
3690 /* return 0 if no type declaration. otherwise, return the basic type
3691 and skip it.
3693 static int parse_btype(CType *type, AttributeDef *ad)
3695 int t, u, bt_size, complete, type_found, typespec_found;
3696 Sym *s;
3697 CType type1;
3699 memset(ad, 0, sizeof(AttributeDef));
3700 complete = 0;
3701 type_found = 0;
3702 typespec_found = 0;
3703 t = 0;
3704 while(1) {
3705 switch(tok) {
3706 case TOK_EXTENSION:
3707 /* currently, we really ignore extension */
3708 next();
3709 continue;
3711 /* basic types */
3712 case TOK_CHAR:
3713 u = VT_BYTE;
3714 basic_type:
3715 next();
3716 basic_type1:
3717 if (complete)
3718 tcc_error("too many basic types");
3719 t |= u;
3720 bt_size = is_btype_size (u & VT_BTYPE);
3721 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3722 complete = 1;
3723 typespec_found = 1;
3724 break;
3725 case TOK_VOID:
3726 u = VT_VOID;
3727 goto basic_type;
3728 case TOK_SHORT:
3729 u = VT_SHORT;
3730 goto basic_type;
3731 case TOK_INT:
3732 u = VT_INT;
3733 goto basic_type;
3734 case TOK_LONG:
3735 next();
3736 if ((t & VT_BTYPE) == VT_DOUBLE) {
3737 #ifndef TCC_TARGET_PE
3738 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3739 #endif
3740 } else if ((t & VT_BTYPE) == VT_LONG) {
3741 t = (t & ~VT_BTYPE) | VT_LLONG;
3742 } else {
3743 u = VT_LONG;
3744 goto basic_type1;
3746 break;
3747 #ifdef TCC_TARGET_ARM64
3748 case TOK_UINT128:
3749 /* GCC's __uint128_t appears in some Linux header files. Make it a
3750 synonym for long double to get the size and alignment right. */
3751 u = VT_LDOUBLE;
3752 goto basic_type;
3753 #endif
3754 case TOK_BOOL:
3755 u = VT_BOOL;
3756 goto basic_type;
3757 case TOK_FLOAT:
3758 u = VT_FLOAT;
3759 goto basic_type;
3760 case TOK_DOUBLE:
3761 next();
3762 if ((t & VT_BTYPE) == VT_LONG) {
3763 #ifdef TCC_TARGET_PE
3764 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3765 #else
3766 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3767 #endif
3768 } else {
3769 u = VT_DOUBLE;
3770 goto basic_type1;
3772 break;
3773 case TOK_ENUM:
3774 struct_decl(&type1, ad, VT_ENUM);
3775 basic_type2:
3776 u = type1.t;
3777 type->ref = type1.ref;
3778 goto basic_type1;
3779 case TOK_STRUCT:
3780 case TOK_UNION:
3781 struct_decl(&type1, ad, VT_STRUCT);
3782 goto basic_type2;
3784 /* type modifiers */
3785 case TOK_CONST1:
3786 case TOK_CONST2:
3787 case TOK_CONST3:
3788 type->t = t;
3789 parse_btype_qualify(type, VT_CONSTANT);
3790 t = type->t;
3791 next();
3792 break;
3793 case TOK_VOLATILE1:
3794 case TOK_VOLATILE2:
3795 case TOK_VOLATILE3:
3796 type->t = t;
3797 parse_btype_qualify(type, VT_VOLATILE);
3798 t = type->t;
3799 next();
3800 break;
3801 case TOK_SIGNED1:
3802 case TOK_SIGNED2:
3803 case TOK_SIGNED3:
3804 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3805 tcc_error("signed and unsigned modifier");
3806 typespec_found = 1;
3807 t |= VT_DEFSIGN;
3808 next();
3809 break;
3810 case TOK_REGISTER:
3811 case TOK_AUTO:
3812 case TOK_RESTRICT1:
3813 case TOK_RESTRICT2:
3814 case TOK_RESTRICT3:
3815 next();
3816 break;
3817 case TOK_UNSIGNED:
3818 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3819 tcc_error("signed and unsigned modifier");
3820 t |= VT_DEFSIGN | VT_UNSIGNED;
3821 next();
3822 typespec_found = 1;
3823 break;
3825 /* storage */
3826 case TOK_EXTERN:
3827 t |= VT_EXTERN;
3828 next();
3829 break;
3830 case TOK_STATIC:
3831 t |= VT_STATIC;
3832 next();
3833 break;
3834 case TOK_TYPEDEF:
3835 t |= VT_TYPEDEF;
3836 next();
3837 break;
3838 case TOK_INLINE1:
3839 case TOK_INLINE2:
3840 case TOK_INLINE3:
3841 t |= VT_INLINE;
3842 next();
3843 break;
3845 /* GNUC attribute */
3846 case TOK_ATTRIBUTE1:
3847 case TOK_ATTRIBUTE2:
3848 parse_attribute(ad);
3849 if (ad->a.mode) {
3850 u = ad->a.mode -1;
3851 t = (t & ~VT_BTYPE) | u;
3853 break;
3854 /* GNUC typeof */
3855 case TOK_TYPEOF1:
3856 case TOK_TYPEOF2:
3857 case TOK_TYPEOF3:
3858 next();
3859 parse_expr_type(&type1);
3860 /* remove all storage modifiers except typedef */
3861 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3862 goto basic_type2;
3863 default:
3864 if (typespec_found)
3865 goto the_end;
3866 s = sym_find(tok);
3867 if (!s || !(s->type.t & VT_TYPEDEF))
3868 goto the_end;
3870 type->t = ((s->type.t & ~VT_TYPEDEF) |
3871 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3872 type->ref = s->type.ref;
3873 if (t & (VT_CONSTANT | VT_VOLATILE))
3874 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3875 t = type->t;
3877 if (s->r) {
3878 /* get attributes from typedef */
3879 if (0 == ad->a.aligned)
3880 ad->a.aligned = s->a.aligned;
3881 if (0 == ad->a.func_call)
3882 ad->a.func_call = s->a.func_call;
3883 ad->a.packed |= s->a.packed;
3885 next();
3886 typespec_found = 1;
3887 break;
3889 type_found = 1;
3891 the_end:
3892 if (tcc_state->char_is_unsigned) {
3893 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3894 t |= VT_UNSIGNED;
3897 /* long is never used as type */
3898 if ((t & VT_BTYPE) == VT_LONG)
3899 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3900 defined TCC_TARGET_PE
3901 t = (t & ~VT_BTYPE) | VT_INT;
3902 #else
3903 t = (t & ~VT_BTYPE) | VT_LLONG;
3904 #endif
3905 type->t = t;
3906 return type_found;
3909 /* convert a function parameter type (array to pointer and function to
3910 function pointer) */
3911 static inline void convert_parameter_type(CType *pt)
3913 /* remove const and volatile qualifiers (XXX: const could be used
3914 to indicate a const function parameter) */
3915 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3916 /* array must be transformed to pointer according to ANSI C */
3917 pt->t &= ~VT_ARRAY;
3918 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3919 mk_pointer(pt);
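/* e.g. (sketch): in "void f(int a[10], int g(void));" the parameters
   are rewritten as "int *a" and "int (*g)(void)", as C requires. */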
3923 ST_FUNC void parse_asm_str(CString *astr)
3925 skip('(');
3926 parse_mult_str(astr, "string constant");
3929 /* Parse an asm label and return the token */
3930 static int asm_label_instr(void)
3932 int v;
3933 CString astr;
3935 next();
3936 parse_asm_str(&astr);
3937 skip(')');
3938 #ifdef ASM_DEBUG
3939 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3940 #endif
3941 v = tok_alloc(astr.data, astr.size - 1)->tok;
3942 cstr_free(&astr);
3943 return v;
3946 static void post_type(CType *type, AttributeDef *ad, int storage)
3948 int n, l, t1, arg_size, align;
3949 Sym **plast, *s, *first;
3950 AttributeDef ad1;
3951 CType pt;
3953 if (tok == '(') {
3954 /* function declaration */
3955 next();
3956 l = 0;
3957 first = NULL;
3958 plast = &first;
3959 arg_size = 0;
3960 if (tok != ')') {
3961 for(;;) {
3962 /* read param name and compute offset */
3963 if (l != FUNC_OLD) {
3964 if (!parse_btype(&pt, &ad1)) {
3965 if (l) {
3966 tcc_error("invalid type");
3967 } else {
3968 l = FUNC_OLD;
3969 goto old_proto;
3972 l = FUNC_NEW;
3973 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3974 break;
3975 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3976 if ((pt.t & VT_BTYPE) == VT_VOID)
3977 tcc_error("parameter declared as void");
3978 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3979 } else {
3980 old_proto:
3981 n = tok;
3982 if (n < TOK_UIDENT)
3983 expect("identifier");
3984 pt.t = VT_INT;
3985 next();
3987 convert_parameter_type(&pt);
3988 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3989 *plast = s;
3990 plast = &s->next;
3991 if (tok == ')')
3992 break;
3993 skip(',');
3994 if (l == FUNC_NEW && tok == TOK_DOTS) {
3995 l = FUNC_ELLIPSIS;
3996 next();
3997 break;
4001 /* if no parameters, then old type prototype */
4002 if (l == 0)
4003 l = FUNC_OLD;
4004 skip(')');
4005 /* NOTE: const is ignored in returned type as it has a special
4006 meaning in gcc / C++ */
4007 type->t &= ~VT_CONSTANT;
4008 /* some ancient pre-K&R C allows a function to return an array
4009 and the array brackets to be put after the arguments, such
4010 that "int c()[]" means something like "int[] c()" */
4011 if (tok == '[') {
4012 next();
4013 skip(']'); /* only handle simple "[]" */
4014 type->t |= VT_PTR;
4016 /* we push an anonymous symbol which will contain the function prototype */
4017 ad->a.func_args = arg_size;
4018 s = sym_push(SYM_FIELD, type, 0, l);
4019 s->a = ad->a;
4020 s->next = first;
4021 type->t = VT_FUNC;
4022 type->ref = s;
4023 } else if (tok == '[') {
4024 int saved_nocode_wanted = nocode_wanted;
4025 /* array definition */
4026 next();
4027 if (tok == TOK_RESTRICT1)
4028 next();
4029 n = -1;
4030 t1 = 0;
4031 if (tok != ']') {
4032 if (!local_stack || (storage & VT_STATIC))
4033 vpushi(expr_const());
4034 else {
4035 /* The length of VLAs (which can only happen with local_stack &&
4036 !VT_STATIC) must always be evaluated, even under nocode_wanted,
4037 so that its size slot is initialized (e.g. under sizeof
4038 or typeof). */
4039 nocode_wanted = 0;
4040 gexpr();
4042 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4043 n = vtop->c.i;
4044 if (n < 0)
4045 tcc_error("invalid array size");
4046 } else {
4047 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4048 tcc_error("size of variable length array should be an integer");
4049 t1 = VT_VLA;
4052 skip(']');
4053 /* parse next post type */
4054 post_type(type, ad, storage);
4055 if (type->t == VT_FUNC)
4056 tcc_error("declaration of an array of functions");
4057 t1 |= type->t & VT_VLA;
4059 if (t1 & VT_VLA) {
4060 loc -= type_size(&int_type, &align);
4061 loc &= -align;
4062 n = loc;
4064 vla_runtime_type_size(type, &align);
4065 gen_op('*');
4066 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4067 vswap();
4068 vstore();
4070 if (n != -1)
4071 vpop();
4072 nocode_wanted = saved_nocode_wanted;
4074 /* we push an anonymous symbol which will contain the array
4075 element type */
4076 s = sym_push(SYM_FIELD, type, 0, n);
4077 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4078 type->ref = s;
4082 /* Parse a type declaration (except basic type), and return the type
4083 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4084 expected. 'type' should contain the basic type. 'ad' is the
4085 attribute definition of the basic type. It can be modified by
4086 type_decl().
4088 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4090 Sym *s;
4091 CType type1, *type2;
4092 int qualifiers, storage;
4094 while (tok == '*') {
4095 qualifiers = 0;
4096 redo:
4097 next();
4098 switch(tok) {
4099 case TOK_CONST1:
4100 case TOK_CONST2:
4101 case TOK_CONST3:
4102 qualifiers |= VT_CONSTANT;
4103 goto redo;
4104 case TOK_VOLATILE1:
4105 case TOK_VOLATILE2:
4106 case TOK_VOLATILE3:
4107 qualifiers |= VT_VOLATILE;
4108 goto redo;
4109 case TOK_RESTRICT1:
4110 case TOK_RESTRICT2:
4111 case TOK_RESTRICT3:
4112 goto redo;
4113 /* XXX: clarify attribute handling */
4114 case TOK_ATTRIBUTE1:
4115 case TOK_ATTRIBUTE2:
4116 parse_attribute(ad);
4117 break;
4119 mk_pointer(type);
4120 type->t |= qualifiers;
4123 /* recursive type */
4124 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4125 type1.t = 0; /* XXX: same as int */
4126 if (tok == '(') {
4127 next();
4128 /* XXX: it is not correct to modify 'ad' at this point, but
4129 the syntax is not clear */
4130 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4131 parse_attribute(ad);
4132 type_decl(&type1, ad, v, td);
4133 skip(')');
4134 } else {
4135 /* type identifier */
4136 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4137 *v = tok;
4138 next();
4139 } else {
4140 if (!(td & TYPE_ABSTRACT))
4141 expect("identifier");
4142 *v = 0;
4145 storage = type->t & VT_STORAGE;
4146 type->t &= ~VT_STORAGE;
4147 post_type(type, ad, storage);
4148 type->t |= storage;
4149 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4150 parse_attribute(ad);
4152 if (!type1.t)
4153 return;
4154 /* append type at the end of type1 */
4155 type2 = &type1;
4156 for(;;) {
4157 s = type2->ref;
4158 type2 = &s->type;
4159 if (!type2->t) {
4160 *type2 = *type;
4161 break;
4164 *type = type1;
4167 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4168 ST_FUNC int lvalue_type(int t)
4170 int bt, r;
4171 r = VT_LVAL;
4172 bt = t & VT_BTYPE;
4173 if (bt == VT_BYTE || bt == VT_BOOL)
4174 r |= VT_LVAL_BYTE;
4175 else if (bt == VT_SHORT)
4176 r |= VT_LVAL_SHORT;
4177 else
4178 return r;
4179 if (t & VT_UNSIGNED)
4180 r |= VT_LVAL_UNSIGNED;
4181 return r;
4184 /* indirection with full error checking and bound check */
4185 ST_FUNC void indir(void)
4187 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4188 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4189 return;
4190 expect("pointer");
4192 if ((vtop->r & VT_LVAL) && !nocode_wanted)
4193 gv(RC_INT);
4194 vtop->type = *pointed_type(&vtop->type);
4195 /* Arrays and functions are never lvalues */
4196 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4197 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4198 vtop->r |= lvalue_type(vtop->type.t);
4199 /* if bound checking, the referenced pointer must be checked */
4200 #ifdef CONFIG_TCC_BCHECK
4201 if (tcc_state->do_bounds_check)
4202 vtop->r |= VT_MUSTBOUND;
4203 #endif
4207 /* pass a parameter to a function and do type checking and casting */
4208 static void gfunc_param_typed(Sym *func, Sym *arg)
4210 int func_type;
4211 CType type;
4213 func_type = func->c;
4214 if (func_type == FUNC_OLD ||
4215 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4216 /* default casting : only need to convert float to double */
4217 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4218 type.t = VT_DOUBLE;
4219 gen_cast(&type);
4220 } else if (vtop->type.t & VT_BITFIELD) {
4221 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4222 type.ref = vtop->type.ref;
4223 gen_cast(&type);
4225 } else if (arg == NULL) {
4226 tcc_error("too many arguments to function");
4227 } else {
4228 type = arg->type;
4229 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4230 gen_assign_cast(&type);
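/* e.g. (sketch): with the old-style declaration "int f();" a call
   "f(1.5f)" passes the argument as double (only the float->double
   default promotion is applied here); with a prototype "int f(short);"
   the argument is instead cast to the declared parameter type. */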
4234 /* parse an expression of the form '(type)' or '(expr)' and return its
4235 type */
4236 static void parse_expr_type(CType *type)
4238 int n;
4239 AttributeDef ad;
4241 skip('(');
4242 if (parse_btype(type, &ad)) {
4243 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4244 } else {
4245 expr_type(type);
4247 skip(')');
4250 static void parse_type(CType *type)
4252 AttributeDef ad;
4253 int n;
4255 if (!parse_btype(type, &ad)) {
4256 expect("type");
4258 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4261 static void vpush_tokc(int t)
4263 CType type;
4264 type.t = t;
4265 type.ref = 0;
4266 vsetc(&type, VT_CONST, &tokc);
4269 ST_FUNC void unary(void)
4271 int n, t, align, size, r, sizeof_caller;
4272 CType type;
4273 Sym *s;
4274 AttributeDef ad;
4276 sizeof_caller = in_sizeof;
4277 in_sizeof = 0;
4278 /* XXX: GCC 2.95.3 does not generate a table although it would be
4279 better here */
4280 tok_next:
4281 switch(tok) {
4282 case TOK_EXTENSION:
4283 next();
4284 goto tok_next;
4285 case TOK_CINT:
4286 case TOK_CCHAR:
4287 case TOK_LCHAR:
4288 vpushi(tokc.i);
4289 next();
4290 break;
4291 case TOK_CUINT:
4292 vpush_tokc(VT_INT | VT_UNSIGNED);
4293 next();
4294 break;
4295 case TOK_CLLONG:
4296 vpush_tokc(VT_LLONG);
4297 next();
4298 break;
4299 case TOK_CULLONG:
4300 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4301 next();
4302 break;
4303 case TOK_CFLOAT:
4304 vpush_tokc(VT_FLOAT);
4305 next();
4306 break;
4307 case TOK_CDOUBLE:
4308 vpush_tokc(VT_DOUBLE);
4309 next();
4310 break;
4311 case TOK_CLDOUBLE:
4312 vpush_tokc(VT_LDOUBLE);
4313 next();
4314 break;
4315 case TOK___FUNCTION__:
4316 if (!gnu_ext)
4317 goto tok_identifier;
4318 /* fall thru */
4319 case TOK___FUNC__:
4321 void *ptr;
4322 int len;
4323 /* special function name identifier */
4324 len = strlen(funcname) + 1;
4325 /* generate char[len] type */
4326 type.t = VT_BYTE;
4327 mk_pointer(&type);
4328 type.t |= VT_ARRAY;
4329 type.ref->c = len;
4330 vpush_ref(&type, data_section, data_section->data_offset, len);
4331 ptr = section_ptr_add(data_section, len);
4332 memcpy(ptr, funcname, len);
4333 next();
4335 break;
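 /* Added note (illustrative, not part of the original source): inside a
    function body, __func__ / __FUNCTION__ evaluate to the enclosing
    function's name as a char array, e.g.
        void f(void) { puts(__func__); }   // prints "f"
    the block above materializes that string in data_section. */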
4336 case TOK_LSTR:
4337 #ifdef TCC_TARGET_PE
4338 t = VT_SHORT | VT_UNSIGNED;
4339 #else
4340 t = VT_INT;
4341 #endif
4342 goto str_init;
4343 case TOK_STR:
4344 /* string parsing */
4345 t = VT_BYTE;
4346 str_init:
4347 if (tcc_state->warn_write_strings)
4348 t |= VT_CONSTANT;
4349 type.t = t;
4350 mk_pointer(&type);
4351 type.t |= VT_ARRAY;
4352 memset(&ad, 0, sizeof(AttributeDef));
4353 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4354 break;
4355 case '(':
4356 next();
4357 /* cast ? */
4358 if (parse_btype(&type, &ad)) {
4359 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4360 skip(')');
4361 /* check ISOC99 compound literal */
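 /* Added note (illustrative): a C99 compound literal such as
        int *p = (int[]){1, 2, 3};
    at file scope (global_expr) the object gets static storage,
    otherwise it is allocated on the stack. */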
4362 if (tok == '{') {
4363 /* data is allocated locally by default */
4364 if (global_expr)
4365 r = VT_CONST;
4366 else
4367 r = VT_LOCAL;
4368 /* all except arrays are lvalues */
4369 if (!(type.t & VT_ARRAY))
4370 r |= lvalue_type(type.t);
4371 memset(&ad, 0, sizeof(AttributeDef));
4372 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4373 } else {
4374 if (sizeof_caller) {
4375 vpush(&type);
4376 return;
4378 unary();
4379 gen_cast(&type);
4381 } else if (tok == '{') {
4382 if (const_wanted)
4383 tcc_error("expected constant");
4384 /* save all registers */
4385 if (!nocode_wanted)
4386 save_regs(0);
4387 /* statement expression : we do not accept break/continue
4388 inside as GCC does */
4389 block(NULL, NULL, 1);
4390 skip(')');
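 /* Added note (illustrative, assuming GNU C syntax): the statement
    expression handled above looks like
        int y = ({ int t = f(); t * 2; });
    the value of the last statement becomes the value of the whole
    expression; break/continue inside it are not accepted. */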
4391 } else {
4392 gexpr();
4393 skip(')');
4395 break;
4396 case '*':
4397 next();
4398 unary();
4399 indir();
4400 break;
4401 case '&':
4402 next();
4403 unary();
4404 /* function names must be treated as function pointers,
4405 except for unary '&' and sizeof. Since we consider that
4406 functions are not lvalues, we only have to handle it
4407 there and in function calls. */
4408 /* arrays can also be used although they are not lvalues */
4409 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4410 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4411 test_lvalue();
4412 mk_pointer(&vtop->type);
4413 gaddrof();
4414 break;
4415 case '!':
4416 next();
4417 unary();
4418 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4419 CType boolean;
4420 boolean.t = VT_BOOL;
4421 gen_cast(&boolean);
4422 vtop->c.i = !vtop->c.i;
4423 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4424 vtop->c.i ^= 1;
4425 else {
4426 save_regs(1);
4427 vseti(VT_JMP, gvtst(1, 0));
4429 break;
4430 case '~':
4431 next();
4432 unary();
4433 vpushi(-1);
4434 gen_op('^');
4435 break;
4436 case '+':
4437 next();
4438 unary();
4439 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4440 tcc_error("pointer not accepted for unary plus");
4441 /* In order to force a cast, we add zero, except for floating point
4442 where we really need a no-op (otherwise -0.0 would be transformed
4443 into +0.0). */
4444 if (!is_float(vtop->type.t)) {
4445 vpushi(0);
4446 gen_op('+');
4448 break;
4449 case TOK_SIZEOF:
4450 case TOK_ALIGNOF1:
4451 case TOK_ALIGNOF2:
4452 t = tok;
4453 next();
4454 in_sizeof++;
4455 unary_type(&type); /* unary() resets in_sizeof to 0 */
4456 size = type_size(&type, &align);
4457 if (t == TOK_SIZEOF) {
4458 if (!(type.t & VT_VLA)) {
4459 if (size < 0)
4460 tcc_error("sizeof applied to an incomplete type");
4461 vpushs(size);
4462 } else {
4463 vla_runtime_type_size(&type, &align);
4465 } else {
4466 vpushs(align);
4468 vtop->type.t |= VT_UNSIGNED;
4469 break;
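 /* Added note: for a VLA such as 'int a[n];' sizeof is computed at run
    time via vla_runtime_type_size(); for complete static types it folds
    to the constant pushed with vpushs().  Either way the result is made
    unsigned, matching size_t semantics. */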
4471 case TOK_builtin_expect:
4473 /* __builtin_expect is a no-op for now */
4474 int saved_nocode_wanted;
4475 next();
4476 skip('(');
4477 expr_eq();
4478 skip(',');
4479 saved_nocode_wanted = nocode_wanted;
4480 nocode_wanted = 1;
4481 expr_lor_const();
4482 vpop();
4483 nocode_wanted = saved_nocode_wanted;
4484 skip(')');
4486 break;
4487 case TOK_builtin_types_compatible_p:
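 /* Added note (illustrative usage, an assumption about intent):
        __builtin_types_compatible_p(int, const int)   // yields 1
    top-level qualifiers are stripped below before the comparison. */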
4489 CType type1, type2;
4490 next();
4491 skip('(');
4492 parse_type(&type1);
4493 skip(',');
4494 parse_type(&type2);
4495 skip(')');
4496 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4497 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4498 vpushi(is_compatible_types(&type1, &type2));
4500 break;
4501 case TOK_builtin_choose_expr:
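 /* Added note (illustrative; f64/f32 are hypothetical helpers): the first
    argument must be an integer constant expression and only the selected
    branch generates code, e.g.
        __builtin_choose_expr(sizeof(long) == 8, f64(x), f32(x))
    the unselected branch is parsed with nocode_wanted set. */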
4503 int saved_nocode_wanted;
4504 int64_t c;
4505 next();
4506 skip('(');
4507 c = expr_const64();
4508 skip(',');
4509 if (!c) {
4510 saved_nocode_wanted = nocode_wanted;
4511 nocode_wanted = 1;
4513 expr_eq();
4514 if (!c) {
4515 vpop();
4516 nocode_wanted = saved_nocode_wanted;
4518 skip(',');
4519 if (c) {
4520 saved_nocode_wanted = nocode_wanted;
4521 nocode_wanted = 1;
4523 expr_eq();
4524 if (c) {
4525 vpop();
4526 nocode_wanted = saved_nocode_wanted;
4528 skip(')');
4530 break;
4531 case TOK_builtin_constant_p:
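 /* Added note (illustrative usage):
        __builtin_constant_p(3 * 7)   // 1: folds to a constant
        __builtin_constant_p(argc)    // 0: not a constant
    here it simply tests whether the expression evaluates to VT_CONST. */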
4533 int saved_nocode_wanted, res;
4534 next();
4535 skip('(');
4536 saved_nocode_wanted = nocode_wanted;
4537 nocode_wanted = 1;
4538 gexpr();
4539 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4540 vpop();
4541 nocode_wanted = saved_nocode_wanted;
4542 skip(')');
4543 vpushi(res);
4545 break;
4546 case TOK_builtin_frame_address:
4547 case TOK_builtin_return_address:
4549 int tok1 = tok;
4550 int level;
4551 CType type;
4552 next();
4553 skip('(');
4554 if (tok != TOK_CINT) {
4555 tcc_error("%s only takes positive integers",
4556 tok1 == TOK_builtin_return_address ?
4557 "__builtin_return_address" :
4558 "__builtin_frame_address");
4560 level = (uint32_t)tokc.i;
4561 next();
4562 skip(')');
4563 type.t = VT_VOID;
4564 mk_pointer(&type);
4565 vset(&type, VT_LOCAL, 0); /* local frame */
4566 while (level--) {
4567 mk_pointer(&vtop->type);
4568 indir(); /* -> parent frame */
4570 if (tok1 == TOK_builtin_return_address) {
4571 // assume return address is just above frame pointer on stack
4572 vpushi(PTR_SIZE);
4573 gen_op('+');
4574 mk_pointer(&vtop->type);
4575 indir();
4578 break;
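 /* Added note (illustrative usage):
        void *fp = __builtin_frame_address(0);
        void *ra = __builtin_return_address(0);
    a level of N follows N saved frame pointers up the call chain; the
    return address is assumed to sit just above the frame pointer. */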
4579 #ifdef TCC_TARGET_X86_64
4580 #ifdef TCC_TARGET_PE
4581 case TOK_builtin_va_start:
4583 next();
4584 skip('(');
4585 expr_eq();
4586 skip(',');
4587 expr_eq();
4588 skip(')');
4589 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4590 tcc_error("__builtin_va_start expects a local variable");
4591 vtop->r &= ~(VT_LVAL | VT_REF);
4592 vtop->type = char_pointer_type;
4593 vtop->c.i += 8;
4594 vstore();
4596 break;
4597 #else
4598 case TOK_builtin_va_arg_types:
4600 CType type;
4601 next();
4602 skip('(');
4603 parse_type(&type);
4604 skip(')');
4605 vpushi(classify_x86_64_va_arg(&type));
4607 break;
4608 #endif
4609 #endif
4611 #ifdef TCC_TARGET_ARM64
4612 case TOK___va_start: {
4613 if (nocode_wanted)
4614 tcc_error("statement in global scope");
4615 next();
4616 skip('(');
4617 expr_eq();
4618 skip(',');
4619 expr_eq();
4620 skip(')');
4621 //xx check types
4622 gen_va_start();
4623 vpushi(0);
4624 vtop->type.t = VT_VOID;
4625 break;
4627 case TOK___va_arg: {
4628 CType type;
4629 if (nocode_wanted)
4630 tcc_error("statement in global scope");
4631 next();
4632 skip('(');
4633 expr_eq();
4634 skip(',');
4635 parse_type(&type);
4636 skip(')');
4637 //xx check types
4638 gen_va_arg(&type);
4639 vtop->type = type;
4640 break;
4642 case TOK___arm64_clear_cache: {
4643 next();
4644 skip('(');
4645 expr_eq();
4646 skip(',');
4647 expr_eq();
4648 skip(')');
4649 gen_clear_cache();
4650 vpushi(0);
4651 vtop->type.t = VT_VOID;
4652 break;
4654 #endif
4655 /* pre operations */
4656 case TOK_INC:
4657 case TOK_DEC:
4658 t = tok;
4659 next();
4660 unary();
4661 inc(0, t);
4662 break;
4663 case '-':
4664 next();
4665 unary();
4666 t = vtop->type.t & VT_BTYPE;
4667 if (is_float(t)) {
4668 /* In IEEE negate(x) isn't subtract(0,x), but rather
4669 subtract(-0, x). */
4670 vpush(&vtop->type);
4671 if (t == VT_FLOAT)
4672 vtop->c.f = -0.0f;
4673 else if (t == VT_DOUBLE)
4674 vtop->c.d = -0.0;
4675 else
4676 vtop->c.ld = -0.0;
4677 } else
4678 vpushi(0);
4679 vswap();
4680 gen_op('-');
4681 break;
4682 case TOK_LAND:
4683 if (!gnu_ext)
4684 goto tok_identifier;
4685 next();
4686 /* allow taking the address of a label */
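 /* Added note (illustrative, GNU "labels as values"):
        void *p = &&retry;
        ...
        goto *p;
    the label address is given a static void * type below. */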
4687 if (tok < TOK_UIDENT)
4688 expect("label identifier");
4689 s = label_find(tok);
4690 if (!s) {
4691 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4692 } else {
4693 if (s->r == LABEL_DECLARED)
4694 s->r = LABEL_FORWARD;
4696 if (!s->type.t) {
4697 s->type.t = VT_VOID;
4698 mk_pointer(&s->type);
4699 s->type.t |= VT_STATIC;
4701 vpushsym(&s->type, s);
4702 next();
4703 break;
4705 // special qnan, snan and infinity values
4706 case TOK___NAN__:
4707 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4708 next();
4709 break;
4710 case TOK___SNAN__:
4711 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4712 next();
4713 break;
4714 case TOK___INF__:
4715 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4716 next();
4717 break;
4719 default:
4720 tok_identifier:
4721 t = tok;
4722 next();
4723 if (t < TOK_UIDENT)
4724 expect("identifier");
4725 s = sym_find(t);
4726 if (!s) {
4727 const char *name = get_tok_str(t, NULL);
4728 if (tok != '(')
4729 tcc_error("'%s' undeclared", name);
4730 /* for simple function calls, we tolerate an undeclared
4731 external reference to an int() function */
4732 if (tcc_state->warn_implicit_function_declaration
4733 #ifdef TCC_TARGET_PE
4734 /* people must be warned about using undeclared WINAPI functions
4735 (which usually start with an uppercase letter) */
4736 || (name[0] >= 'A' && name[0] <= 'Z')
4737 #endif
4739 tcc_warning("implicit declaration of function '%s'", name);
4740 s = external_global_sym(t, &func_old_type, 0);
4742 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4743 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4744 /* if referencing an inline function, we generate a
4745 symbol for it if not already done. This has the
4746 effect of generating code for it at the end of the
4747 compilation unit. Inline functions are always
4748 generated in the text section. */
4749 if (!s->c && !nocode_wanted)
4750 put_extern_sym(s, text_section, 0, 0);
4751 r = VT_SYM | VT_CONST;
4752 } else {
4753 r = s->r;
4754 /* A symbol that has a register is a local register variable,
4755 which starts out as a VT_LOCAL value. */
4756 if ((r & VT_VALMASK) < VT_CONST)
4757 r = (r & ~VT_VALMASK) | VT_LOCAL;
4759 vset(&s->type, r, s->c);
4760 /* Point to s as backpointer (even without r&VT_SYM).
4761 Will be used by at least the x86 inline asm parser for
4762 regvars. */
4763 vtop->sym = s;
4764 if (vtop->r & VT_SYM) {
4765 vtop->c.i = 0;
4767 break;
4770 /* post operations */
4771 while (1) {
4772 if (tok == TOK_INC || tok == TOK_DEC) {
4773 inc(1, tok);
4774 next();
4775 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4776 int qualifiers;
4777 /* field */
4778 if (tok == TOK_ARROW)
4779 indir();
4780 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4781 test_lvalue();
4782 gaddrof();
4783 /* expect pointer on structure */
4784 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4785 expect("struct or union");
4786 if (tok == TOK_CDOUBLE)
4787 expect("field name");
4788 next();
4789 if (tok == TOK_CINT || tok == TOK_CUINT)
4790 expect("field name");
4791 s = find_field(&vtop->type, tok);
4792 if (!s)
4793 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4794 /* add field offset to pointer */
4795 vtop->type = char_pointer_type; /* change type to 'char *' */
4796 vpushi(s->c);
4797 gen_op('+');
4798 /* change type to field type, and set to lvalue */
4799 vtop->type = s->type;
4800 vtop->type.t |= qualifiers;
4801 /* an array is never an lvalue */
4802 if (!(vtop->type.t & VT_ARRAY)) {
4803 vtop->r |= lvalue_type(vtop->type.t);
4804 #ifdef CONFIG_TCC_BCHECK
4805 /* if bound checking, the referenced pointer must be checked */
4806 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4807 vtop->r |= VT_MUSTBOUND;
4808 #endif
4810 next();
4811 } else if (tok == '[') {
4812 next();
4813 gexpr();
4814 gen_op('+');
4815 indir();
4816 skip(']');
4817 } else if (tok == '(') {
4818 SValue ret;
4819 Sym *sa;
4820 int nb_args, ret_nregs, ret_align, regsize, variadic;
4822 /* function call */
4823 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4824 /* pointer test (no array accepted) */
4825 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4826 vtop->type = *pointed_type(&vtop->type);
4827 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4828 goto error_func;
4829 } else {
4830 error_func:
4831 expect("function pointer");
4833 } else {
4834 vtop->r &= ~VT_LVAL; /* no lvalue */
4836 /* get return type */
4837 s = vtop->type.ref;
4838 next();
4839 sa = s->next; /* first parameter */
4840 nb_args = 0;
4841 ret.r2 = VT_CONST;
4842 /* compute first implicit argument if a structure is returned */
4843 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4844 variadic = (s->c == FUNC_ELLIPSIS);
4845 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4846 &ret_align, &regsize);
4847 if (!ret_nregs) {
4848 /* get some space for the returned structure */
4849 size = type_size(&s->type, &align);
4850 #ifdef TCC_TARGET_ARM64
4851 /* On arm64, a small struct is returned in registers.
4852 It is much easier to write it to memory if we know
4853 that we are allowed to write some extra bytes, so
4854 round the allocated space up to a power of 2: */
4855 if (size < 16)
4856 while (size & (size - 1))
4857 size = (size | (size - 1)) + 1;
4858 #endif
4859 loc = (loc - size) & -align;
4860 ret.type = s->type;
4861 ret.r = VT_LOCAL | VT_LVAL;
4862 /* pass it as 'int' to avoid structure arg passing
4863 problems */
4864 vseti(VT_LOCAL, loc);
4865 ret.c = vtop->c;
4866 nb_args++;
4868 } else {
4869 ret_nregs = 1;
4870 ret.type = s->type;
4873 if (ret_nregs) {
4874 /* return in register */
4875 if (is_float(ret.type.t)) {
4876 ret.r = reg_fret(ret.type.t);
4877 #ifdef TCC_TARGET_X86_64
4878 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4879 ret.r2 = REG_QRET;
4880 #endif
4881 } else {
4882 #ifndef TCC_TARGET_ARM64
4883 #ifdef TCC_TARGET_X86_64
4884 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4885 #else
4886 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4887 #endif
4888 ret.r2 = REG_LRET;
4889 #endif
4890 ret.r = REG_IRET;
4892 ret.c.i = 0;
4894 if (tok != ')') {
4895 for(;;) {
4896 expr_eq();
4897 gfunc_param_typed(s, sa);
4898 nb_args++;
4899 if (sa)
4900 sa = sa->next;
4901 if (tok == ')')
4902 break;
4903 skip(',');
4906 if (sa)
4907 tcc_error("too few arguments to function");
4908 skip(')');
4909 if (!nocode_wanted) {
4910 gfunc_call(nb_args);
4911 } else {
4912 vtop -= (nb_args + 1);
4915 /* return value */
4916 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4917 vsetc(&ret.type, r, &ret.c);
4918 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4921 /* handle packed struct return */
4922 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4923 int addr, offset;
4925 size = type_size(&s->type, &align);
4926 /* We often write whole registers, so make sure there's enough
4927 space. Assume the register size is a power of 2. */
4928 if (regsize > align)
4929 align = regsize;
4930 loc = (loc - size) & -align;
4931 addr = loc;
4932 offset = 0;
4933 for (;;) {
4934 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4935 vswap();
4936 vstore();
4937 vtop--;
4938 if (--ret_nregs == 0)
4939 break;
4940 offset += regsize;
4942 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4944 } else {
4945 break;
4950 ST_FUNC void expr_prod(void)
4952 int t;
4954 unary();
4955 while (tok == '*' || tok == '/' || tok == '%') {
4956 t = tok;
4957 next();
4958 unary();
4959 gen_op(t);
4963 ST_FUNC void expr_sum(void)
4965 int t;
4967 expr_prod();
4968 while (tok == '+' || tok == '-') {
4969 t = tok;
4970 next();
4971 expr_prod();
4972 gen_op(t);
4976 static void expr_shift(void)
4978 int t;
4980 expr_sum();
4981 while (tok == TOK_SHL || tok == TOK_SAR) {
4982 t = tok;
4983 next();
4984 expr_sum();
4985 gen_op(t);
4989 static void expr_cmp(void)
4991 int t;
4993 expr_shift();
4994 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4995 tok == TOK_ULT || tok == TOK_UGE) {
4996 t = tok;
4997 next();
4998 expr_shift();
4999 gen_op(t);
5003 static void expr_cmpeq(void)
5005 int t;
5007 expr_cmp();
5008 while (tok == TOK_EQ || tok == TOK_NE) {
5009 t = tok;
5010 next();
5011 expr_cmp();
5012 gen_op(t);
5016 static void expr_and(void)
5018 expr_cmpeq();
5019 while (tok == '&') {
5020 next();
5021 expr_cmpeq();
5022 gen_op('&');
5026 static void expr_xor(void)
5028 expr_and();
5029 while (tok == '^') {
5030 next();
5031 expr_and();
5032 gen_op('^');
5036 static void expr_or(void)
5038 expr_xor();
5039 while (tok == '|') {
5040 next();
5041 expr_xor();
5042 gen_op('|');
5046 /* XXX: fix this mess */
5047 static void expr_land_const(void)
5049 expr_or();
5050 while (tok == TOK_LAND) {
5051 next();
5052 expr_or();
5053 gen_op(TOK_LAND);
5056 static void expr_lor_const(void)
5058 expr_land_const();
5059 while (tok == TOK_LOR) {
5060 next();
5061 expr_land_const();
5062 gen_op(TOK_LOR);
5066 static void expr_land(void)
5068 expr_or();
5069 if (tok == TOK_LAND) {
5070 int t = 0;
5071 for(;;) {
5072 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5073 CType ctb;
5074 ctb.t = VT_BOOL;
5075 gen_cast(&ctb);
5076 if (vtop->c.i) {
5077 vpop();
5078 } else {
5079 int saved_nocode_wanted = nocode_wanted;
5080 nocode_wanted = 1;
5081 while (tok == TOK_LAND) {
5082 next();
5083 expr_or();
5084 vpop();
5086 if (t)
5087 gsym(t);
5088 nocode_wanted = saved_nocode_wanted;
5089 gen_cast(&int_type);
5090 break;
5092 } else {
5093 if (!t)
5094 save_regs(1);
5095 t = gvtst(1, t);
5097 if (tok != TOK_LAND) {
5098 if (t)
5099 vseti(VT_JMPI, t);
5100 else
5101 vpushi(1);
5102 break;
5104 next();
5105 expr_or();
5110 static void expr_lor(void)
5112 expr_land();
5113 if (tok == TOK_LOR) {
5114 int t = 0;
5115 for(;;) {
5116 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5117 CType ctb;
5118 ctb.t = VT_BOOL;
5119 gen_cast(&ctb);
5120 if (!vtop->c.i) {
5121 vpop();
5122 } else {
5123 int saved_nocode_wanted = nocode_wanted;
5124 nocode_wanted = 1;
5125 while (tok == TOK_LOR) {
5126 next();
5127 expr_land();
5128 vpop();
5130 if (t)
5131 gsym(t);
5132 nocode_wanted = saved_nocode_wanted;
5133 gen_cast(&int_type);
5134 break;
5136 } else {
5137 if (!t)
5138 save_regs(1);
5139 t = gvtst(0, t);
5141 if (tok != TOK_LOR) {
5142 if (t)
5143 vseti(VT_JMP, t);
5144 else
5145 vpushi(0);
5146 break;
5148 next();
5149 expr_land();
5154 /* Assuming vtop is a value used in a conditional context
5155 (i.e. compared with zero) return 0 if it's false, 1 if
5156 true and -1 if it can't be statically determined. */
5157 static int condition_3way(void)
5159 int c = -1;
5160 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5161 (!(vtop->r & VT_SYM) ||
5162 !(vtop->sym->type.t & VT_WEAK))) {
5163 CType boolean;
5164 boolean.t = VT_BOOL;
5165 vdup();
5166 gen_cast(&boolean);
5167 c = vtop->c.i;
5168 vpop();
5170 return c;
5173 static void expr_cond(void)
5175 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv;
5176 int c;
5177 SValue sv;
5178 CType type, type1, type2;
5180 expr_lor();
5181 if (tok == '?') {
5182 next();
5183 c = condition_3way();
5184 if (c >= 0) {
5185 int saved_nocode_wanted = nocode_wanted;
5186 if (c) {
5187 if (tok != ':' || !gnu_ext) {
5188 vpop();
5189 gexpr();
5191 skip(':');
5192 nocode_wanted = 1;
5193 expr_cond();
5194 vpop();
5195 nocode_wanted = saved_nocode_wanted;
5196 } else {
5197 vpop();
5198 if (tok != ':' || !gnu_ext) {
5199 nocode_wanted = 1;
5200 gexpr();
5201 vpop();
5202 nocode_wanted = saved_nocode_wanted;
5204 skip(':');
5205 expr_cond();
5208 else {
5209 /* XXX This doesn't handle nocode_wanted correctly at all.
5210 It unconditionally calls gv/gvtst and friends. That's
5211 the case for many of the expr_ routines. Currently
5212 that should generate only useless code, but depending
5213 on other operand handling this might also generate
5214 pointer derefs for lvalue conversions whose result
5215 is useless, but can nevertheless lead to a segfault.
5217 At some point we need to overhaul the whole nocode_wanted
5218 handling. */
5219 if (vtop != vstack) {
5220 /* needed to avoid having different registers saved in
5221 each branch */
5222 if (is_float(vtop->type.t)) {
5223 rc = RC_FLOAT;
5224 #ifdef TCC_TARGET_X86_64
5225 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5226 rc = RC_ST0;
5228 #endif
5230 else
5231 rc = RC_INT;
5232 gv(rc);
5233 save_regs(1);
5235 if (tok == ':' && gnu_ext) {
5236 gv_dup();
5237 tt = gvtst(1, 0);
5238 } else {
5239 tt = gvtst(1, 0);
5240 gexpr();
5242 type1 = vtop->type;
5243 sv = *vtop; /* save value to handle it later */
5244 vtop--; /* no vpop so that FP stack is not flushed */
5245 skip(':');
5246 u = gjmp(0);
5247 gsym(tt);
5248 expr_cond();
5249 type2 = vtop->type;
5251 t1 = type1.t;
5252 bt1 = t1 & VT_BTYPE;
5253 t2 = type2.t;
5254 bt2 = t2 & VT_BTYPE;
5255 /* cast operands to correct type according to ISOC rules */
5256 if (is_float(bt1) || is_float(bt2)) {
5257 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5258 type.t = VT_LDOUBLE;
5259 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5260 type.t = VT_DOUBLE;
5261 } else {
5262 type.t = VT_FLOAT;
5264 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5265 /* cast to biggest op */
5266 type.t = VT_LLONG;
5267 /* convert to unsigned if it does not fit in a long long */
5268 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5269 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5270 type.t |= VT_UNSIGNED;
5271 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5272 /* If one is a null ptr constant the result type
5273 is the other. */
5274 if (is_null_pointer (vtop))
5275 type = type1;
5276 else if (is_null_pointer (&sv))
5277 type = type2;
5278 /* XXX: test pointer compatibility, C99 has more elaborate
5279 rules here. */
5280 else
5281 type = type1;
5282 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5283 /* XXX: test function pointer compatibility */
5284 type = bt1 == VT_FUNC ? type1 : type2;
5285 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5286 /* XXX: test structure compatibility */
5287 type = bt1 == VT_STRUCT ? type1 : type2;
5288 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5289 /* NOTE: as an extension, we accept void on only one side */
5290 type.t = VT_VOID;
5291 } else {
5292 /* integer operations */
5293 type.t = VT_INT;
5294 /* convert to unsigned if it does not fit in an integer */
5295 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5296 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5297 type.t |= VT_UNSIGNED;
5299 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5300 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5301 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5303 /* now we convert second operand */
5304 gen_cast(&type);
5305 if (islv) {
5306 mk_pointer(&vtop->type);
5307 gaddrof();
5309 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5310 gaddrof();
5311 rc = RC_INT;
5312 if (is_float(type.t)) {
5313 rc = RC_FLOAT;
5314 #ifdef TCC_TARGET_X86_64
5315 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5316 rc = RC_ST0;
5318 #endif
5319 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5320 /* for long longs, we use fixed registers to avoid having
5321 to handle a complicated move */
5322 rc = RC_IRET;
5325 r2 = gv(rc);
5326 /* this is horrible, but we must also convert first
5327 operand */
5328 tt = gjmp(0);
5329 gsym(u);
5330 /* put again first value and cast it */
5331 *vtop = sv;
5332 gen_cast(&type);
5333 if (islv) {
5334 mk_pointer(&vtop->type);
5335 gaddrof();
5337 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5338 gaddrof();
5339 r1 = gv(rc);
5340 move_reg(r2, r1, type.t);
5341 vtop->r = r2;
5342 gsym(tt);
5343 if (islv)
5344 indir();
5349 static void expr_eq(void)
5351 int t;
5353 expr_cond();
5354 if (tok == '=' ||
5355 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5356 tok == TOK_A_XOR || tok == TOK_A_OR ||
5357 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5358 test_lvalue();
5359 t = tok;
5360 next();
5361 if (t == '=') {
5362 expr_eq();
5363 } else {
5364 vdup();
5365 expr_eq();
5366 gen_op(t & 0x7f);
5368 vstore();
5372 ST_FUNC void gexpr(void)
5374 while (1) {
5375 expr_eq();
5376 if (tok != ',')
5377 break;
5378 vpop();
5379 next();
5383 /* parse an expression and return its type without any side effect. */
5384 static void expr_type(CType *type)
5386 int saved_nocode_wanted;
5388 saved_nocode_wanted = nocode_wanted;
5389 nocode_wanted = 1;
5390 gexpr();
5391 *type = vtop->type;
5392 vpop();
5393 nocode_wanted = saved_nocode_wanted;
5396 /* parse a unary expression and return its type without any side
5397 effect. */
5398 static void unary_type(CType *type)
5400 int a;
5402 a = nocode_wanted;
5403 nocode_wanted = 1;
5404 unary();
5405 *type = vtop->type;
5406 vpop();
5407 nocode_wanted = a;
5410 /* parse a constant expression and return value in vtop. */
5411 static void expr_const1(void)
5413 int a;
5414 a = const_wanted;
5415 const_wanted = 1;
5416 expr_cond();
5417 const_wanted = a;
5420 /* parse an integer constant and return its value. */
5421 static inline int64_t expr_const64(void)
5423 int64_t c;
5424 expr_const1();
5425 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5426 expect("constant expression");
5427 c = vtop->c.i;
5428 vpop();
5429 return c;
5432 /* parse an integer constant and return its value.
5433 Complain if it doesn't fit 32bit (signed or unsigned). */
5434 ST_FUNC int expr_const(void)
5436 int c;
5437 int64_t wc = expr_const64();
5438 c = wc;
5439 if (c != wc && (unsigned)c != wc)
5440 tcc_error("constant exceeds 32 bit");
5441 return c;
5444 /* return the label token if current token is a label, otherwise
5445 return zero */
5446 static int is_label(void)
5448 int last_tok;
5450 /* fast test first */
5451 if (tok < TOK_UIDENT)
5452 return 0;
5453 /* no need to save tokc because tok is an identifier */
5454 last_tok = tok;
5455 next();
5456 if (tok == ':') {
5457 next();
5458 return last_tok;
5459 } else {
5460 unget_tok(last_tok);
5461 return 0;
5465 static void label_or_decl(int l)
5467 int last_tok;
5469 /* fast test first */
5470 if (tok >= TOK_UIDENT)
5472 /* no need to save tokc because tok is an identifier */
5473 last_tok = tok;
5474 next();
5475 if (tok == ':') {
5476 unget_tok(last_tok);
5477 return;
5479 unget_tok(last_tok);
5481 decl(l);
5484 static int case_cmp(const void *pa, const void *pb)
5486 int64_t a = (*(struct case_t**) pa)->v1;
5487 int64_t b = (*(struct case_t**) pb)->v1;
5488 return a < b ? -1 : a > b;
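 /* Added note: gcase() below emits the dispatch code for an array of case
    ranges sorted by case_cmp(): while more than 4 ranges remain it
    recurses by binary search, then tests the remaining ranges linearly.
    Each test checks v1 <= x <= v2 and jumps to p->sym on a match. */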
5491 static void gcase(struct case_t **base, int len, int *bsym)
5493 struct case_t *p;
5494 int e;
5495 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5496 gv(RC_INT);
5497 while (len > 4) {
5498 /* binary search */
5499 p = base[len/2];
5500 vdup();
5501 if (ll)
5502 vpushll(p->v2);
5503 else
5504 vpushi(p->v2);
5505 gen_op(TOK_LE);
5506 e = gtst(1, 0);
5507 vdup();
5508 if (ll)
5509 vpushll(p->v1);
5510 else
5511 vpushi(p->v1);
5512 gen_op(TOK_GE);
5513 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5514 /* x < v1 */
5515 gcase(base, len/2, bsym);
5516 if (cur_switch->def_sym)
5517 gjmp_addr(cur_switch->def_sym);
5518 else
5519 *bsym = gjmp(*bsym);
5520 /* x > v2 */
5521 gsym(e);
5522 e = len/2 + 1;
5523 base += e; len -= e;
5525 /* linear scan */
5526 while (len--) {
5527 p = *base++;
5528 vdup();
5529 if (ll)
5530 vpushll(p->v2);
5531 else
5532 vpushi(p->v2);
5533 if (p->v1 == p->v2) {
5534 gen_op(TOK_EQ);
5535 gtst_addr(0, p->sym);
5536 } else {
5537 gen_op(TOK_LE);
5538 e = gtst(1, 0);
5539 vdup();
5540 if (ll)
5541 vpushll(p->v1);
5542 else
5543 vpushi(p->v1);
5544 gen_op(TOK_GE);
5545 gtst_addr(0, p->sym);
5546 gsym(e);
5551 static void block(int *bsym, int *csym, int is_expr)
5553 int a, b, c, d, cond;
5554 Sym *s;
5556 /* generate line number info */
5557 if (tcc_state->do_debug &&
5558 (last_line_num != file->line_num || last_ind != ind)) {
5559 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5560 last_ind = ind;
5561 last_line_num = file->line_num;
5564 if (is_expr) {
5565 /* default return value is (void) */
5566 vpushi(0);
5567 vtop->type.t = VT_VOID;
5570 if (tok == TOK_IF) {
5571 /* if test */
5572 int saved_nocode_wanted = nocode_wanted;
5573 next();
5574 skip('(');
5575 gexpr();
5576 skip(')');
5577 cond = condition_3way();
5578 if (cond == 0)
5579 nocode_wanted |= 2;
5580 a = gvtst(1, 0);
5581 block(bsym, csym, 0);
5582 if (cond != 1)
5583 nocode_wanted = saved_nocode_wanted;
5584 c = tok;
5585 if (c == TOK_ELSE) {
5586 next();
5587 if (cond == 1)
5588 nocode_wanted |= 2;
5589 d = gjmp(0);
5590 gsym(a);
5591 block(bsym, csym, 0);
5592 gsym(d); /* patch else jmp */
5593 if (cond != 0)
5594 nocode_wanted = saved_nocode_wanted;
5595 } else
5596 gsym(a);
5597 } else if (tok == TOK_WHILE) {
5598 int saved_nocode_wanted;
5599 nocode_wanted &= ~2;
5600 next();
5601 d = ind;
5602 vla_sp_restore();
5603 skip('(');
5604 gexpr();
5605 skip(')');
5606 a = gvtst(1, 0);
5607 b = 0;
5608 ++local_scope;
5609 saved_nocode_wanted = nocode_wanted;
5610 block(&a, &b, 0);
5611 nocode_wanted = saved_nocode_wanted;
5612 --local_scope;
5613 if(!nocode_wanted)
5614 gjmp_addr(d);
5615 gsym(a);
5616 gsym_addr(b, d);
5617 } else if (tok == '{') {
5618 Sym *llabel;
5619 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5621 next();
5622 /* record local declaration stack position */
5623 s = local_stack;
5624 llabel = local_label_stack;
5625 ++local_scope;
5627 /* handle local label declarations */
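 /* Added note (illustrative, GNU local labels):
        { __label__ out;  ...  goto out;  out: ; }
    labels declared this way are scoped to the enclosing block. */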
5628 if (tok == TOK_LABEL) {
5629 next();
5630 for(;;) {
5631 if (tok < TOK_UIDENT)
5632 expect("label identifier");
5633 label_push(&local_label_stack, tok, LABEL_DECLARED);
5634 next();
5635 if (tok == ',') {
5636 next();
5637 } else {
5638 skip(';');
5639 break;
5643 while (tok != '}') {
5644 label_or_decl(VT_LOCAL);
5645 if (tok != '}') {
5646 if (is_expr)
5647 vpop();
5648 block(bsym, csym, is_expr);
5651 /* pop locally defined labels */
5652 label_pop(&local_label_stack, llabel);
5653 /* pop locally defined symbols */
5654 --local_scope;
5655 /* In the is_expr case (a statement expression is finished here),
5656 vtop might refer to symbols on the local_stack. Either via the
5657 type or via vtop->sym. We can't pop those nor any that in turn
5658 might be referred to. To make it easier we don't roll back
5659 any symbols in that case; some upper level call to block() will
5660 do that. We do have to remove such symbols from the lookup
5661 tables, though. sym_pop will do that. */
5662 sym_pop(&local_stack, s, is_expr);
5664 /* Pop VLA frames and restore stack pointer if required */
5665 if (vlas_in_scope > saved_vlas_in_scope) {
5666 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5667 vla_sp_restore();
5669 vlas_in_scope = saved_vlas_in_scope;
5671 next();
5672 } else if (tok == TOK_RETURN) {
5673 next();
5674 if (tok != ';') {
5675 gexpr();
5676 gen_assign_cast(&func_vt);
5677 #ifdef TCC_TARGET_ARM64
5678 // Perhaps it would be better to use this for all backends:
5679 greturn();
5680 #else
5681 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5682 CType type, ret_type;
5683 int ret_align, ret_nregs, regsize;
5684 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5685 &ret_align, &regsize);
5686 if (0 == ret_nregs) {
5687 /* if returning structure, must copy it to implicit
5688 first pointer arg location */
5689 type = func_vt;
5690 mk_pointer(&type);
5691 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5692 indir();
5693 vswap();
5694 /* copy structure value to pointer */
5695 vstore();
5696 } else {
5697 /* returning structure packed into registers */
5698 int r, size, addr, align;
5699 size = type_size(&func_vt,&align);
5700 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5701 (vtop->c.i & (ret_align-1)))
5702 && (align & (ret_align-1))) {
5703 loc = (loc - size) & -ret_align;
5704 addr = loc;
5705 type = func_vt;
5706 vset(&type, VT_LOCAL | VT_LVAL, addr);
5707 vswap();
5708 vstore();
5709 vpop();
5710 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5712 vtop->type = ret_type;
5713 if (is_float(ret_type.t))
5714 r = rc_fret(ret_type.t);
5715 else
5716 r = RC_IRET;
5718 if (ret_nregs == 1)
5719 gv(r);
5720 else {
5721 for (;;) {
5722 vdup();
5723 gv(r);
5724 vpop();
5725 if (--ret_nregs == 0)
5726 break;
5727 /* We assume that when a structure is returned in multiple
5728 registers, their classes are consecutive values of the
5729 sequence s(n) = 2^n */
5730 r <<= 1;
5731 vtop->c.i += regsize;
5735 } else if (is_float(func_vt.t)) {
5736 gv(rc_fret(func_vt.t));
5737 } else {
5738 gv(RC_IRET);
5740 #endif
5741 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5743 skip(';');
5744 /* jump unless last stmt in top-level block */
5745 if (tok != '}' || local_scope != 1)
5746 rsym = gjmp(rsym);
5747 nocode_wanted |= 2;
5748 } else if (tok == TOK_BREAK) {
5749 /* compute jump */
5750 if (!bsym)
5751 tcc_error("cannot break");
5752 *bsym = gjmp(*bsym);
5753 next();
5754 skip(';');
5755 nocode_wanted |= 2;
5756 } else if (tok == TOK_CONTINUE) {
5757 /* compute jump */
5758 if (!csym)
5759 tcc_error("cannot continue");
5760 vla_sp_restore_root();
5761 *csym = gjmp(*csym);
5762 next();
5763 skip(';');
5764 } else if (tok == TOK_FOR) {
5765 int e;
5766 int saved_nocode_wanted;
5767 nocode_wanted &= ~2;
5768 next();
5769 skip('(');
5770 s = local_stack;
5771 ++local_scope;
5772 if (tok != ';') {
5773 /* c99 for-loop init decl? */
5774 if (!decl0(VT_LOCAL, 1)) {
5775 /* no, regular for-loop init expr */
5776 gexpr();
5777 vpop();
5780 skip(';');
5781 d = ind;
5782 c = ind;
5783 vla_sp_restore();
5784 a = 0;
5785 b = 0;
5786 if (tok != ';') {
5787 gexpr();
5788 a = gvtst(1, 0);
5790 skip(';');
5791 if (tok != ')') {
5792 e = gjmp(0);
5793 c = ind;
5794 vla_sp_restore();
5795 gexpr();
5796 vpop();
5797 gjmp_addr(d);
5798 gsym(e);
5800 skip(')');
5801 saved_nocode_wanted = nocode_wanted;
5802 block(&a, &b, 0);
5803 nocode_wanted = saved_nocode_wanted;
5804 if(!nocode_wanted)
5805 gjmp_addr(c);
5806 gsym(a);
5807 gsym_addr(b, c);
5808 --local_scope;
5809 sym_pop(&local_stack, s, 0);
5811 } else
5812 if (tok == TOK_DO) {
5813 int saved_nocode_wanted;
5814 nocode_wanted &= ~2;
5815 next();
5816 a = 0;
5817 b = 0;
5818 d = ind;
5819 vla_sp_restore();
5820 saved_nocode_wanted = nocode_wanted;
5821 block(&a, &b, 0);
5822 nocode_wanted = saved_nocode_wanted;
5823 skip(TOK_WHILE);
5824 skip('(');
5825 gsym(b);
5826 gexpr();
5827 c = gvtst(0, 0);
5828 if (!nocode_wanted)
5829 gsym_addr(c, d);
5830 skip(')');
5831 gsym(a);
5832 skip(';');
5833 } else
5834 if (tok == TOK_SWITCH) {
5835 struct switch_t *saved, sw;
5836 int saved_nocode_wanted = nocode_wanted;
5837 SValue switchval;
5838 next();
5839 skip('(');
5840 gexpr();
5841 skip(')');
5842 switchval = *vtop--;
5843 a = 0;
5844 b = gjmp(0); /* jump to first case */
5845 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5846 saved = cur_switch;
5847 cur_switch = &sw;
5848 block(&a, csym, 0);
5849 nocode_wanted = saved_nocode_wanted;
5850 a = gjmp(a); /* add implicit break */
5851 /* case lookup */
5852 gsym(b);
5853 if (!nocode_wanted) {
5854 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5855 for (b = 1; b < sw.n; b++)
5856 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5857 tcc_error("duplicate case value");
5858 /* Our switch table sorting is signed, so the compared
5859 value needs to be as well when it's 64bit. */
5860 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5861 switchval.type.t &= ~VT_UNSIGNED;
5862 vpushv(&switchval);
5863 gcase(sw.p, sw.n, &a);
5864 vpop();
5865 if (sw.def_sym)
5866 gjmp_addr(sw.def_sym);
5868 dynarray_reset(&sw.p, &sw.n);
5869 cur_switch = saved;
5870 /* break label */
5871 gsym(a);
5872 } else
5873 if (tok == TOK_CASE) {
5874 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5875 if (!cur_switch)
5876 expect("switch");
5877 nocode_wanted &= ~2;
5878 next();
5879 cr->v1 = cr->v2 = expr_const64();
5880 if (gnu_ext && tok == TOK_DOTS) {
5881 next();
5882 cr->v2 = expr_const64();
5883 if (cr->v2 < cr->v1)
5884 tcc_warning("empty case range");
5886 cr->sym = ind;
5887 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5888 skip(':');
5889 is_expr = 0;
5890 goto block_after_label;
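 /* Added note (illustrative, GNU case ranges):
        case '0' ... '9': return 1;
    v1/v2 above hold the inclusive bounds of such a range. */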
5891 } else
5892 if (tok == TOK_DEFAULT) {
5893 next();
5894 skip(':');
5895 if (!cur_switch)
5896 expect("switch");
5897 if (cur_switch->def_sym)
5898 tcc_error("too many 'default'");
5899 cur_switch->def_sym = ind;
5900 is_expr = 0;
5901 goto block_after_label;
5902 } else
5903 if (tok == TOK_GOTO) {
5904 next();
5905 if (tok == '*' && gnu_ext) {
5906 /* computed goto */
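 /* Added note (illustrative, jump_table is a hypothetical array):
        goto *jump_table[op];
    the operand must have pointer type, as checked below. */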
5907 next();
5908 gexpr();
5909 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5910 expect("pointer");
5911 if (!nocode_wanted)
5912 ggoto();
5913 else
5914 vtop--;
5915 } else if (tok >= TOK_UIDENT) {
5916 s = label_find(tok);
5917 /* put forward definition if needed */
5918 if (!s) {
5919 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5920 } else {
5921 if (s->r == LABEL_DECLARED)
5922 s->r = LABEL_FORWARD;
5924 vla_sp_restore_root();
5925 if (nocode_wanted)
5927 else if (s->r & LABEL_FORWARD)
5928 s->jnext = gjmp(s->jnext);
5929 else
5930 gjmp_addr(s->jnext);
5931 next();
5932 } else {
5933 expect("label identifier");
5935 skip(';');
5936 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5937 asm_instr();
5938 } else {
5939 b = is_label();
5940 if (b) {
5941 /* label case */
5942 s = label_find(b);
5943 if (s) {
5944 if (s->r == LABEL_DEFINED)
5945 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5946 gsym(s->jnext);
5947 s->r = LABEL_DEFINED;
5948 } else {
5949 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5951 s->jnext = ind;
5952 vla_sp_restore();
5953 /* we accept this, but it is a mistake */
5954 block_after_label:
5955 nocode_wanted &= ~2;
5956 if (tok == '}') {
5957 tcc_warning("deprecated use of label at end of compound statement");
5958 } else {
5959 if (is_expr)
5960 vpop();
5961 block(bsym, csym, is_expr);
5963 } else {
5964 /* expression case */
5965 if (tok != ';') {
5966 if (is_expr) {
5967 vpop();
5968 gexpr();
5969 } else {
5970 gexpr();
5971 vpop();
5974 skip(';');
5979 #define EXPR_CONST 1
5980 #define EXPR_ANY 2
5982 static void parse_init_elem(int expr_type)
5984 int saved_global_expr;
5985 switch(expr_type) {
5986 case EXPR_CONST:
5987 /* compound literals must be allocated globally in this case */
5988 saved_global_expr = global_expr;
5989 global_expr = 1;
5990 expr_const1();
5991 global_expr = saved_global_expr;
5992 /* NOTE: symbols are accepted */
5993 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5994 tcc_error("initializer element is not constant");
5995 break;
5996 case EXPR_ANY:
5997 expr_eq();
5998 break;
6002 /* t is the array or struct type. c is the array or struct
6003 address. cur_field is the pointer to the current
6004 value; for arrays, the 'c' member contains the current start
6005 index and the 'r' member contains the end index (in case of range init).
6006 'size_only' is true if only size info is needed (only used
6007 for arrays) */
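 /* Added note (illustrative examples of the designators parsed below;
    struct P and its fields are hypothetical):
        struct P p = { .y = 2, .x = 1 };
        int a[10] = { [2] = 5, [4 ... 6] = 1 };   // range form is a GNU ext
    a designator selects the field or index the following value applies to. */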
6008 static void decl_designator(CType *type, Section *sec, unsigned long c,
6009 Sym **cur_field, int size_only)
6011 Sym *s, *f;
6012 int notfirst, index, index_last, align, l, nb_elems, elem_size;
6013 CType type1;
6015 notfirst = 0;
6016 elem_size = 0;
6017 nb_elems = 1;
6018 if (gnu_ext && (l = is_label()) != 0)
6019 goto struct_field;
6020 while (tok == '[' || tok == '.') {
6021 if (tok == '[') {
6022 if (!(type->t & VT_ARRAY))
6023 expect("array type");
6024 s = type->ref;
6025 next();
6026 index = expr_const();
6027 if (index < 0 || (s->c >= 0 && index >= s->c))
6028 tcc_error("invalid index");
6029 if (tok == TOK_DOTS && gnu_ext) {
6030 next();
6031 index_last = expr_const();
6032 if (index_last < 0 ||
6033 (s->c >= 0 && index_last >= s->c) ||
6034 index_last < index)
6035 tcc_error("invalid index");
6036 } else {
6037 index_last = index;
6039 skip(']');
6040 if (!notfirst) {
6041 (*cur_field)->c = index;
6042 (*cur_field)->r = index_last;
6044 type = pointed_type(type);
6045 elem_size = type_size(type, &align);
6046 c += index * elem_size;
6047 /* NOTE: we only support ranges for last designator */
6048 nb_elems = index_last - index + 1;
6049 if (nb_elems != 1) {
6050 notfirst = 1;
6051 break;
6053 } else {
6054 next();
6055 l = tok;
6056 next();
6057 struct_field:
6058 if ((type->t & VT_BTYPE) != VT_STRUCT)
6059 expect("struct/union type");
6060 f = find_field(type, l);
6061 if (!f)
6062 expect("field");
6063 if (!notfirst)
6064 *cur_field = f;
6065 /* XXX: fix this mess by using explicit storage field */
6066 type1 = f->type;
6067 type1.t |= (type->t & ~VT_TYPE);
6068 type = &type1;
6069 c += f->c;
6071 notfirst = 1;
6073 if (notfirst) {
6074 if (tok == '=') {
6075 next();
6076 } else {
6077 if (!gnu_ext)
6078 expect("=");
6080 } else {
6081 if (type->t & VT_ARRAY) {
6082 index = (*cur_field)->c;
6083 if (type->ref->c >= 0 && index >= type->ref->c)
6084 tcc_error("index too large");
6085 type = pointed_type(type);
6086 c += index * type_size(type, &align);
6087 } else {
6088 f = *cur_field;
6089 if (!f)
6090 tcc_error("too many field init");
6091 /* XXX: fix this mess by using explicit storage field */
6092 type1 = f->type;
6093 type1.t |= (type->t & ~VT_TYPE);
6094 type = &type1;
6095 c += f->c;
6098 decl_initializer(type, sec, c, 0, size_only);
6100 /* XXX: make it more general */
6101 if (!size_only && nb_elems > 1) {
6102 unsigned long c_end;
6103 uint8_t *src, *dst;
6104 int i;
6106 if (!sec) {
6107 vset(type, VT_LOCAL|VT_LVAL, c);
6108 for (i = 1; i < nb_elems; i++) {
6109 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6110 vswap();
6111 vstore();
6113 vpop();
6114 } else {
6115 c_end = c + nb_elems * elem_size;
6116 if (c_end > sec->data_allocated)
6117 section_realloc(sec, c_end);
6118 src = sec->data + c;
6119 dst = src;
6120 for(i = 1; i < nb_elems; i++) {
6121 dst += elem_size;
6122 memcpy(dst, src, elem_size);
6128 /* store a value or an expression directly in global data or in local array */
6129 static void init_putv(CType *type, Section *sec, unsigned long c)
6131 int bt, bit_pos, bit_size;
6132 void *ptr;
6133 unsigned long long bit_mask;
6134 CType dtype;
6136 dtype = *type;
6137 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6139 if (sec) {
6140 int size, align;
6141 /* XXX: not portable */
6142 /* XXX: generate error if incorrect relocation */
6143 gen_assign_cast(&dtype);
6144 bt = type->t & VT_BTYPE;
6145 size = type_size(type, &align);
6146 if (c + size > sec->data_allocated) {
6147 section_realloc(sec, c + size);
6149 ptr = sec->data + c;
6150 /* XXX: make code faster ? */
6151 if (!(type->t & VT_BITFIELD)) {
6152 bit_pos = 0;
6153 bit_size = PTR_SIZE * 8;
6154 bit_mask = -1LL;
6155 } else {
6156 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6157 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6158 bit_mask = (1LL << bit_size) - 1;
6160 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6161 vtop->sym->v >= SYM_FIRST_ANOM &&
6162 /* XXX This rejects compound literals like
6163 '(void *){ptr}'. The problem is that '&sym' is
6164 represented the same way, which would be ruled out
6165 by the SYM_FIRST_ANOM check above, but also '"string"'
6166 in 'char *p = "string"' is represented the same
6167 with the type being VT_PTR and the symbol being an
6168 anonymous one. That is, there's no difference in vtop
6169 between '(void *){x}' and '&(void *){x}'. Ignore
6170 pointer-typed entities here. Hopefully no real code
6171 will ever use compound literals with scalar types. */
6172 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6173 /* These come from compound literals, memcpy stuff over. */
6174 Section *ssec;
6175 ElfW(Sym) *esym;
6176 ElfW_Rel *rel;
6177 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6178 ssec = tcc_state->sections[esym->st_shndx];
6179 memmove (ptr, ssec->data + esym->st_value, size);
6180 if (ssec->reloc) {
6181 /* We need to copy over all memory contents, and that
6182 includes relocations. Use the fact that relocs are
6183 created in order, so look from the end of the relocs
6184 until we hit one before the copied region. */
6185 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6186 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6187 while (num_relocs--) {
6188 rel--;
6189 if (rel->r_offset >= esym->st_value + size)
6190 continue;
6191 if (rel->r_offset < esym->st_value)
6192 break;
6193 /* Note: if the same fields are initialized multiple
6194 times (possible with designators) then we may add
6195 multiple relocations for the same offset here.
6196 That would lead to wrong code; the last reloc needs
6197 to win. We clean this up later after the whole
6198 initializer is parsed. */
6199 put_elf_reloca(symtab_section, sec,
6200 c + rel->r_offset - esym->st_value,
6201 ELFW(R_TYPE)(rel->r_info),
6202 ELFW(R_SYM)(rel->r_info),
6203 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6204 rel->r_addend
6205 #else
6207 #endif
6211 } else {
6212 if ((vtop->r & VT_SYM) &&
6213 (bt == VT_BYTE ||
6214 bt == VT_SHORT ||
6215 bt == VT_DOUBLE ||
6216 bt == VT_LDOUBLE ||
6217 #if PTR_SIZE == 8
6218 (bt == VT_LLONG && bit_size != 64) ||
6219 bt == VT_INT
6220 #else
6221 bt == VT_LLONG ||
6222 (bt == VT_INT && bit_size != 32)
6223 #endif
6225 tcc_error("initializer element is not computable at load time");
6226 switch(bt) {
6227 /* XXX: when cross-compiling we assume that each type has the
6228 same representation on host and target, which is likely to
6229 be wrong in the case of long double */
6230 case VT_BOOL:
6231 vtop->c.i = (vtop->c.i != 0);
6232 case VT_BYTE:
6233 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6234 break;
6235 case VT_SHORT:
6236 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6237 break;
6238 case VT_DOUBLE:
6239 *(double *)ptr = vtop->c.d;
6240 break;
6241 case VT_LDOUBLE:
6242 if (sizeof(long double) == LDOUBLE_SIZE)
6243 *(long double *)ptr = vtop->c.ld;
6244 else if (sizeof(double) == LDOUBLE_SIZE)
6245 *(double *)ptr = vtop->c.ld;
6246 else
6247 tcc_error("can't cross compile long double constants");
6248 break;
6249 #if PTR_SIZE != 8
6250 case VT_LLONG:
6251 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6252 break;
6253 #else
6254 case VT_LLONG:
6255 #endif
6256 case VT_PTR:
6258 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6259 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6260 if (vtop->r & VT_SYM)
6261 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6262 else
6263 *(addr_t *)ptr |= val;
6264 #else
6265 if (vtop->r & VT_SYM)
6266 greloc(sec, vtop->sym, c, R_DATA_PTR);
6267 *(addr_t *)ptr |= val;
6268 #endif
6269 break;
6271 default:
6273 int val = (vtop->c.i & bit_mask) << bit_pos;
6274 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6275 if (vtop->r & VT_SYM)
6276 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6277 else
6278 *(int *)ptr |= val;
6279 #else
6280 if (vtop->r & VT_SYM)
6281 greloc(sec, vtop->sym, c, R_DATA_PTR);
6282 *(int *)ptr |= val;
6283 #endif
6284 break;
6288 vtop--;
6289 } else {
6290 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6291 vswap();
6292 vstore();
6293 vpop();
6297 /* put zeros for variable based init */
6298 static void init_putz(Section *sec, unsigned long c, int size)
6300 if (sec) {
6301 /* nothing to do because globals are already set to zero */
6302 } else {
6303 vpush_global_sym(&func_old_type, TOK_memset);
6304 vseti(VT_LOCAL, c);
6305 #ifdef TCC_TARGET_ARM
6306 vpushs(size);
6307 vpushi(0);
6308 #else
6309 vpushi(0);
6310 vpushs(size);
6311 #endif
6312 gfunc_call(3);
6316 /* 't' contains the type and storage info. 'c' is the offset of the
6317 object in section 'sec'. If 'sec' is NULL, it means stack based
6318 allocation. 'first' is true if array '{' must be read (multi
6319 dimension implicit array init handling). 'size_only' is true if
6320 size only evaluation is wanted (only for arrays). */
6321 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6322 int first, int size_only)
6324 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6325 int size1, align1;
6326 int have_elem;
6327 Sym *s, *f;
6328 Sym indexsym;
6329 CType *t1;
6331 /* If we currently are at an '}' or ',' we have read an initializer
6332 element in one of our callers, and not yet consumed it. */
6333 have_elem = tok == '}' || tok == ',';
6334 if (!have_elem && tok != '{' &&
6335 /* In case of strings we have special handling for arrays, so
6336 don't consume them as initializer value (which would commit them
6337 to some anonymous symbol). */
6338 tok != TOK_LSTR && tok != TOK_STR &&
6339 !size_only) {
6340 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6341 have_elem = 1;
6344 if (have_elem &&
6345 !(type->t & VT_ARRAY) &&
6346 /* Use is_compatible_parameter_types() to strip top-level qualifiers.
6347 The source type might have VT_CONSTANT set, which is
6348 of course assignable to non-const elements. */
6349 is_compatible_parameter_types(type, &vtop->type)) {
6350 init_putv(type, sec, c);
6351 } else if (type->t & VT_ARRAY) {
6352 s = type->ref;
6353 n = s->c;
6354 array_length = 0;
6355 t1 = pointed_type(type);
6356 size1 = type_size(t1, &align1);
6358 no_oblock = 1;
6359 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6360 tok == '{') {
6361 if (tok != '{')
6362 tcc_error("character array initializer must be a literal,"
6363 " optionally enclosed in braces");
6364 skip('{');
6365 no_oblock = 0;
6368 /* only parse strings here if the type is correct (otherwise handle
6369 them as ((w)char *) expressions) */
6370 if ((tok == TOK_LSTR &&
6371 #ifdef TCC_TARGET_PE
6372 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6373 #else
6374 (t1->t & VT_BTYPE) == VT_INT
6375 #endif
6376 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6377 while (tok == TOK_STR || tok == TOK_LSTR) {
6378 int cstr_len, ch;
6380 /* compute maximum number of chars wanted */
6381 if (tok == TOK_STR)
6382 cstr_len = tokc.str.size;
6383 else
6384 cstr_len = tokc.str.size / sizeof(nwchar_t);
6385 cstr_len--;
6386 nb = cstr_len;
6387 if (n >= 0 && nb > (n - array_length))
6388 nb = n - array_length;
6389 if (!size_only) {
6390 if (cstr_len > nb)
6391 tcc_warning("initializer-string for array is too long");
6392 /* in order to go faster for the common case (char
6393 string in a global variable), we handle it
6394 specifically */
6395 if (sec && tok == TOK_STR && size1 == 1) {
6396 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6397 } else {
6398 for(i=0;i<nb;i++) {
6399 if (tok == TOK_STR)
6400 ch = ((unsigned char *)tokc.str.data)[i];
6401 else
6402 ch = ((nwchar_t *)tokc.str.data)[i];
6403 vpushi(ch);
6404 init_putv(t1, sec, c + (array_length + i) * size1);
6408 array_length += nb;
6409 next();
6411 /* only add trailing zero if enough storage (no
6412 warning in this case since it is standard) */
6413 if (n < 0 || array_length < n) {
6414 if (!size_only) {
6415 vpushi(0);
6416 init_putv(t1, sec, c + (array_length * size1));
6418 array_length++;
6420 } else {
6421 indexsym.c = 0;
6422 indexsym.r = 0;
6423 f = &indexsym;
6425 do_init_list:
6426 while (tok != '}' || have_elem) {
6427 decl_designator(type, sec, c, &f, size_only);
6428 have_elem = 0;
6429 index = f->c;
6430 /* must put zero in holes (note that doing it that way
6431 ensures that it even works with designators) */
6432 if (!size_only && array_length < index) {
6433 init_putz(sec, c + array_length * size1,
6434 (index - array_length) * size1);
6436 if (type->t & VT_ARRAY) {
6437 index = indexsym.c = ++indexsym.r;
6438 } else {
6439 index = index + type_size(&f->type, &align1);
6440 if (s->type.t == TOK_UNION)
6441 f = NULL;
6442 else
6443 f = f->next;
6445 if (index > array_length)
6446 array_length = index;
6448 if (type->t & VT_ARRAY) {
6449 /* special test for multi dimensional arrays (may not
6450 be strictly correct if designators are used at the
6451 same time) */
6452 if (no_oblock && index >= n)
6453 break;
6454 } else {
6455 if (no_oblock && f == NULL)
6456 break;
6458 if (tok == '}')
6459 break;
6460 skip(',');
6463 /* put zeros at the end */
6464 if (!size_only && array_length < n) {
6465 init_putz(sec, c + array_length * size1,
6466 (n - array_length) * size1);
6468 if (!no_oblock)
6469 skip('}');
6470 /* patch type size if needed, which happens only for array types */
6471 if (n < 0)
6472 s->c = array_length;
6473 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6474 size1 = 1;
6475 no_oblock = 1;
6476 if (first || tok == '{') {
6477 skip('{');
6478 no_oblock = 0;
6480 s = type->ref;
6481 f = s->next;
6482 array_length = 0;
6483 n = s->c;
6484 goto do_init_list;
6485 } else if (tok == '{') {
6486 next();
6487 decl_initializer(type, sec, c, first, size_only);
6488 skip('}');
6489 } else if (size_only) {
6490 /* If we supported only ISO C we wouldn't have to accept calling
6491 this on anything other than an array with size_only==1 (and even then
6492 only at the outermost level, so no recursion would be needed),
6493 because initializing a flexible array member isn't supported.
6494 But GNU C supports it, so we need to recurse even into
6495 subfields of structs and arrays when size_only is set. */
6496 /* just skip expression */
6497 parlevel = parlevel1 = 0;
6498 while ((parlevel > 0 || parlevel1 > 0 ||
6499 (tok != '}' && tok != ',')) && tok != -1) {
6500 if (tok == '(')
6501 parlevel++;
6502 else if (tok == ')') {
6503 if (parlevel == 0 && parlevel1 == 0)
6504 break;
6505 parlevel--;
6507 else if (tok == '{')
6508 parlevel1++;
6509 else if (tok == '}') {
6510 if (parlevel == 0 && parlevel1 == 0)
6511 break;
6512 parlevel1--;
6514 next();
6516 } else {
6517 if (!have_elem) {
6518 /* This should happen only when we haven't parsed
6519 the init element above for fear of committing a
6520 string constant to memory too early. */
6521 if (tok != TOK_STR && tok != TOK_LSTR)
6522 expect("string constant");
6523 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6525 init_putv(type, sec, c);
6529 /* parse an initializer for type 't' if 'has_init' is non-zero, and
6530 allocate space in local or global data space ('r' is either
6531 VT_LOCAL or VT_CONST). If 'v' is non-zero, then an associated
6532 variable 'v' of scope 'scope' is declared before the initializers
6533 are parsed. If 'v' is zero, then a reference to the new object
6534 is pushed on the value stack. If 'has_init' is 2, special parsing
6535 is done to handle string constants. */
6536 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6537 int has_init, int v, int scope)
6539 int size, align, addr, data_offset;
6540 int level;
6541 ParseState saved_parse_state = {0};
6542 TokenString *init_str = NULL;
6543 Section *sec;
6544 Sym *flexible_array;
6546 flexible_array = NULL;
6547 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6548 Sym *field = type->ref->next;
6549 if (field) {
6550 while (field->next)
6551 field = field->next;
6552 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6553 flexible_array = field;
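 /* Added note (illustrative): a flexible array member looks like
        struct msg { int len; char data[]; };
        struct msg m = { 3, "hi" };   // initializing data[] is a GNU ext
    its size is folded into the allocation below once the initializer
    has been parsed. */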
6557 size = type_size(type, &align);
6558 /* If the size is unknown, we must evaluate it before
6559 evaluating the initializers because
6560 initializers can generate global data too
6561 (e.g. string pointers or ISOC99 compound
6562 literals). It also simplifies the handling
6563 of local initializers */
6564 if (size < 0 || (flexible_array && has_init)) {
6565 if (!has_init)
6566 tcc_error("unknown type size");
6567 /* get all init string */
6568 init_str = tok_str_alloc();
6569 if (has_init == 2) {
6570 /* only get strings */
6571 while (tok == TOK_STR || tok == TOK_LSTR) {
6572 tok_str_add_tok(init_str);
6573 next();
6575 } else {
6576 level = 0;
6577 while (level > 0 || (tok != ',' && tok != ';')) {
6578 if (tok < 0)
6579 tcc_error("unexpected end of file in initializer");
6580 tok_str_add_tok(init_str);
6581 if (tok == '{')
6582 level++;
6583 else if (tok == '}') {
6584 level--;
6585 if (level <= 0) {
6586 next();
6587 break;
6590 next();
6593 tok_str_add(init_str, -1);
6594 tok_str_add(init_str, 0);
6596 /* compute size */
6597 save_parse_state(&saved_parse_state);
6599 begin_macro(init_str, 1);
6600 next();
6601 decl_initializer(type, NULL, 0, 1, 1);
6602 /* prepare second initializer parsing */
6603 macro_ptr = init_str->str;
6604 next();
6606 /* if still unknown size, error */
6607 size = type_size(type, &align);
6608 if (size < 0)
6609 tcc_error("unknown type size");
6611 /* If there's a flexible array member and it was used in the
6612 initializer, adjust the size. */
6613 if (flexible_array &&
6614 flexible_array->type.ref->c > 0)
6615 size += flexible_array->type.ref->c
6616 * pointed_size(&flexible_array->type);
6617 /* take into account specified alignment if bigger */
6618 if (ad->a.aligned) {
6619 int speca = 1 << (ad->a.aligned - 1);
6620 if (speca > align)
6621 align = speca;
6622 } else if (ad->a.packed) {
6623 align = 1;
6625 if ((r & VT_VALMASK) == VT_LOCAL) {
6626 sec = NULL;
6627 #ifdef CONFIG_TCC_BCHECK
6628 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6629 loc--;
6630 }
6631 #endif
6632 loc = (loc - size) & -align;
6633 addr = loc;
6634 #ifdef CONFIG_TCC_BCHECK
6635 /* handles bounds */
6636 /* XXX: currently, since we do only one pass, we cannot track
6637 '&' operators, so we add only arrays */
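/* Example: 'int buf[8];' gets a local bound entry here, whereas a scalar
whose address is taken later ('int x; ... &x') is not tracked, exactly
because of the single-pass limitation noted above. */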
6638 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6639 addr_t *bounds_ptr;
6640 /* add padding between regions */
6641 loc--;
6642 /* then add local bound info */
6643 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6644 bounds_ptr[0] = addr;
6645 bounds_ptr[1] = size;
6646 }
6647 #endif
6648 if (v) {
6649 /* local variable */
6650 #ifdef CONFIG_TCC_ASM
6651 if (ad->asm_label) {
6652 int reg = asm_parse_regvar(ad->asm_label);
6653 if (reg >= 0)
6654 r = (r & ~VT_VALMASK) | reg;
6655 }
6656 #endif
6657 sym_push(v, type, r, addr);
6658 } else {
6659 /* push local reference */
6660 vset(type, r, addr);
6661 }
6662 } else {
6663 Sym *sym;
6665 sym = NULL;
6666 if (v && scope == VT_CONST) {
6667 /* see if the symbol was already defined */
6668 sym = sym_find(v);
6669 if (sym) {
6670 if (!is_compatible_types(&sym->type, type))
6671 tcc_error("incompatible types for redefinition of '%s'",
6672 get_tok_str(v, NULL));
6673 if (sym->type.t & VT_EXTERN) {
6674 /* if the variable is extern, it was not allocated */
6675 sym->type.t &= ~VT_EXTERN;
6676 /* set array size if it was omitted in extern
6677 declaration */
6678 if ((sym->type.t & VT_ARRAY) &&
6679 sym->type.ref->c < 0 &&
6680 type->ref->c >= 0)
6681 sym->type.ref->c = type->ref->c;
6682 } else {
6683 /* we accept several definitions of the same
6684 global variable. this is tricky, because we
6685 must play with the SHN_COMMON type of the symbol */
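/* Example: a tentative definition such as 'int x;' may appear both here
and in another compilation unit; each one becomes an SHN_COMMON symbol
and the linker merges them into a single object. */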
6686 /* XXX: should check if the variable was already
6687 initialized. It is incorrect to initialize it
6688 twice */
6689 /* no init data, we won't add more to the symbol */
6690 if (!has_init)
6691 goto no_alloc;
6692 }
6693 }
6694 }
6696 /* allocate symbol in corresponding section */
6697 sec = ad->section;
6698 if (!sec) {
6699 if (has_init)
6700 sec = data_section;
6701 else if (tcc_state->nocommon)
6702 sec = bss_section;
6703 }
6704 if (sec) {
6705 data_offset = sec->data_offset;
6706 data_offset = (data_offset + align - 1) & -align;
6707 addr = data_offset;
6708 /* very important to increment global pointer at this time
6709 because initializers themselves can create new initializers */
6710 data_offset += size;
6711 #ifdef CONFIG_TCC_BCHECK
6712 /* add padding if bound check */
6713 if (tcc_state->do_bounds_check)
6714 data_offset++;
6715 #endif
6716 sec->data_offset = data_offset;
6717 /* allocate section space to put the data */
6718 if (sec->sh_type != SHT_NOBITS &&
6719 data_offset > sec->data_allocated)
6720 section_realloc(sec, data_offset);
6721 /* align section if needed */
6722 if (align > sec->sh_addralign)
6723 sec->sh_addralign = align;
6724 } else {
6725 addr = 0; /* avoid warning */
6726 }
6728 if (v) {
6729 if (scope != VT_CONST || !sym) {
6730 sym = sym_push(v, type, r | VT_SYM, 0);
6731 sym->asm_label = ad->asm_label;
6732 }
6733 /* update symbol definition */
6734 if (sec) {
6735 put_extern_sym(sym, sec, addr, size);
6736 } else {
6737 ElfW(Sym) *esym;
6738 /* put a common area */
6739 put_extern_sym(sym, NULL, align, size);
6740 /* XXX: find a nicer way */
6741 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6742 esym->st_shndx = SHN_COMMON;
6743 }
6744 } else {
6745 /* push global reference */
6746 sym = get_sym_ref(type, sec, addr, size);
6747 vpushsym(type, sym);
6748 }
6749 /* patch symbol weakness */
6750 if (type->t & VT_WEAK)
6751 weaken_symbol(sym);
6752 apply_visibility(sym, type);
6753 #ifdef CONFIG_TCC_BCHECK
6754 /* handles bounds now because the symbol must be defined
6755 before for the relocation */
6756 if (tcc_state->do_bounds_check) {
6757 addr_t *bounds_ptr;
6759 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6760 /* then add global bound info */
6761 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6762 bounds_ptr[0] = 0; /* relocated */
6763 bounds_ptr[1] = size;
6764 }
6765 #endif
6766 }
6767 if (type->t & VT_VLA) {
6768 int a;
6770 /* save current stack pointer */
6771 if (vlas_in_scope == 0) {
6772 if (vla_sp_root_loc == -1)
6773 vla_sp_root_loc = (loc -= PTR_SIZE);
6774 gen_vla_sp_save(vla_sp_root_loc);
6775 }
6777 vla_runtime_type_size(type, &a);
6778 gen_vla_alloc(type, a);
6779 gen_vla_sp_save(addr);
6780 vla_sp_loc = addr;
6781 vlas_in_scope++;
6782 } else if (has_init) {
6783 size_t oldreloc_offset = 0;
6784 if (sec && sec->reloc)
6785 oldreloc_offset = sec->reloc->data_offset;
6786 decl_initializer(type, sec, addr, 1, 0);
6787 if (sec && sec->reloc)
6788 squeeze_multi_relocs(sec, oldreloc_offset);
6789 /* patch flexible array member size back to -1, */
6790 /* for possible subsequent similar declarations */
6791 if (flexible_array)
6792 flexible_array->type.ref->c = -1;
6793 }
6794 no_alloc: ;
6795 /* restore parse state if needed */
6796 if (init_str) {
6797 end_macro();
6798 restore_parse_state(&saved_parse_state);
6799 }
6800 }
6802 static void put_func_debug(Sym *sym)
6803 {
6804 char buf[512];
6806 /* stabs info */
6807 /* XXX: we put here a dummy type */
6808 snprintf(buf, sizeof(buf), "%s:%c1",
6809 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6810 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6811 cur_text_section, sym->c);
6812 /* //gr gdb wants a line at the function */
6813 put_stabn(N_SLINE, 0, file->line_num, 0);
6814 last_ind = 0;
6815 last_line_num = 0;
6816 }
6818 /* parse an old style function declaration list */
6819 /* XXX: check multiple parameters */
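/* Example: in the K&R definition 'int f(a, b) int a; char *b; { ... }'
the declarations between ')' and '{' are parsed here and attached to the
matching parameters recorded in 'func_sym'. */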
6820 static void func_decl_list(Sym *func_sym)
6821 {
6822 AttributeDef ad;
6823 int v;
6824 Sym *s;
6825 CType btype, type;
6827 /* parse each declaration */
6828 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6829 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6830 if (!parse_btype(&btype, &ad))
6831 expect("declaration list");
6832 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6833 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6834 tok == ';') {
6835 /* we accept no variable after */
6836 } else {
6837 for(;;) {
6838 type = btype;
6839 type_decl(&type, &ad, &v, TYPE_DIRECT);
6840 /* find parameter in function parameter list */
6841 s = func_sym->next;
6842 while (s != NULL) {
6843 if ((s->v & ~SYM_FIELD) == v)
6844 goto found;
6845 s = s->next;
6846 }
6847 tcc_error("declaration for parameter '%s' but no such parameter",
6848 get_tok_str(v, NULL));
6849 found:
6850 /* check that no storage specifier except 'register' was given */
6851 if (type.t & VT_STORAGE)
6852 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6853 convert_parameter_type(&type);
6854 /* we can add the type (NOTE: it could be local to the function) */
6855 s->type = type;
6856 /* accept other parameters */
6857 if (tok == ',')
6858 next();
6859 else
6860 break;
6861 }
6862 }
6863 skip(';');
6864 }
6865 }
6867 /* parse a function defined by symbol 'sym' and generate its code in
6868 'cur_text_section' */
6869 static void gen_function(Sym *sym)
6870 {
6871 int saved_nocode_wanted = nocode_wanted;
6873 nocode_wanted = 0;
6874 ind = cur_text_section->data_offset;
6875 /* NOTE: we patch the symbol size later */
6876 put_extern_sym(sym, cur_text_section, ind, 0);
6877 funcname = get_tok_str(sym->v, NULL);
6878 func_ind = ind;
6879 /* Initialize VLA state */
6880 vla_sp_loc = -1;
6881 vla_sp_root_loc = -1;
6882 /* put debug symbol */
6883 if (tcc_state->do_debug)
6884 put_func_debug(sym);
6886 /* push a dummy symbol to enable local sym storage */
6887 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6888 local_scope = 1; /* for function parameters */
6889 gfunc_prolog(&sym->type);
6890 local_scope = 0;
6892 rsym = 0;
6893 block(NULL, NULL, 0);
6894 gsym(rsym);
6895 gfunc_epilog();
6896 cur_text_section->data_offset = ind;
6897 label_pop(&global_label_stack, NULL);
6898 /* reset local stack */
6899 local_scope = 0;
6900 sym_pop(&local_stack, NULL, 0);
6901 /* end of function */
6902 /* patch symbol size */
6903 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6904 ind - func_ind;
6905 /* patch symbol weakness (this definition overrules any prototype) */
6906 if (sym->type.t & VT_WEAK)
6907 weaken_symbol(sym);
6908 apply_visibility(sym, &sym->type);
6909 if (tcc_state->do_debug) {
6910 put_stabn(N_FUN, 0, 0, ind - func_ind);
6911 }
6912 /* It's better to crash than to generate wrong code */
6913 cur_text_section = NULL;
6914 funcname = ""; /* for safety */
6915 func_vt.t = VT_VOID; /* for safety */
6916 func_var = 0; /* for safety */
6917 ind = 0; /* for safety */
6918 nocode_wanted = saved_nocode_wanted;
6919 check_vstack();
6920 }
6922 static void gen_inline_functions(TCCState *s)
6923 {
6924 Sym *sym;
6925 int inline_generated, i, ln;
6926 struct InlineFunc *fn;
6928 ln = file->line_num;
6929 /* iterate while inline functions are referenced */
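/* one pass can itself mark further inline functions as used (an inline
function calling another one), so keep scanning until a full pass
generates nothing new */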
6930 for(;;) {
6931 inline_generated = 0;
6932 for (i = 0; i < s->nb_inline_fns; ++i) {
6933 fn = s->inline_fns[i];
6934 sym = fn->sym;
6935 if (sym && sym->c) {
6936 /* the function was used: generate its code and
6937 convert it to a normal function */
6938 fn->sym = NULL;
6939 if (file)
6940 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6941 sym->r = VT_SYM | VT_CONST;
6942 sym->type.t &= ~VT_INLINE;
6944 begin_macro(fn->func_str, 1);
6945 next();
6946 cur_text_section = text_section;
6947 gen_function(sym);
6948 end_macro();
6950 inline_generated = 1;
6951 }
6952 }
6953 if (!inline_generated)
6954 break;
6955 }
6956 file->line_num = ln;
6957 }
6959 ST_FUNC void free_inline_functions(TCCState *s)
6960 {
6961 int i;
6962 /* free tokens of unused inline functions */
6963 for (i = 0; i < s->nb_inline_fns; ++i) {
6964 struct InlineFunc *fn = s->inline_fns[i];
6965 if (fn->sym)
6966 tok_str_free(fn->func_str);
6967 }
6968 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6969 }
6971 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6972 static int decl0(int l, int is_for_loop_init)
6973 {
6974 int v, has_init, r;
6975 CType type, btype;
6976 Sym *sym;
6977 AttributeDef ad;
6979 while (1) {
6980 if (!parse_btype(&btype, &ad)) {
6981 if (is_for_loop_init)
6982 return 0;
6983 /* skip redundant ';' */
6984 /* XXX: find more elegant solution */
6985 if (tok == ';') {
6986 next();
6987 continue;
6988 }
6989 if (l == VT_CONST &&
6990 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6991 /* global asm block */
6992 asm_global_instr();
6993 continue;
6994 }
6995 /* special test for old K&R protos without explicit int
6996 type. Only accepted when defining global data */
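/* Example: a file-scope definition like 'f() { return 0; }' is accepted
and treated as 'int f()'; at local scope, or if the next token is not an
identifier, we stop parsing declarations instead. */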
6997 if (l == VT_LOCAL || tok < TOK_UIDENT)
6998 break;
6999 btype.t = VT_INT;
7000 }
7001 if (((btype.t & VT_BTYPE) == VT_ENUM ||
7002 (btype.t & VT_BTYPE) == VT_STRUCT) &&
7003 tok == ';') {
7004 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7005 int v = btype.ref->v;
7006 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7007 tcc_warning("unnamed struct/union that defines no instances");
7008 }
7009 next();
7010 continue;
7011 }
7012 while (1) { /* iterate thru each declaration */
7013 type = btype;
7014 /* If the base type itself was an array type of unspecified
7015 size (like in 'typedef int arr[]; arr x = {1};') then
7016 we will overwrite the unknown size by the real one for
7017 this decl. We need to unshare the ref symbol holding
7018 that size. */
7019 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7020 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7021 }
7022 type_decl(&type, &ad, &v, TYPE_DIRECT);
7023 #if 0
7024 {
7025 char buf[500];
7026 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7027 printf("type = '%s'\n", buf);
7028 }
7029 #endif
7030 if ((type.t & VT_BTYPE) == VT_FUNC) {
7031 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7032 tcc_error("function without file scope cannot be static");
7033 }
7034 /* if old style function prototype, we accept a
7035 declaration list */
7036 sym = type.ref;
7037 if (sym->c == FUNC_OLD)
7038 func_decl_list(sym);
7039 }
7041 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7042 ad.asm_label = asm_label_instr();
7043 /* parse one last attribute list, after asm label */
7044 parse_attribute(&ad);
7045 if (tok == '{')
7046 expect(";");
7047 }
7049 if (ad.a.weak)
7050 type.t |= VT_WEAK;
7051 #ifdef TCC_TARGET_PE
7052 if (ad.a.func_import)
7053 type.t |= VT_IMPORT;
7054 if (ad.a.func_export)
7055 type.t |= VT_EXPORT;
7056 #endif
7057 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7059 if (tok == '{') {
7060 if (l == VT_LOCAL)
7061 tcc_error("cannot use local functions");
7062 if ((type.t & VT_BTYPE) != VT_FUNC)
7063 expect("function definition");
7065 /* reject abstract declarators in function definition */
7066 sym = type.ref;
7067 while ((sym = sym->next) != NULL)
7068 if (!(sym->v & ~SYM_FIELD))
7069 expect("identifier");
7071 /* XXX: cannot do better now: convert extern inline to static inline */
7072 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7073 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7075 sym = sym_find(v);
7076 if (sym) {
7077 Sym *ref;
7078 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7079 goto func_error1;
7081 ref = sym->type.ref;
7082 if (0 == ref->a.func_proto)
7083 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7085 /* use func_call from prototype if not defined */
7086 if (ref->a.func_call != FUNC_CDECL
7087 && type.ref->a.func_call == FUNC_CDECL)
7088 type.ref->a.func_call = ref->a.func_call;
7090 /* use export from prototype */
7091 if (ref->a.func_export)
7092 type.ref->a.func_export = 1;
7094 /* use static from prototype */
7095 if (sym->type.t & VT_STATIC)
7096 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7098 /* If the definition has no visibility use the
7099 one from prototype. */
7100 if (! (type.t & VT_VIS_MASK))
7101 type.t |= sym->type.t & VT_VIS_MASK;
7103 if (!is_compatible_types(&sym->type, &type)) {
7104 func_error1:
7105 tcc_error("incompatible types for redefinition of '%s'",
7106 get_tok_str(v, NULL));
7107 }
7108 type.ref->a.func_proto = 0;
7109 /* if symbol is already defined, then put complete type */
7110 sym->type = type;
7111 } else {
7112 /* put function symbol */
7113 sym = global_identifier_push(v, type.t, 0);
7114 sym->type.ref = type.ref;
7115 }
7117 /* static inline functions are just recorded as a kind
7118 of macro. Their code will be emitted at the end of
7119 the compilation unit only if they are used */
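/* Example: 'static inline int twice(int x) { return x + x; }' is only
tokenized and stored below; gen_inline_functions() emits its code at the
end of the compilation unit, and only if something referenced it. */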
7120 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7121 (VT_INLINE | VT_STATIC)) {
7122 int block_level;
7123 struct InlineFunc *fn;
7124 const char *filename;
7126 filename = file ? file->filename : "";
7127 fn = tcc_malloc(sizeof *fn + strlen(filename));
7128 strcpy(fn->filename, filename);
7129 fn->sym = sym;
7130 fn->func_str = tok_str_alloc();
7132 block_level = 0;
7133 for(;;) {
7134 int t;
7135 if (tok == TOK_EOF)
7136 tcc_error("unexpected end of file");
7137 tok_str_add_tok(fn->func_str);
7138 t = tok;
7139 next();
7140 if (t == '{') {
7141 block_level++;
7142 } else if (t == '}') {
7143 block_level--;
7144 if (block_level == 0)
7145 break;
7146 }
7147 }
7148 tok_str_add(fn->func_str, -1);
7149 tok_str_add(fn->func_str, 0);
7150 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7152 } else {
7153 /* compute text section */
7154 cur_text_section = ad.section;
7155 if (!cur_text_section)
7156 cur_text_section = text_section;
7157 sym->r = VT_SYM | VT_CONST;
7158 gen_function(sym);
7159 }
7160 break;
7161 } else {
7162 if (btype.t & VT_TYPEDEF) {
7163 /* save typedefed type */
7164 /* XXX: test storage specifiers ? */
7165 sym = sym_find(v);
7166 if (sym && sym->scope == local_scope) {
7167 if (!is_compatible_types(&sym->type, &type)
7168 || !(sym->type.t & VT_TYPEDEF))
7169 tcc_error("incompatible redefinition of '%s'",
7170 get_tok_str(v, NULL));
7171 sym->type = type;
7172 } else {
7173 sym = sym_push(v, &type, 0, 0);
7174 }
7175 sym->a = ad.a;
7176 sym->type.t |= VT_TYPEDEF;
7177 } else {
7178 r = 0;
7179 if ((type.t & VT_BTYPE) == VT_FUNC) {
7180 /* external function definition */
7181 /* specific case for func_call attribute */
7182 ad.a.func_proto = 1;
7183 type.ref->a = ad.a;
7184 } else if (!(type.t & VT_ARRAY)) {
7185 /* not lvalue if array */
7186 r |= lvalue_type(type.t);
7187 }
7188 has_init = (tok == '=');
7189 if (has_init && (type.t & VT_VLA))
7190 tcc_error("variable length array cannot be initialized");
7191 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7192 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7193 !has_init && l == VT_CONST && type.ref->c < 0)) {
7194 /* external variable or function */
7195 /* NOTE: as in GCC, uninitialized global static
7196 arrays of unspecified size are considered
7197 extern */
7198 sym = external_sym(v, &type, r);
7199 sym->asm_label = ad.asm_label;
7201 if (ad.alias_target) {
7202 Section tsec;
7203 ElfW(Sym) *esym;
7204 Sym *alias_target;
7206 alias_target = sym_find(ad.alias_target);
7207 if (!alias_target || !alias_target->c)
7208 tcc_error("unsupported forward __alias__ attribute");
7209 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7210 tsec.sh_num = esym->st_shndx;
7211 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7212 }
7213 } else {
7214 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7215 if (type.t & VT_STATIC)
7216 r |= VT_CONST;
7217 else
7218 r |= l;
7219 if (has_init)
7220 next();
7221 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7222 }
7223 }
7224 if (tok != ',') {
7225 if (is_for_loop_init)
7226 return 1;
7227 skip(';');
7228 break;
7229 }
7230 next();
7231 }
7232 ad.a.aligned = 0;
7233 }
7234 }
7235 return 0;
7236 }
7238 ST_FUNC void decl(int l)
7239 {
7240 decl0(l, 0);
7241 }
7243 /* ------------------------------------------------------------------------- */