Support large alignment requests
[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack offset of the variable holding the stack-pointer value to restore when the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
72 static void gen_cast(CType *type);
73 static inline CType *pointed_type(CType *type);
74 static int is_compatible_types(CType *type1, CType *type2);
75 static int parse_btype(CType *type, AttributeDef *ad);
76 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
77 static void parse_expr_type(CType *type);
78 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
79 static void block(int *bsym, int *csym, int is_expr);
80 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
81 static int decl0(int l, int is_for_loop_init);
82 static void expr_eq(void);
83 static void expr_lor_const(void);
84 static void unary_type(CType *type);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static void expr_type(CType *type);
90 static inline int64_t expr_const64(void);
91 ST_FUNC void vpush64(int ty, unsigned long long v);
92 ST_FUNC void vpush(CType *type);
93 ST_FUNC int gvtst(int inv, int t);
94 ST_FUNC int is_btype_size(int bt);
95 static void gen_inline_functions(TCCState *s);
97 ST_INLN int is_float(int t)
99 int bt;
100 bt = t & VT_BTYPE;
101 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
104 /* we use our own 'finite' function to avoid potential problems with
105 non-standard math libs */
106 /* XXX: endianness dependent */
107 ST_FUNC int ieee_finite(double d)
109 int p[4];
110 memcpy(p, &d, sizeof(double));
111 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
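/* How the bit trick works (little-endian layout, per the XXX note above):
   p[1] holds the sign and the 11 exponent bits of the double.  OR-ing with
   0x800fffff sets every bit except the exponent field, so the sum wraps to 0
   exactly when the exponent is all ones (Inf/NaN) and the shift yields 0;
   for any finite value bit 31 survives and the result is 1. */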
114 ST_FUNC void test_lvalue(void)
116 if (!(vtop->r & VT_LVAL))
117 expect("lvalue");
120 ST_FUNC void check_vstack(void)
122 if (pvtop != vtop)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
129 #if 0
130 void pv (const char *lbl, int a, int b)
132 int i;
133 for (i = a; i < a + b; ++i) {
134 SValue *p = &vtop[-i];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
139 #endif
141 /* ------------------------------------------------------------------------- */
142 ST_FUNC void tccgen_start(TCCState *s1)
144 cur_text_section = NULL;
145 funcname = "";
146 anon_sym = SYM_FIRST_ANOM;
147 section_sym = 0;
148 nocode_wanted = 1;
150 /* define some often used types */
151 int_type.t = VT_INT;
152 char_pointer_type.t = VT_BYTE;
153 mk_pointer(&char_pointer_type);
154 #if PTR_SIZE == 4
155 size_type.t = VT_INT;
156 #else
157 size_type.t = VT_LLONG;
158 #endif
159 func_old_type.t = VT_FUNC;
160 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
162 if (s1->do_debug) {
163 char buf[512];
165 /* file info: full path + filename */
166 section_sym = put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
168 text_section->sh_num, NULL);
169 getcwd(buf, sizeof(buf));
170 #ifdef _WIN32
171 normalize_slashes(buf);
172 #endif
173 pstrcat(buf, sizeof(buf), "/");
174 put_stabs_r(buf, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
176 put_stabs_r(file->filename, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
179 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
180 symbols can be safely used */
181 put_elf_sym(symtab_section, 0, 0,
182 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
183 SHN_ABS, file->filename);
185 #ifdef TCC_TARGET_ARM
186 arm_init(s1);
187 #endif
190 ST_FUNC void tccgen_end(TCCState *s1)
192 gen_inline_functions(s1);
193 check_vstack();
194 /* end of translation unit info */
195 if (s1->do_debug) {
196 put_stabs_r(NULL, N_SO, 0, 0,
197 text_section->data_offset, text_section, section_sym);
201 /* ------------------------------------------------------------------------- */
202 /* update sym->c so that it points to an external symbol in section
203 'section' with value 'value' */
205 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
206 addr_t value, unsigned long size,
207 int can_add_underscore)
209 int sym_type, sym_bind, sh_num, info, other;
210 ElfW(Sym) *esym;
211 const char *name;
212 char buf1[256];
214 #ifdef CONFIG_TCC_BCHECK
215 char buf[32];
216 #endif
218 if (section == NULL)
219 sh_num = SHN_UNDEF;
220 else if (section == SECTION_ABS)
221 sh_num = SHN_ABS;
222 else
223 sh_num = section->sh_num;
225 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
226 sym_type = STT_FUNC;
227 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
228 sym_type = STT_NOTYPE;
229 } else {
230 sym_type = STT_OBJECT;
233 if (sym->type.t & VT_STATIC)
234 sym_bind = STB_LOCAL;
235 else {
236 if (sym->type.t & VT_WEAK)
237 sym_bind = STB_WEAK;
238 else
239 sym_bind = STB_GLOBAL;
242 if (!sym->c) {
243 name = get_tok_str(sym->v, NULL);
244 #ifdef CONFIG_TCC_BCHECK
245 if (tcc_state->do_bounds_check) {
246 /* XXX: avoid doing that for statics ? */
247 /* if bounds checking is activated, we change some function
248 names by adding the "__bound" prefix */
249 switch(sym->v) {
250 #ifdef TCC_TARGET_PE
251 /* XXX: we rely only on malloc hooks */
252 case TOK_malloc:
253 case TOK_free:
254 case TOK_realloc:
255 case TOK_memalign:
256 case TOK_calloc:
257 #endif
258 case TOK_memcpy:
259 case TOK_memmove:
260 case TOK_memset:
261 case TOK_strlen:
262 case TOK_strcpy:
263 case TOK_alloca:
264 strcpy(buf, "__bound_");
265 strcat(buf, name);
266 name = buf;
267 break;
270 #endif
271 other = 0;
273 #ifdef TCC_TARGET_PE
274 if (sym->type.t & VT_EXPORT)
275 other |= ST_PE_EXPORT;
276 if (sym_type == STT_FUNC && sym->type.ref) {
277 Sym *ref = sym->type.ref;
278 if (ref->a.func_export)
279 other |= ST_PE_EXPORT;
280 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
281 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
282 name = buf1;
283 other |= ST_PE_STDCALL;
284 can_add_underscore = 0;
286 } else {
287 if (find_elf_sym(tcc_state->dynsymtab_section, name))
288 other |= ST_PE_IMPORT;
289 if (sym->type.t & VT_IMPORT)
290 other |= ST_PE_IMPORT;
292 #else
293 if (! (sym->type.t & VT_STATIC))
294 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
295 #endif
296 if (tcc_state->leading_underscore && can_add_underscore) {
297 buf1[0] = '_';
298 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
299 name = buf1;
301 if (sym->asm_label) {
302 name = get_tok_str(sym->asm_label, NULL);
304 info = ELFW(ST_INFO)(sym_bind, sym_type);
305 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
306 } else {
307 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
308 esym->st_value = value;
309 esym->st_size = size;
310 esym->st_shndx = sh_num;
314 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
315 addr_t value, unsigned long size)
317 put_extern_sym2(sym, section, value, size, 1);
320 /* add a new relocation entry to symbol 'sym' in section 's' */
321 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
322 addr_t addend)
324 int c = 0;
325 if (sym) {
326 if (0 == sym->c)
327 put_extern_sym(sym, NULL, 0, 0);
328 c = sym->c;
330 /* now we can add ELF relocation info */
331 put_elf_reloca(symtab_section, s, offset, type, c, addend);
334 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
336 greloca(s, sym, offset, type, 0);
339 /* ------------------------------------------------------------------------- */
340 /* symbol allocator */
341 static Sym *__sym_malloc(void)
343 Sym *sym_pool, *sym, *last_sym;
344 int i;
346 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
347 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
349 last_sym = sym_free_first;
350 sym = sym_pool;
351 for(i = 0; i < SYM_POOL_NB; i++) {
352 sym->next = last_sym;
353 last_sym = sym;
354 sym++;
356 sym_free_first = last_sym;
357 return last_sym;
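/* Symbols are carved out of pools of SYM_POOL_NB entries.  The loop above
   threads every entry of the fresh pool onto the free list headed by
   sym_free_first, so sym_malloc() below is a constant-time pop from that
   list and sym_free() a constant-time push back onto it. */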
360 static inline Sym *sym_malloc(void)
362 Sym *sym;
363 #ifndef SYM_DEBUG
364 sym = sym_free_first;
365 if (!sym)
366 sym = __sym_malloc();
367 sym_free_first = sym->next;
368 return sym;
369 #else
370 sym = tcc_malloc(sizeof(Sym));
371 return sym;
372 #endif
375 ST_INLN void sym_free(Sym *sym)
377 #ifndef SYM_DEBUG
378 sym->next = sym_free_first;
379 sym_free_first = sym;
380 #else
381 tcc_free(sym);
382 #endif
385 /* push, without hashing */
386 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
388 Sym *s;
390 s = sym_malloc();
391 s->scope = 0;
392 s->v = v;
393 s->type.t = t;
394 s->type.ref = NULL;
395 #ifdef _WIN64
396 s->d = NULL;
397 #endif
398 s->c = c;
399 s->next = NULL;
400 /* add in stack */
401 s->prev = *ps;
402 *ps = s;
403 return s;
406 /* find a symbol and return its associated structure. 's' is the top
407 of the symbol stack */
408 ST_FUNC Sym *sym_find2(Sym *s, int v)
410 while (s) {
411 if (s->v == v)
412 return s;
413 else if (s->v == -1)
414 return NULL;
415 s = s->prev;
417 return NULL;
420 /* structure lookup */
421 ST_INLN Sym *struct_find(int v)
423 v -= TOK_IDENT;
424 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
425 return NULL;
426 return table_ident[v]->sym_struct;
429 /* find an identifier */
430 ST_INLN Sym *sym_find(int v)
432 v -= TOK_IDENT;
433 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
434 return NULL;
435 return table_ident[v]->sym_identifier;
438 /* push a given symbol on the symbol stack */
439 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
441 Sym *s, **ps;
442 TokenSym *ts;
444 if (local_stack)
445 ps = &local_stack;
446 else
447 ps = &global_stack;
448 s = sym_push2(ps, v, type->t, c);
449 s->type.ref = type->ref;
450 s->r = r;
451 /* don't record fields or anonymous symbols */
452 /* XXX: simplify */
453 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
454 /* record symbol in token array */
455 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
456 if (v & SYM_STRUCT)
457 ps = &ts->sym_struct;
458 else
459 ps = &ts->sym_identifier;
460 s->prev_tok = *ps;
461 *ps = s;
462 s->scope = local_scope;
463 if (s->prev_tok && s->prev_tok->scope == s->scope)
464 tcc_error("redeclaration of '%s'",
465 get_tok_str(v & ~SYM_STRUCT, NULL));
467 return s;
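/* Note that a symbol ends up on two chains at once: ->prev links it on the
   global/local scope stack (unwound by sym_pop()), while ->prev_tok links it
   on the per-identifier chain hanging off table_ident[], which is what makes
   sym_find()/struct_find() a direct array lookup instead of a stack walk. */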
470 /* push a global identifier */
471 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
473 Sym *s, **ps;
474 s = sym_push2(&global_stack, v, t, c);
475 /* don't record anonymous symbol */
476 if (v < SYM_FIRST_ANOM) {
477 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
478 /* modify the top most local identifier, so that
479 sym_identifier will point to 's' when popped */
480 while (*ps != NULL)
481 ps = &(*ps)->prev_tok;
482 s->prev_tok = NULL;
483 *ps = s;
485 return s;
488 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
489 pop them yet from the list, but do remove them from the token array. */
490 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
492 Sym *s, *ss, **ps;
493 TokenSym *ts;
494 int v;
496 s = *ptop;
497 while(s != b) {
498 ss = s->prev;
499 v = s->v;
500 /* remove symbol in token array */
501 /* XXX: simplify */
502 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
503 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
504 if (v & SYM_STRUCT)
505 ps = &ts->sym_struct;
506 else
507 ps = &ts->sym_identifier;
508 *ps = s->prev_tok;
510 if (!keep)
511 sym_free(s);
512 s = ss;
514 if (!keep)
515 *ptop = b;
518 static void weaken_symbol(Sym *sym)
520 sym->type.t |= VT_WEAK;
521 if (sym->c > 0) {
522 int esym_type;
523 ElfW(Sym) *esym;
525 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
526 esym_type = ELFW(ST_TYPE)(esym->st_info);
527 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
531 static void apply_visibility(Sym *sym, CType *type)
533 int vis = sym->type.t & VT_VIS_MASK;
534 int vis2 = type->t & VT_VIS_MASK;
535 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
536 vis = vis2;
537 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
539 else
540 vis = (vis < vis2) ? vis : vis2;
541 sym->type.t &= ~VT_VIS_MASK;
542 sym->type.t |= vis;
544 if (sym->c > 0) {
545 ElfW(Sym) *esym;
547 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
548 vis >>= VT_VIS_SHIFT;
549 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
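/* The rule above: STV_DEFAULT on either side yields the other side's
   visibility; otherwise the numerically smaller STV_* value (the more
   restrictive one) wins.  If the ELF symbol already exists, its st_other
   field is patched in place as well. */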
553 /* ------------------------------------------------------------------------- */
555 ST_FUNC void swap(int *p, int *q)
557 int t;
558 t = *p;
559 *p = *q;
560 *q = t;
563 static void vsetc(CType *type, int r, CValue *vc)
565 int v;
567 if (vtop >= vstack + (VSTACK_SIZE - 1))
568 tcc_error("memory full (vstack)");
569 /* values living in the CPU flags cannot survive if other instructions
570 are generated. Also avoid leaving VT_JMP anywhere except on the top of
571 the stack because it would complicate the code generator. */
572 if (vtop >= vstack) {
573 v = vtop->r & VT_VALMASK;
574 if (v == VT_CMP || (v & ~1) == VT_JMP)
575 gv(RC_INT);
577 vtop++;
578 vtop->type = *type;
579 vtop->r = r;
580 vtop->r2 = VT_CONST;
581 vtop->c = *vc;
582 vtop->sym = NULL;
585 /* push a constant of type "type" with an unspecified value */
586 ST_FUNC void vpush(CType *type)
588 CValue cval;
589 vsetc(type, VT_CONST, &cval);
592 /* push integer constant */
593 ST_FUNC void vpushi(int v)
595 CValue cval;
596 cval.i = v;
597 vsetc(&int_type, VT_CONST, &cval);
600 /* push a pointer sized constant */
601 static void vpushs(addr_t v)
603 CValue cval;
604 cval.i = v;
605 vsetc(&size_type, VT_CONST, &cval);
608 /* push arbitrary 64bit constant */
609 ST_FUNC void vpush64(int ty, unsigned long long v)
611 CValue cval;
612 CType ctype;
613 ctype.t = ty;
614 ctype.ref = NULL;
615 cval.i = v;
616 vsetc(&ctype, VT_CONST, &cval);
619 /* push long long constant */
620 static inline void vpushll(long long v)
622 vpush64(VT_LLONG, v);
625 /* push a symbol value of TYPE */
626 static inline void vpushsym(CType *type, Sym *sym)
628 CValue cval;
629 cval.i = 0;
630 vsetc(type, VT_CONST | VT_SYM, &cval);
631 vtop->sym = sym;
634 /* Return a static symbol pointing to a section */
635 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
637 int v;
638 Sym *sym;
640 v = anon_sym++;
641 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
642 sym->type.ref = type->ref;
643 sym->r = VT_CONST | VT_SYM;
644 put_extern_sym(sym, sec, offset, size);
645 return sym;
648 /* push a reference to a section offset by adding a dummy symbol */
649 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
651 vpushsym(type, get_sym_ref(type, sec, offset, size));
654 /* define a new external reference to a symbol 'v' of type 'u' */
655 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
657 Sym *s;
659 s = sym_find(v);
660 if (!s) {
661 /* push forward reference */
662 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
663 s->type.ref = type->ref;
664 s->r = r | VT_CONST | VT_SYM;
666 return s;
669 /* define a new external reference to a symbol 'v' */
670 static Sym *external_sym(int v, CType *type, int r)
672 Sym *s;
674 s = sym_find(v);
675 if (!s) {
676 /* push forward reference */
677 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
678 s->type.t |= VT_EXTERN;
679 } else if (s->type.ref == func_old_type.ref) {
680 s->type.ref = type->ref;
681 s->r = r | VT_CONST | VT_SYM;
682 s->type.t |= VT_EXTERN;
683 } else if (!is_compatible_types(&s->type, type)) {
684 tcc_error("incompatible types for redefinition of '%s'",
685 get_tok_str(v, NULL));
687 /* Merge some storage attributes. */
688 if (type->t & VT_WEAK)
689 weaken_symbol(s);
691 if (type->t & VT_VIS_MASK)
692 apply_visibility(s, type);
694 return s;
697 /* push a reference to global symbol v */
698 ST_FUNC void vpush_global_sym(CType *type, int v)
700 vpushsym(type, external_global_sym(v, type, 0));
703 ST_FUNC void vset(CType *type, int r, long v)
705 CValue cval;
707 cval.i = v;
708 vsetc(type, r, &cval);
711 static void vseti(int r, int v)
713 CType type;
714 type.t = VT_INT;
715 type.ref = 0;
716 vset(&type, r, v);
719 ST_FUNC void vswap(void)
721 SValue tmp;
722 /* values living in the CPU flags cannot survive if other instructions
723 are generated. Also avoid leaving VT_JMP anywhere except on the top of
724 the stack because it would complicate the code generator. */
725 if (vtop >= vstack) {
726 int v = vtop->r & VT_VALMASK;
727 if (v == VT_CMP || (v & ~1) == VT_JMP)
728 gv(RC_INT);
730 tmp = vtop[0];
731 vtop[0] = vtop[-1];
732 vtop[-1] = tmp;
734 /* XXX: +2% overall speed possible with optimized memswap
736 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
740 ST_FUNC void vpushv(SValue *v)
742 if (vtop >= vstack + (VSTACK_SIZE - 1))
743 tcc_error("memory full (vstack)");
744 vtop++;
745 *vtop = *v;
748 static void vdup(void)
750 vpushv(vtop);
753 /* save registers up to (vtop - n) stack entry */
754 ST_FUNC void save_regs(int n)
756 SValue *p, *p1;
757 for(p = vstack, p1 = vtop - n; p <= p1; p++)
758 save_reg(p->r);
761 /* save r to the memory stack, and mark it as being free */
762 ST_FUNC void save_reg(int r)
764 save_reg_upstack(r, 0);
767 /* save r to the memory stack, and mark it as being free,
768 if seen up to (vtop - n) stack entry */
769 ST_FUNC void save_reg_upstack(int r, int n)
771 int l, saved, size, align;
772 SValue *p, *p1, sv;
773 CType *type;
775 if ((r &= VT_VALMASK) >= VT_CONST)
776 return;
778 /* modify all stack values */
779 saved = 0;
780 l = 0;
781 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
782 if ((p->r & VT_VALMASK) == r ||
783 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
784 /* must save value on stack if not already done */
785 if (!saved) {
786 /* NOTE: must reload 'r' because r might be equal to r2 */
787 r = p->r & VT_VALMASK;
788 /* store register in the stack */
789 type = &p->type;
790 if ((p->r & VT_LVAL) ||
791 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
792 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
793 type = &char_pointer_type;
794 #else
795 type = &int_type;
796 #endif
797 size = type_size(type, &align);
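/* allocate a properly aligned slot in the (downward growing) local frame:
   e.g. with loc == -12, size == 8, align == 8 this gives (-12 - 8) & -8 == -24 */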
798 loc = (loc - size) & -align;
799 sv.type.t = type->t;
800 sv.r = VT_LOCAL | VT_LVAL;
801 sv.c.i = loc;
802 store(r, &sv);
803 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
804 /* x86 specific: need to pop fp register ST0 if saved */
805 if (r == TREG_ST0) {
806 o(0xd8dd); /* fstp %st(0) */
808 #endif
809 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
810 /* special long long case */
811 if ((type->t & VT_BTYPE) == VT_LLONG) {
812 sv.c.i += 4;
813 store(p->r2, &sv);
815 #endif
816 l = loc;
817 saved = 1;
819 /* mark that stack entry as being saved on the stack */
820 if (p->r & VT_LVAL) {
821 /* also clear the bounded flag because the
822 relocation address of the function was stored in
823 p->c.i */
824 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
825 } else {
826 p->r = lvalue_type(p->type.t) | VT_LOCAL;
828 p->r2 = VT_CONST;
829 p->c.i = l;
834 #ifdef TCC_TARGET_ARM
835 /* find a register of class 'rc2' with at most one reference on stack.
836 * If none, call get_reg(rc) */
837 ST_FUNC int get_reg_ex(int rc, int rc2)
839 int r;
840 SValue *p;
842 for(r=0;r<NB_REGS;r++) {
843 if (reg_classes[r] & rc2) {
844 int n;
845 n=0;
846 for(p = vstack; p <= vtop; p++) {
847 if ((p->r & VT_VALMASK) == r ||
848 (p->r2 & VT_VALMASK) == r)
849 n++;
851 if (n <= 1)
852 return r;
855 return get_reg(rc);
857 #endif
859 /* find a free register of class 'rc'. If none, save one register */
860 ST_FUNC int get_reg(int rc)
862 int r;
863 SValue *p;
865 /* find a free register */
866 for(r=0;r<NB_REGS;r++) {
867 if (reg_classes[r] & rc) {
868 for(p=vstack;p<=vtop;p++) {
869 if ((p->r & VT_VALMASK) == r ||
870 (p->r2 & VT_VALMASK) == r)
871 goto notfound;
873 return r;
875 notfound: ;
878 /* no register left : free the first one on the stack (VERY
879 IMPORTANT to start from the bottom to ensure that we don't
880 spill registers used in gen_opi()) */
881 for(p=vstack;p<=vtop;p++) {
882 /* look at second register (if long long) */
883 r = p->r2 & VT_VALMASK;
884 if (r < VT_CONST && (reg_classes[r] & rc))
885 goto save_found;
886 r = p->r & VT_VALMASK;
887 if (r < VT_CONST && (reg_classes[r] & rc)) {
888 save_found:
889 save_reg(r);
890 return r;
893 /* Should never come here */
894 return -1;
897 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
898 if needed */
899 static void move_reg(int r, int s, int t)
901 SValue sv;
903 if (r != s) {
904 save_reg(r);
905 sv.type.t = t;
906 sv.type.ref = NULL;
907 sv.r = s;
908 sv.c.i = 0;
909 load(r, &sv);
913 /* get address of vtop (vtop MUST BE an lvalue) */
914 ST_FUNC void gaddrof(void)
916 if (vtop->r & VT_REF && !nocode_wanted)
917 gv(RC_INT);
918 vtop->r &= ~VT_LVAL;
919 /* tricky: if saved lvalue, then we can go back to lvalue */
920 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
921 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
926 #ifdef CONFIG_TCC_BCHECK
927 /* generate lvalue bound code */
928 static void gbound(void)
930 int lval_type;
931 CType type1;
933 vtop->r &= ~VT_MUSTBOUND;
934 /* if lvalue, then use checking code before dereferencing */
935 if (vtop->r & VT_LVAL) {
936 /* if not VT_BOUNDED value, then make one */
937 if (!(vtop->r & VT_BOUNDED)) {
938 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
939 /* must save type because we must set it to int to get pointer */
940 type1 = vtop->type;
941 vtop->type.t = VT_PTR;
942 gaddrof();
943 vpushi(0);
944 gen_bounded_ptr_add();
945 vtop->r |= lval_type;
946 vtop->type = type1;
948 /* then check for dereferencing */
949 gen_bounded_ptr_deref();
952 #endif
954 /* store vtop in a register belonging to class 'rc'. lvalues are
955 converted to values. Cannot be used for values that cannot be
956 converted to a register value (such as structures). */
957 ST_FUNC int gv(int rc)
959 int r, bit_pos, bit_size, size, align, i;
960 int rc2;
962 /* NOTE: get_reg can modify vstack[] */
963 if (vtop->type.t & VT_BITFIELD) {
964 CType type;
965 int bits = 32;
966 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
967 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
968 /* remove bit field info to avoid loops */
969 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
970 /* cast to int to propagate signedness in following ops */
971 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
972 type.t = VT_LLONG;
973 bits = 64;
974 } else
975 type.t = VT_INT;
976 if((vtop->type.t & VT_UNSIGNED) ||
977 (vtop->type.t & VT_BTYPE) == VT_BOOL)
978 type.t |= VT_UNSIGNED;
979 gen_cast(&type);
980 /* generate shifts */
981 vpushi(bits - (bit_pos + bit_size));
982 gen_op(TOK_SHL);
983 vpushi(bits - bit_size);
984 /* NOTE: transformed to SHR if unsigned */
985 gen_op(TOK_SAR);
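/* e.g. a 5-bit field at bit_pos 3 in an int: shift left by 32-(3+5) = 24 to
   put the field's top bit at bit 31, then shift right by 32-5 = 27 (SAR for
   signed, SHR for unsigned) to extract it with the correct extension */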
986 r = gv(rc);
987 } else {
988 if (is_float(vtop->type.t) &&
989 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
990 Sym *sym;
991 int *ptr;
992 unsigned long offset;
993 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
994 CValue check;
995 #endif
997 /* XXX: unify with initializers handling ? */
998 /* CPUs usually cannot use float constants, so we store them
999 generically in data segment */
1000 size = type_size(&vtop->type, &align);
1001 offset = (data_section->data_offset + align - 1) & -align;
1002 data_section->data_offset = offset;
1003 /* XXX: not portable yet */
1004 #if defined(__i386__) || defined(__x86_64__)
1005 /* Zero pad x87 tenbyte long doubles */
1006 if (size == LDOUBLE_SIZE) {
1007 vtop->c.tab[2] &= 0xffff;
1008 #if LDOUBLE_SIZE == 16
1009 vtop->c.tab[3] = 0;
1010 #endif
1012 #endif
1013 ptr = section_ptr_add(data_section, size);
1014 size = size >> 2;
1015 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1016 check.d = 1;
1017 if(check.tab[0])
1018 for(i=0;i<size;i++)
1019 ptr[i] = vtop->c.tab[size-1-i];
1020 else
1021 #endif
1022 for(i=0;i<size;i++)
1023 ptr[i] = vtop->c.tab[i];
1024 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1025 vtop->r |= VT_LVAL | VT_SYM;
1026 vtop->sym = sym;
1027 vtop->c.i = 0;
1029 #ifdef CONFIG_TCC_BCHECK
1030 if (vtop->r & VT_MUSTBOUND)
1031 gbound();
1032 #endif
1034 r = vtop->r & VT_VALMASK;
1035 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1036 #ifndef TCC_TARGET_ARM64
1037 if (rc == RC_IRET)
1038 rc2 = RC_LRET;
1039 #ifdef TCC_TARGET_X86_64
1040 else if (rc == RC_FRET)
1041 rc2 = RC_QRET;
1042 #endif
1043 #endif
1045 /* need to reload if:
1046 - constant
1047 - lvalue (need to dereference pointer)
1048 - already a register, but not in the right class */
1049 if (r >= VT_CONST
1050 || (vtop->r & VT_LVAL)
1051 || !(reg_classes[r] & rc)
1052 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1053 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1054 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1055 #else
1056 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1057 #endif
1060 r = get_reg(rc);
1061 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1062 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1063 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1064 #else
1065 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1066 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1067 unsigned long long ll;
1068 #endif
1069 int r2, original_type;
1070 original_type = vtop->type.t;
1071 /* two register type load : expand to two words
1072 temporarily */
1073 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1074 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1075 /* load constant */
1076 ll = vtop->c.i;
1077 vtop->c.i = ll; /* first word */
1078 load(r, vtop);
1079 vtop->r = r; /* save register value */
1080 vpushi(ll >> 32); /* second word */
1081 } else
1082 #endif
1083 if (vtop->r & VT_LVAL) {
1084 /* We do not want to modify the long long
1085 pointer here, so the safest (though less
1086 efficient) approach is to save all the other
1087 registers on the stack. XXX: totally inefficient. */
1088 #if 0
1089 save_regs(1);
1090 #else
1091 /* lvalue_save: save only if used further down the stack */
1092 save_reg_upstack(vtop->r, 1);
1093 #endif
1094 /* load from memory */
1095 vtop->type.t = load_type;
1096 load(r, vtop);
1097 vdup();
1098 vtop[-1].r = r; /* save register value */
1099 /* increment pointer to get second word */
1100 vtop->type.t = addr_type;
1101 gaddrof();
1102 vpushi(load_size);
1103 gen_op('+');
1104 vtop->r |= VT_LVAL;
1105 vtop->type.t = load_type;
1106 } else {
1107 /* move registers */
1108 load(r, vtop);
1109 vdup();
1110 vtop[-1].r = r; /* save register value */
1111 vtop->r = vtop[-1].r2;
1113 /* Allocate second register. Here we rely on the fact that
1114 get_reg() tries first to free r2 of an SValue. */
1115 r2 = get_reg(rc2);
1116 load(r2, vtop);
1117 vpop();
1118 /* write second register */
1119 vtop->r2 = r2;
1120 vtop->type.t = original_type;
1121 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1122 int t1, t;
1123 /* lvalue of scalar type : need to use lvalue type
1124 because of possible cast */
1125 t = vtop->type.t;
1126 t1 = t;
1127 /* compute memory access type */
1128 if (vtop->r & VT_REF)
1129 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1130 t = VT_PTR;
1131 #else
1132 t = VT_INT;
1133 #endif
1134 else if (vtop->r & VT_LVAL_BYTE)
1135 t = VT_BYTE;
1136 else if (vtop->r & VT_LVAL_SHORT)
1137 t = VT_SHORT;
1138 if (vtop->r & VT_LVAL_UNSIGNED)
1139 t |= VT_UNSIGNED;
1140 vtop->type.t = t;
1141 load(r, vtop);
1142 /* restore wanted type */
1143 vtop->type.t = t1;
1144 } else {
1145 /* one register type load */
1146 load(r, vtop);
1149 vtop->r = r;
1150 #ifdef TCC_TARGET_C67
1151 /* uses register pairs for doubles */
1152 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1153 vtop->r2 = r+1;
1154 #endif
1156 return r;
1159 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1160 ST_FUNC void gv2(int rc1, int rc2)
1162 int v;
1164 /* generate more generic register first. But VT_JMP or VT_CMP
1165 values must be generated first in all cases to avoid possible
1166 reload errors */
1167 v = vtop[0].r & VT_VALMASK;
1168 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1169 vswap();
1170 gv(rc1);
1171 vswap();
1172 gv(rc2);
1173 /* test if reload is needed for first register */
1174 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1175 vswap();
1176 gv(rc1);
1177 vswap();
1179 } else {
1180 gv(rc2);
1181 vswap();
1182 gv(rc1);
1183 vswap();
1184 /* test if reload is needed for first register */
1185 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1186 gv(rc2);
1191 #ifndef TCC_TARGET_ARM64
1192 /* wrapper around RC_FRET to return a register by type */
1193 static int rc_fret(int t)
1195 #ifdef TCC_TARGET_X86_64
1196 if (t == VT_LDOUBLE) {
1197 return RC_ST0;
1199 #endif
1200 return RC_FRET;
1202 #endif
1204 /* wrapper around REG_FRET to return a register by type */
1205 static int reg_fret(int t)
1207 #ifdef TCC_TARGET_X86_64
1208 if (t == VT_LDOUBLE) {
1209 return TREG_ST0;
1211 #endif
1212 return REG_FRET;
1215 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1216 /* expand 64bit on stack in two ints */
1217 static void lexpand(void)
1219 int u, v;
1220 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1221 v = vtop->r & (VT_VALMASK | VT_LVAL);
1222 if (v == VT_CONST) {
1223 vdup();
1224 vtop[0].c.i >>= 32;
1225 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1226 vdup();
1227 vtop[0].c.i += 4;
1228 } else {
1229 gv(RC_INT);
1230 vdup();
1231 vtop[0].r = vtop[-1].r2;
1232 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1234 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
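/* Three cases above: a plain constant is duplicated and the copy's value
   shifted right by 32 to form the high word; an lvalue at a constant or
   stack address is duplicated with its address bumped by 4 so the copy
   designates the second word; anything else is loaded into a register pair
   and the r/r2 halves are split into two single-word stack entries. */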
1236 #endif
1238 #ifdef TCC_TARGET_ARM
1239 /* expand long long on stack */
1240 ST_FUNC void lexpand_nr(void)
1242 int u,v;
1244 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1245 vdup();
1246 vtop->r2 = VT_CONST;
1247 vtop->type.t = VT_INT | u;
1248 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1249 if (v == VT_CONST) {
1250 vtop[-1].c.i = vtop->c.i;
1251 vtop->c.i = vtop->c.i >> 32;
1252 vtop->r = VT_CONST;
1253 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1254 vtop->c.i += 4;
1255 vtop->r = vtop[-1].r;
1256 } else if (v > VT_CONST) {
1257 vtop--;
1258 lexpand();
1259 } else
1260 vtop->r = vtop[-1].r2;
1261 vtop[-1].r2 = VT_CONST;
1262 vtop[-1].type.t = VT_INT | u;
1264 #endif
1266 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1267 /* build a long long from two ints */
1268 static void lbuild(int t)
1270 gv2(RC_INT, RC_INT);
1271 vtop[-1].r2 = vtop[0].r;
1272 vtop[-1].type.t = t;
1273 vpop();
1275 #endif
1277 /* rotate n first stack elements to the bottom
1278 I1 ... In -> I2 ... In I1 [top is right]
1280 ST_FUNC void vrotb(int n)
1282 int i;
1283 SValue tmp;
1285 tmp = vtop[-n + 1];
1286 for(i=-n+1;i!=0;i++)
1287 vtop[i] = vtop[i+1];
1288 vtop[0] = tmp;
1291 /* rotate the n elements before entry e towards the top
1292 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1294 ST_FUNC void vrote(SValue *e, int n)
1296 int i;
1297 SValue tmp;
1299 tmp = *e;
1300 for(i = 0;i < n - 1; i++)
1301 e[-i] = e[-i - 1];
1302 e[-n + 1] = tmp;
1305 /* rotate n first stack elements to the top
1306 I1 ... In -> In I1 ... I(n-1) [top is right]
1308 ST_FUNC void vrott(int n)
1310 vrote(vtop, n);
1313 /* pop stack value */
1314 ST_FUNC void vpop(void)
1316 int v;
1317 v = vtop->r & VT_VALMASK;
1318 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1319 /* for x86, we need to pop the FP stack */
1320 if (v == TREG_ST0 && !nocode_wanted) {
1321 o(0xd8dd); /* fstp %st(0) */
1322 } else
1323 #endif
1324 if (v == VT_JMP || v == VT_JMPI) {
1325 /* need to put correct jump if && or || without test */
1326 gsym(vtop->c.i);
1328 vtop--;
1331 /* convert stack entry to register and duplicate its value in another
1332 register */
1333 static void gv_dup(void)
1335 int rc, t, r, r1;
1336 SValue sv;
1338 t = vtop->type.t;
1339 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1340 if ((t & VT_BTYPE) == VT_LLONG) {
1341 lexpand();
1342 gv_dup();
1343 vswap();
1344 vrotb(3);
1345 gv_dup();
1346 vrotb(4);
1347 /* stack: H L L1 H1 */
1348 lbuild(t);
1349 vrotb(3);
1350 vrotb(3);
1351 vswap();
1352 lbuild(t);
1353 vswap();
1354 } else
1355 #endif
1357 /* duplicate value */
1358 rc = RC_INT;
1359 sv.type.t = VT_INT;
1360 if (is_float(t)) {
1361 rc = RC_FLOAT;
1362 #ifdef TCC_TARGET_X86_64
1363 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1364 rc = RC_ST0;
1366 #endif
1367 sv.type.t = t;
1369 r = gv(rc);
1370 r1 = get_reg(rc);
1371 sv.r = r;
1372 sv.c.i = 0;
1373 load(r1, &sv); /* move r to r1 */
1374 vdup();
1375 /* duplicates value */
1376 if (r != r1)
1377 vtop->r = r1;
1381 /* Generate value test
1383 * Generate a test for any value (jump, comparison and integers) */
1384 ST_FUNC int gvtst(int inv, int t)
1386 int v = vtop->r & VT_VALMASK;
1387 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1388 vpushi(0);
1389 gen_op(TOK_NE);
1391 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1392 /* constant jmp optimization */
1393 if ((vtop->c.i != 0) != inv)
1394 t = gjmp(t);
1395 vtop--;
1396 return t;
1398 return gtst(inv, t);
1401 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1402 /* generate CPU independent (unsigned) long long operations */
1403 static void gen_opl(int op)
1405 int t, a, b, op1, c, i;
1406 int func;
1407 unsigned short reg_iret = REG_IRET;
1408 unsigned short reg_lret = REG_LRET;
1409 SValue tmp;
1411 switch(op) {
1412 case '/':
1413 case TOK_PDIV:
1414 func = TOK___divdi3;
1415 goto gen_func;
1416 case TOK_UDIV:
1417 func = TOK___udivdi3;
1418 goto gen_func;
1419 case '%':
1420 func = TOK___moddi3;
1421 goto gen_mod_func;
1422 case TOK_UMOD:
1423 func = TOK___umoddi3;
1424 gen_mod_func:
1425 #ifdef TCC_ARM_EABI
1426 reg_iret = TREG_R2;
1427 reg_lret = TREG_R3;
1428 #endif
1429 gen_func:
1430 /* call generic long long function */
1431 vpush_global_sym(&func_old_type, func);
1432 vrott(3);
1433 gfunc_call(2);
1434 vpushi(0);
1435 vtop->r = reg_iret;
1436 vtop->r2 = reg_lret;
1437 break;
1438 case '^':
1439 case '&':
1440 case '|':
1441 case '*':
1442 case '+':
1443 case '-':
1444 //pv("gen_opl A",0,2);
1445 t = vtop->type.t;
1446 vswap();
1447 lexpand();
1448 vrotb(3);
1449 lexpand();
1450 /* stack: L1 H1 L2 H2 */
1451 tmp = vtop[0];
1452 vtop[0] = vtop[-3];
1453 vtop[-3] = tmp;
1454 tmp = vtop[-2];
1455 vtop[-2] = vtop[-3];
1456 vtop[-3] = tmp;
1457 vswap();
1458 /* stack: H1 H2 L1 L2 */
1459 //pv("gen_opl B",0,4);
1460 if (op == '*') {
1461 vpushv(vtop - 1);
1462 vpushv(vtop - 1);
1463 gen_op(TOK_UMULL);
1464 lexpand();
1465 /* stack: H1 H2 L1 L2 ML MH */
1466 for(i=0;i<4;i++)
1467 vrotb(6);
1468 /* stack: ML MH H1 H2 L1 L2 */
1469 tmp = vtop[0];
1470 vtop[0] = vtop[-2];
1471 vtop[-2] = tmp;
1472 /* stack: ML MH H1 L2 H2 L1 */
1473 gen_op('*');
1474 vrotb(3);
1475 vrotb(3);
1476 gen_op('*');
1477 /* stack: ML MH M1 M2 */
1478 gen_op('+');
1479 gen_op('+');
1480 } else if (op == '+' || op == '-') {
1481 /* XXX: add non carry method too (for MIPS or alpha) */
1482 if (op == '+')
1483 op1 = TOK_ADDC1;
1484 else
1485 op1 = TOK_SUBC1;
1486 gen_op(op1);
1487 /* stack: H1 H2 (L1 op L2) */
1488 vrotb(3);
1489 vrotb(3);
1490 gen_op(op1 + 1); /* TOK_xxxC2 */
1491 } else {
1492 gen_op(op);
1493 /* stack: H1 H2 (L1 op L2) */
1494 vrotb(3);
1495 vrotb(3);
1496 /* stack: (L1 op L2) H1 H2 */
1497 gen_op(op);
1498 /* stack: (L1 op L2) (H1 op H2) */
1500 /* stack: L H */
1501 lbuild(t);
1502 break;
1503 case TOK_SAR:
1504 case TOK_SHR:
1505 case TOK_SHL:
1506 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1507 t = vtop[-1].type.t;
1508 vswap();
1509 lexpand();
1510 vrotb(3);
1511 /* stack: L H shift */
1512 c = (int)vtop->c.i;
1513 /* constant: simpler */
1514 /* NOTE: all comments are for SHL. The other cases are
1515 done by swapping words */
1516 vpop();
1517 if (op != TOK_SHL)
1518 vswap();
1519 if (c >= 32) {
1520 /* stack: L H */
1521 vpop();
1522 if (c > 32) {
1523 vpushi(c - 32);
1524 gen_op(op);
1526 if (op != TOK_SAR) {
1527 vpushi(0);
1528 } else {
1529 gv_dup();
1530 vpushi(31);
1531 gen_op(TOK_SAR);
1533 vswap();
1534 } else {
1535 vswap();
1536 gv_dup();
1537 /* stack: H L L */
1538 vpushi(c);
1539 gen_op(op);
1540 vswap();
1541 vpushi(32 - c);
1542 if (op == TOK_SHL)
1543 gen_op(TOK_SHR);
1544 else
1545 gen_op(TOK_SHL);
1546 vrotb(3);
1547 /* stack: L L H */
1548 vpushi(c);
1549 if (op == TOK_SHL)
1550 gen_op(TOK_SHL);
1551 else
1552 gen_op(TOK_SHR);
1553 gen_op('|');
1555 if (op != TOK_SHL)
1556 vswap();
1557 lbuild(t);
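/* For SHL with 0 < c < 32 the sequence above computes low' = low << c and
   high' = (high << c) | (low >> (32 - c)); for c >= 32 it yields low' = 0
   and high' = low << (c - 32).  The vswap()s let SHR/SAR reuse the same code
   with the roles of the two words exchanged. */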
1558 } else {
1559 /* XXX: should provide a faster fallback on x86 ? */
1560 switch(op) {
1561 case TOK_SAR:
1562 func = TOK___ashrdi3;
1563 goto gen_func;
1564 case TOK_SHR:
1565 func = TOK___lshrdi3;
1566 goto gen_func;
1567 case TOK_SHL:
1568 func = TOK___ashldi3;
1569 goto gen_func;
1572 break;
1573 default:
1574 /* compare operations */
1575 t = vtop->type.t;
1576 vswap();
1577 lexpand();
1578 vrotb(3);
1579 lexpand();
1580 /* stack: L1 H1 L2 H2 */
1581 tmp = vtop[-1];
1582 vtop[-1] = vtop[-2];
1583 vtop[-2] = tmp;
1584 /* stack: L1 L2 H1 H2 */
1585 /* compare high */
1586 op1 = op;
1587 /* when values are equal, we need to compare low words. since
1588 the jump is inverted, we invert the test too. */
1589 if (op1 == TOK_LT)
1590 op1 = TOK_LE;
1591 else if (op1 == TOK_GT)
1592 op1 = TOK_GE;
1593 else if (op1 == TOK_ULT)
1594 op1 = TOK_ULE;
1595 else if (op1 == TOK_UGT)
1596 op1 = TOK_UGE;
1597 a = 0;
1598 b = 0;
1599 gen_op(op1);
1600 if (op1 != TOK_NE) {
1601 a = gvtst(1, 0);
1603 if (op != TOK_EQ) {
1604 /* generate non equal test */
1605 /* XXX: NOT PORTABLE yet */
1606 if (a == 0) {
1607 b = gvtst(0, 0);
1608 } else {
1609 #if defined(TCC_TARGET_I386)
1610 b = psym(0x850f, 0);
1611 #elif defined(TCC_TARGET_ARM)
1612 b = ind;
1613 o(0x1A000000 | encbranch(ind, 0, 1));
1614 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1615 tcc_error("not implemented");
1616 #else
1617 #error not supported
1618 #endif
1621 /* compare low. Always unsigned */
1622 op1 = op;
1623 if (op1 == TOK_LT)
1624 op1 = TOK_ULT;
1625 else if (op1 == TOK_LE)
1626 op1 = TOK_ULE;
1627 else if (op1 == TOK_GT)
1628 op1 = TOK_UGT;
1629 else if (op1 == TOK_GE)
1630 op1 = TOK_UGE;
1631 gen_op(op1);
1632 a = gvtst(1, a);
1633 gsym(b);
1634 vseti(VT_JMPI, a);
1635 break;
1638 #endif
1640 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1642 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1643 return (a ^ b) >> 63 ? -x : x;
1646 static int gen_opic_lt(uint64_t a, uint64_t b)
1648 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
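/* gen_opic_sdiv performs a signed 64-bit division on unsigned operands by
   dividing the magnitudes (negating when bit 63 is set) and restoring the
   sign of the quotient from a ^ b; gen_opic_lt does a signed comparison by
   flipping the sign bit of both operands and comparing them unsigned. */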
1651 /* handle integer constant optimizations and various
1652 machine-independent optimizations */
1653 static void gen_opic(int op)
1655 SValue *v1 = vtop - 1;
1656 SValue *v2 = vtop;
1657 int t1 = v1->type.t & VT_BTYPE;
1658 int t2 = v2->type.t & VT_BTYPE;
1659 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1660 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1661 uint64_t l1 = c1 ? v1->c.i : 0;
1662 uint64_t l2 = c2 ? v2->c.i : 0;
1663 int shm = (t1 == VT_LLONG) ? 63 : 31;
1665 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1666 l1 = ((uint32_t)l1 |
1667 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1668 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1669 l2 = ((uint32_t)l2 |
1670 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
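/* For 32-bit operands l1/l2 are widened here to a canonical 64-bit form
   (zero-extended if unsigned, sign-extended otherwise) so that the 64-bit
   folding below also gives correct 32-bit results; the result is normalized
   the same way before being stored back into v1->c.i. */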
1672 if (c1 && c2) {
1673 switch(op) {
1674 case '+': l1 += l2; break;
1675 case '-': l1 -= l2; break;
1676 case '&': l1 &= l2; break;
1677 case '^': l1 ^= l2; break;
1678 case '|': l1 |= l2; break;
1679 case '*': l1 *= l2; break;
1681 case TOK_PDIV:
1682 case '/':
1683 case '%':
1684 case TOK_UDIV:
1685 case TOK_UMOD:
1686 /* if division by zero, generate explicit division */
1687 if (l2 == 0) {
1688 if (const_wanted)
1689 tcc_error("division by zero in constant");
1690 goto general_case;
1692 switch(op) {
1693 default: l1 = gen_opic_sdiv(l1, l2); break;
1694 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1695 case TOK_UDIV: l1 = l1 / l2; break;
1696 case TOK_UMOD: l1 = l1 % l2; break;
1698 break;
1699 case TOK_SHL: l1 <<= (l2 & shm); break;
1700 case TOK_SHR: l1 >>= (l2 & shm); break;
1701 case TOK_SAR:
1702 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1703 break;
1704 /* tests */
1705 case TOK_ULT: l1 = l1 < l2; break;
1706 case TOK_UGE: l1 = l1 >= l2; break;
1707 case TOK_EQ: l1 = l1 == l2; break;
1708 case TOK_NE: l1 = l1 != l2; break;
1709 case TOK_ULE: l1 = l1 <= l2; break;
1710 case TOK_UGT: l1 = l1 > l2; break;
1711 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1712 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1713 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1714 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1715 /* logical */
1716 case TOK_LAND: l1 = l1 && l2; break;
1717 case TOK_LOR: l1 = l1 || l2; break;
1718 default:
1719 goto general_case;
1721 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1722 l1 = ((uint32_t)l1 |
1723 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1724 v1->c.i = l1;
1725 vtop--;
1726 } else {
1727 /* if commutative ops, put c2 as constant */
1728 if (c1 && (op == '+' || op == '&' || op == '^' ||
1729 op == '|' || op == '*')) {
1730 vswap();
1731 c2 = c1; //c = c1, c1 = c2, c2 = c;
1732 l2 = l1; //l = l1, l1 = l2, l2 = l;
1734 if (!const_wanted &&
1735 c1 && ((l1 == 0 &&
1736 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1737 (l1 == -1 && op == TOK_SAR))) {
1738 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1739 vtop--;
1740 } else if (!const_wanted &&
1741 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1742 (l2 == -1 && op == '|') ||
1743 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1744 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1745 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1746 if (l2 == 1)
1747 vtop->c.i = 0;
1748 vswap();
1749 vtop--;
1750 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1751 op == TOK_PDIV) &&
1752 l2 == 1) ||
1753 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1754 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1755 l2 == 0) ||
1756 (op == '&' &&
1757 l2 == -1))) {
1758 /* filter out NOP operations like x*1, x-0, x&-1... */
1759 vtop--;
1760 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1761 /* try to use shifts instead of muls or divs */
1762 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1763 int n = -1;
1764 while (l2) {
1765 l2 >>= 1;
1766 n++;
1768 vtop->c.i = n;
1769 if (op == '*')
1770 op = TOK_SHL;
1771 else if (op == TOK_PDIV)
1772 op = TOK_SAR;
1773 else
1774 op = TOK_SHR;
1776 goto general_case;
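/* If the constant is a power of two, the loop above computes n = log2(l2)
   and the operation is rewritten as a shift: x * 2^n becomes x << n, the
   exact division TOK_PDIV becomes an arithmetic shift and TOK_UDIV a logical
   one; either way control falls through to general_case to emit the op. */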
1777 } else if (c2 && (op == '+' || op == '-') &&
1778 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1779 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1780 /* symbol + constant case */
1781 if (op == '-')
1782 l2 = -l2;
1783 l2 += vtop[-1].c.i;
1784 /* The backends can't always deal with addends to symbols
1785 larger than +-1<<31. Don't construct such. */
1786 if ((int)l2 != l2)
1787 goto general_case;
1788 vtop--;
1789 vtop->c.i = l2;
1790 } else {
1791 general_case:
1792 if (!nocode_wanted) {
1793 /* call low level op generator */
1794 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1795 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1796 gen_opl(op);
1797 else
1798 gen_opi(op);
1799 } else {
1800 vtop--;
1801 /* Ensure vtop isn't marked VT_CONST in case something
1802 up our callchain is interested in const-ness of the
1803 expression. Also make it a non-LVAL if it was,
1804 so that further code can't accidentally generate
1805 a deref (this happens only for buggy uses of e.g.
1806 gv() under nocode_wanted). */
1807 vtop->r &= ~(VT_VALMASK | VT_LVAL);
1813 /* generate a floating point operation with constant propagation */
1814 static void gen_opif(int op)
1816 int c1, c2;
1817 SValue *v1, *v2;
1818 long double f1, f2;
1820 v1 = vtop - 1;
1821 v2 = vtop;
1822 /* currently, we cannot do computations with forward symbols */
1823 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1824 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 if (c1 && c2) {
1826 if (v1->type.t == VT_FLOAT) {
1827 f1 = v1->c.f;
1828 f2 = v2->c.f;
1829 } else if (v1->type.t == VT_DOUBLE) {
1830 f1 = v1->c.d;
1831 f2 = v2->c.d;
1832 } else {
1833 f1 = v1->c.ld;
1834 f2 = v2->c.ld;
1837 /* NOTE: we only do constant propagation if finite number (not
1838 NaN or infinity) (ANSI spec) */
1839 if (!ieee_finite(f1) || !ieee_finite(f2))
1840 goto general_case;
1842 switch(op) {
1843 case '+': f1 += f2; break;
1844 case '-': f1 -= f2; break;
1845 case '*': f1 *= f2; break;
1846 case '/':
1847 if (f2 == 0.0) {
1848 if (const_wanted)
1849 tcc_error("division by zero in constant");
1850 goto general_case;
1852 f1 /= f2;
1853 break;
1854 /* XXX: also handles tests ? */
1855 default:
1856 goto general_case;
1858 /* XXX: overflow test ? */
1859 if (v1->type.t == VT_FLOAT) {
1860 v1->c.f = f1;
1861 } else if (v1->type.t == VT_DOUBLE) {
1862 v1->c.d = f1;
1863 } else {
1864 v1->c.ld = f1;
1866 vtop--;
1867 } else {
1868 general_case:
1869 if (!nocode_wanted) {
1870 gen_opf(op);
1871 } else {
1872 vtop--;
1877 static int pointed_size(CType *type)
1879 int align;
1880 return type_size(pointed_type(type), &align);
1883 static void vla_runtime_pointed_size(CType *type)
1885 int align;
1886 vla_runtime_type_size(pointed_type(type), &align);
1889 static inline int is_null_pointer(SValue *p)
1891 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1892 return 0;
1893 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1894 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1895 ((p->type.t & VT_BTYPE) == VT_PTR &&
1896 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1899 static inline int is_integer_btype(int bt)
1901 return (bt == VT_BYTE || bt == VT_SHORT ||
1902 bt == VT_INT || bt == VT_LLONG);
1905 /* check types for comparison or subtraction of pointers */
1906 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1908 CType *type1, *type2, tmp_type1, tmp_type2;
1909 int bt1, bt2;
1911 /* null pointers are accepted for all comparisons, as in gcc */
1912 if (is_null_pointer(p1) || is_null_pointer(p2))
1913 return;
1914 type1 = &p1->type;
1915 type2 = &p2->type;
1916 bt1 = type1->t & VT_BTYPE;
1917 bt2 = type2->t & VT_BTYPE;
1918 /* accept comparison between pointer and integer with a warning */
1919 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1920 if (op != TOK_LOR && op != TOK_LAND )
1921 tcc_warning("comparison between pointer and integer");
1922 return;
1925 /* both must be pointers or implicit function pointers */
1926 if (bt1 == VT_PTR) {
1927 type1 = pointed_type(type1);
1928 } else if (bt1 != VT_FUNC)
1929 goto invalid_operands;
1931 if (bt2 == VT_PTR) {
1932 type2 = pointed_type(type2);
1933 } else if (bt2 != VT_FUNC) {
1934 invalid_operands:
1935 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1937 if ((type1->t & VT_BTYPE) == VT_VOID ||
1938 (type2->t & VT_BTYPE) == VT_VOID)
1939 return;
1940 tmp_type1 = *type1;
1941 tmp_type2 = *type2;
1942 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1943 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1944 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1945 /* gcc-like error if '-' is used */
1946 if (op == '-')
1947 goto invalid_operands;
1948 else
1949 tcc_warning("comparison of distinct pointer types lacks a cast");
1953 /* generic gen_op: handles type problems */
1954 ST_FUNC void gen_op(int op)
1956 int u, t1, t2, bt1, bt2, t;
1957 CType type1;
1959 redo:
1960 t1 = vtop[-1].type.t;
1961 t2 = vtop[0].type.t;
1962 bt1 = t1 & VT_BTYPE;
1963 bt2 = t2 & VT_BTYPE;
1965 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1966 tcc_error("operation on a struct");
1967 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1968 if (bt2 == VT_FUNC) {
1969 mk_pointer(&vtop->type);
1970 gaddrof();
1972 if (bt1 == VT_FUNC) {
1973 vswap();
1974 mk_pointer(&vtop->type);
1975 gaddrof();
1976 vswap();
1978 goto redo;
1979 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1980 /* at least one operand is a pointer */
1981 /* relational op: both operands must be pointers */
1982 if (op >= TOK_ULT && op <= TOK_LOR) {
1983 check_comparison_pointer_types(vtop - 1, vtop, op);
1984 /* pointers are handled as unsigned */
1985 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1986 t = VT_LLONG | VT_UNSIGNED;
1987 #else
1988 t = VT_INT | VT_UNSIGNED;
1989 #endif
1990 goto std_op;
1992 /* if both pointers, then it must be the '-' op */
1993 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1994 if (op != '-')
1995 tcc_error("cannot use pointers here");
1996 check_comparison_pointer_types(vtop - 1, vtop, op);
1997 /* XXX: check that types are compatible */
1998 if (vtop[-1].type.t & VT_VLA) {
1999 vla_runtime_pointed_size(&vtop[-1].type);
2000 } else {
2001 vpushi(pointed_size(&vtop[-1].type));
2003 vrott(3);
2004 gen_opic(op);
2005 /* set to integer type */
2006 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2007 vtop->type.t = VT_LLONG;
2008 #else
2009 vtop->type.t = VT_INT;
2010 #endif
2011 vswap();
2012 gen_op(TOK_PDIV);
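/* The subtraction above yields a byte difference; dividing by the element
   size with TOK_PDIV (a division known to be exact, so it may be emitted as
   an arithmetic shift) produces the element count required by C pointer
   subtraction. */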
2013 } else {
2014 /* exactly one pointer : must be '+' or '-'. */
2015 if (op != '-' && op != '+')
2016 tcc_error("cannot use pointers here");
2017 /* Put pointer as first operand */
2018 if (bt2 == VT_PTR) {
2019 vswap();
2020 swap(&t1, &t2);
2022 #if PTR_SIZE == 4
2023 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2024 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2025 gen_cast(&int_type);
2026 #endif
2027 type1 = vtop[-1].type;
2028 type1.t &= ~VT_ARRAY;
2029 if (vtop[-1].type.t & VT_VLA)
2030 vla_runtime_pointed_size(&vtop[-1].type);
2031 else {
2032 u = pointed_size(&vtop[-1].type);
2033 if (u < 0)
2034 tcc_error("unknown array element size");
2035 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2036 vpushll(u);
2037 #else
2038 /* XXX: cast to int ? (long long case) */
2039 vpushi(u);
2040 #endif
2042 gen_op('*');
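/* e.g. for "int *p; p + i" the element size pushed above is multiplied into
   the integer operand, turning it into a byte offset before the plain
   addition in gen_opic(); type1, saved earlier, restores the pointer type of
   the result afterwards. */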
2043 #if 0
2044 /* #ifdef CONFIG_TCC_BCHECK
2045 The main reason for removing this code:
2046 #include <stdio.h>
2047 int main ()
2049 int v[10];
2050 int i = 10;
2051 int j = 9;
2052 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2053 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2055 When this code is on. then the output looks like
2056 v+i-j = 0xfffffffe
2057 v+(i-j) = 0xbff84000
2059 /* if evaluating constant expression, no code should be
2060 generated, so no bound check */
2061 if (tcc_state->do_bounds_check && !const_wanted) {
2062 /* if bounded pointers, we generate a special code to
2063 test bounds */
2064 if (op == '-') {
2065 vpushi(0);
2066 vswap();
2067 gen_op('-');
2069 gen_bounded_ptr_add();
2070 } else
2071 #endif
2073 gen_opic(op);
2075 /* restore the type in case gen_opic() swapped operands */
2076 vtop->type = type1;
2078 } else if (is_float(bt1) || is_float(bt2)) {
2079 /* compute bigger type and do implicit casts */
2080 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2081 t = VT_LDOUBLE;
2082 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2083 t = VT_DOUBLE;
2084 } else {
2085 t = VT_FLOAT;
2087 /* floats can only be used for a few operations */
2088 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2089 (op < TOK_ULT || op > TOK_GT))
2090 tcc_error("invalid operands for binary operation");
2091 goto std_op;
2092 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2093 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2094 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2095 t |= VT_UNSIGNED;
2096 goto std_op;
2097 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2098 /* cast to biggest op */
2099 t = VT_LLONG;
2100 /* convert to unsigned if it does not fit in a long long */
2101 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2102 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2103 t |= VT_UNSIGNED;
2104 goto std_op;
2105 } else {
2106 /* integer operations */
2107 t = VT_INT;
2108 /* convert to unsigned if it does not fit in an integer */
2109 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2110 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2111 t |= VT_UNSIGNED;
2112 std_op:
2113 /* XXX: currently, some unsigned operations are explicit, so
2114 we modify them here */
2115 if (t & VT_UNSIGNED) {
2116 if (op == TOK_SAR)
2117 op = TOK_SHR;
2118 else if (op == '/')
2119 op = TOK_UDIV;
2120 else if (op == '%')
2121 op = TOK_UMOD;
2122 else if (op == TOK_LT)
2123 op = TOK_ULT;
2124 else if (op == TOK_GT)
2125 op = TOK_UGT;
2126 else if (op == TOK_LE)
2127 op = TOK_ULE;
2128 else if (op == TOK_GE)
2129 op = TOK_UGE;
2131 vswap();
2132 type1.t = t;
2133 gen_cast(&type1);
2134 vswap();
2135 /* special case for shifts and long long: we keep the shift as
2136 an integer */
2137 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2138 type1.t = VT_INT;
2139 gen_cast(&type1);
2140 if (is_float(t))
2141 gen_opif(op);
2142 else
2143 gen_opic(op);
2144 if (op >= TOK_ULT && op <= TOK_GT) {
2145 /* relational op: the result is an int */
2146 vtop->type.t = VT_INT;
2147 } else {
2148 vtop->type.t = t;
2151 // Make sure that we have converted to an rvalue:
2152 if (vtop->r & VT_LVAL && !nocode_wanted)
2153 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2156 #ifndef TCC_TARGET_ARM
2157 /* generic itof for unsigned long long case */
2158 static void gen_cvt_itof1(int t)
2160 #ifdef TCC_TARGET_ARM64
2161 gen_cvt_itof(t);
2162 #else
2163 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2164 (VT_LLONG | VT_UNSIGNED)) {
2166 if (t == VT_FLOAT)
2167 vpush_global_sym(&func_old_type, TOK___floatundisf);
2168 #if LDOUBLE_SIZE != 8
2169 else if (t == VT_LDOUBLE)
2170 vpush_global_sym(&func_old_type, TOK___floatundixf);
2171 #endif
2172 else
2173 vpush_global_sym(&func_old_type, TOK___floatundidf);
2174 vrott(2);
2175 gfunc_call(1);
2176 vpushi(0);
2177 vtop->r = reg_fret(t);
2178 } else {
2179 gen_cvt_itof(t);
2181 #endif
2183 #endif
2185 /* generic ftoi for unsigned long long case */
2186 static void gen_cvt_ftoi1(int t)
2188 #ifdef TCC_TARGET_ARM64
2189 gen_cvt_ftoi(t);
2190 #else
2191 int st;
2193 if (t == (VT_LLONG | VT_UNSIGNED)) {
2194 /* not handled natively */
2195 st = vtop->type.t & VT_BTYPE;
2196 if (st == VT_FLOAT)
2197 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2198 #if LDOUBLE_SIZE != 8
2199 else if (st == VT_LDOUBLE)
2200 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2201 #endif
2202 else
2203 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2204 vrott(2);
2205 gfunc_call(1);
2206 vpushi(0);
2207 vtop->r = REG_IRET;
2208 vtop->r2 = REG_LRET;
2209 } else {
2210 gen_cvt_ftoi(t);
2212 #endif
2215 /* force char or short cast */
2216 static void force_charshort_cast(int t)
2218 int bits, dbt;
2219 dbt = t & VT_BTYPE;
2220 /* XXX: add optimization if lvalue : just change type and offset */
2221 if (dbt == VT_BYTE)
2222 bits = 8;
2223 else
2224 bits = 16;
2225 if (t & VT_UNSIGNED) {
2226 vpushi((1 << bits) - 1);
2227 gen_op('&');
2228 } else {
2229 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2230 bits = 64 - bits;
2231 else
2232 bits = 32 - bits;
2233 vpushi(bits);
2234 gen_op(TOK_SHL);
2235 /* result must be signed or the SAR is converted to an SHL
2236 This was not the case when "t" was a signed short
2237 and the last value on the stack was an unsigned int */
2238 vtop->type.t &= ~VT_UNSIGNED;
2239 vpushi(bits);
2240 gen_op(TOK_SAR);
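/* Illustrative note (not part of the original source): for a plain int on
   the value stack, the cast to signed char above is done as a shift pair,
   e.g. 0x1234ABCD << 24 == 0xCD000000, then an arithmetic >> 24 yields
   0xFFFFFFCD (-51); a cast to unsigned char instead masks with 0xff and
   yields 0xCD. */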
2244 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2245 static void gen_cast(CType *type)
2247 int sbt, dbt, sf, df, c, p;
2249 /* special delayed cast for char/short */
2250 /* XXX: in some cases (multiple cascaded casts), it may still
2251 be incorrect */
2252 if (vtop->r & VT_MUSTCAST) {
2253 vtop->r &= ~VT_MUSTCAST;
2254 force_charshort_cast(vtop->type.t);
2257 /* bitfields first get cast to ints */
2258 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2259 gv(RC_INT);
2262 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2263 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2265 if (sbt != dbt) {
2266 sf = is_float(sbt);
2267 df = is_float(dbt);
2268 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2269 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2270 if (c) {
2271 /* constant case: we can do it now */
2272 /* XXX: in ISOC, cannot do it if error in convert */
2273 if (sbt == VT_FLOAT)
2274 vtop->c.ld = vtop->c.f;
2275 else if (sbt == VT_DOUBLE)
2276 vtop->c.ld = vtop->c.d;
2278 if (df) {
2279 if ((sbt & VT_BTYPE) == VT_LLONG) {
2280 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2281 vtop->c.ld = vtop->c.i;
2282 else
2283 vtop->c.ld = -(long double)-vtop->c.i;
2284 } else if(!sf) {
2285 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2286 vtop->c.ld = (uint32_t)vtop->c.i;
2287 else
2288 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2291 if (dbt == VT_FLOAT)
2292 vtop->c.f = (float)vtop->c.ld;
2293 else if (dbt == VT_DOUBLE)
2294 vtop->c.d = (double)vtop->c.ld;
2295 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2296 vtop->c.i = vtop->c.ld;
2297 } else if (sf && dbt == VT_BOOL) {
2298 vtop->c.i = (vtop->c.ld != 0);
2299 } else {
2300 if(sf)
2301 vtop->c.i = vtop->c.ld;
2302 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2304 else if (sbt & VT_UNSIGNED)
2305 vtop->c.i = (uint32_t)vtop->c.i;
2306 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2307 else if (sbt == VT_PTR)
2309 #endif
2310 else if (sbt != VT_LLONG)
2311 vtop->c.i = ((uint32_t)vtop->c.i |
2312 -(vtop->c.i & 0x80000000));
2314 if (dbt == (VT_LLONG|VT_UNSIGNED))
2316 else if (dbt == VT_BOOL)
2317 vtop->c.i = (vtop->c.i != 0);
2318 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2319 else if (dbt == VT_PTR)
2321 #endif
2322 else if (dbt != VT_LLONG) {
2323 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2324 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2325 0xffffffff);
2326 vtop->c.i &= m;
2327 if (!(dbt & VT_UNSIGNED))
2328 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2331 } else if (p && dbt == VT_BOOL) {
2332 vtop->r = VT_CONST;
2333 vtop->c.i = 1;
2334 } else if (!nocode_wanted) {
2335 /* non constant case: generate code */
2336 if (sf && df) {
2337 /* convert from fp to fp */
2338 gen_cvt_ftof(dbt);
2339 } else if (df) {
2340 /* convert int to fp */
2341 gen_cvt_itof1(dbt);
2342 } else if (sf) {
2343 /* convert fp to int */
2344 if (dbt == VT_BOOL) {
2345 vpushi(0);
2346 gen_op(TOK_NE);
2347 } else {
2348 /* we handle char/short/etc... with generic code */
2349 if (dbt != (VT_INT | VT_UNSIGNED) &&
2350 dbt != (VT_LLONG | VT_UNSIGNED) &&
2351 dbt != VT_LLONG)
2352 dbt = VT_INT;
2353 gen_cvt_ftoi1(dbt);
2354 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2355 /* additional cast for char/short... */
2356 vtop->type.t = dbt;
2357 gen_cast(type);
2360 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2361 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2362 if ((sbt & VT_BTYPE) != VT_LLONG) {
2363 /* scalar to long long */
2364 /* machine independent conversion */
2365 gv(RC_INT);
2366 /* generate high word */
2367 if (sbt == (VT_INT | VT_UNSIGNED)) {
2368 vpushi(0);
2369 gv(RC_INT);
2370 } else {
2371 if (sbt == VT_PTR) {
2372 /* cast from pointer to int before we apply
2373 shift operation, which pointers don't support */
2374 gen_cast(&int_type);
2376 gv_dup();
2377 vpushi(31);
2378 gen_op(TOK_SAR);
2380 /* patch second register */
2381 vtop[-1].r2 = vtop->r;
2382 vpop();
2384 #else
2385 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2386 (dbt & VT_BTYPE) == VT_PTR ||
2387 (dbt & VT_BTYPE) == VT_FUNC) {
2388 if ((sbt & VT_BTYPE) != VT_LLONG &&
2389 (sbt & VT_BTYPE) != VT_PTR &&
2390 (sbt & VT_BTYPE) != VT_FUNC) {
2391 /* need to convert from 32bit to 64bit */
2392 gv(RC_INT);
2393 if (sbt != (VT_INT | VT_UNSIGNED)) {
2394 #if defined(TCC_TARGET_ARM64)
2395 gen_cvt_sxtw();
2396 #elif defined(TCC_TARGET_X86_64)
2397 int r = gv(RC_INT);
2398 /* x86_64 specific: movslq */
2399 o(0x6348);
2400 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2401 #else
2402 #error
2403 #endif
2406 #endif
2407 } else if (dbt == VT_BOOL) {
2408 /* scalar to bool */
2409 vpushi(0);
2410 gen_op(TOK_NE);
2411 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2412 (dbt & VT_BTYPE) == VT_SHORT) {
2413 if (sbt == VT_PTR) {
2414 vtop->type.t = VT_INT;
2415 tcc_warning("nonportable conversion from pointer to char/short");
2417 force_charshort_cast(dbt);
2418 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2419 } else if ((dbt & VT_BTYPE) == VT_INT) {
2420 /* scalar to int */
2421 if ((sbt & VT_BTYPE) == VT_LLONG) {
2422 /* from long long: just take low order word */
2423 lexpand();
2424 vpop();
2426 /* if lvalue and single word type, nothing to do because
2427 the lvalue already contains the real type size (see
2428 VT_LVAL_xxx constants) */
2429 #endif
2432 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2433 /* if we are casting between pointer types,
2434 we must update the VT_LVAL_xxx size */
2435 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2436 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2438 vtop->type = *type;
2441 /* return type size as known at compile time. Put alignment at 'a' */
2442 ST_FUNC int type_size(CType *type, int *a)
2444 Sym *s;
2445 int bt;
2447 bt = type->t & VT_BTYPE;
2448 if (bt == VT_STRUCT) {
2449 /* struct/union */
2450 s = type->ref;
2451 *a = s->r;
2452 return s->c;
2453 } else if (bt == VT_PTR) {
2454 if (type->t & VT_ARRAY) {
2455 int ts;
2457 s = type->ref;
2458 ts = type_size(&s->type, a);
2460 if (ts < 0 && s->c < 0)
2461 ts = -ts;
2463 return ts * s->c;
2464 } else {
2465 *a = PTR_SIZE;
2466 return PTR_SIZE;
2468 } else if (bt == VT_LDOUBLE) {
2469 *a = LDOUBLE_ALIGN;
2470 return LDOUBLE_SIZE;
2471 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2472 #ifdef TCC_TARGET_I386
2473 #ifdef TCC_TARGET_PE
2474 *a = 8;
2475 #else
2476 *a = 4;
2477 #endif
2478 #elif defined(TCC_TARGET_ARM)
2479 #ifdef TCC_ARM_EABI
2480 *a = 8;
2481 #else
2482 *a = 4;
2483 #endif
2484 #else
2485 *a = 8;
2486 #endif
2487 return 8;
2488 } else if (bt == VT_INT || bt == VT_FLOAT) {
2489 *a = 4;
2490 return 4;
2491 } else if (bt == VT_SHORT) {
2492 *a = 2;
2493 return 2;
2494 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2495 *a = 8;
2496 return 16;
2497 } else if (bt == VT_ENUM) {
2498 *a = 4;
2499 /* Enums might be incomplete, so don't just return '4' here. */
2500 return type->ref->c;
2501 } else {
2502 /* char, void, function, _Bool */
2503 *a = 1;
2504 return 1;
2508 /* push type size as known at run time on top of value stack. Put
2509 alignment at 'a' */
2510 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2512 if (type->t & VT_VLA) {
2513 type_size(&type->ref->type, a);
2514 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2515 } else {
2516 vpushi(type_size(type, a));
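/* Illustrative note (assumption, not in the original source): for a VLA
   such as 'int a[n]', the total byte size n * sizeof(int) was computed and
   stored into a hidden local slot when the declarator was parsed (see
   post_type()); type->ref->c holds that slot's stack offset, so the code
   above pushes an lvalue for it instead of a compile-time constant. */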
2520 static void vla_sp_restore(void) {
2521 if (vlas_in_scope) {
2522 gen_vla_sp_restore(vla_sp_loc);
2526 static void vla_sp_restore_root(void) {
2527 if (vlas_in_scope) {
2528 gen_vla_sp_restore(vla_sp_root_loc);
2532 /* return the pointed type of t */
2533 static inline CType *pointed_type(CType *type)
2535 return &type->ref->type;
2538 /* modify type so that it becomes a pointer to the original type. */
2539 ST_FUNC void mk_pointer(CType *type)
2541 Sym *s;
2542 s = sym_push(SYM_FIELD, type, 0, -1);
2543 type->t = VT_PTR | (type->t & ~VT_TYPE);
2544 type->ref = s;
2547 /* compare function types. OLD functions match any new functions */
2548 static int is_compatible_func(CType *type1, CType *type2)
2550 Sym *s1, *s2;
2552 s1 = type1->ref;
2553 s2 = type2->ref;
2554 if (!is_compatible_types(&s1->type, &s2->type))
2555 return 0;
2556 /* check func_call */
2557 if (s1->a.func_call != s2->a.func_call)
2558 return 0;
2559 /* XXX: not complete */
2560 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2561 return 1;
2562 if (s1->c != s2->c)
2563 return 0;
2564 while (s1 != NULL) {
2565 if (s2 == NULL)
2566 return 0;
2567 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2568 return 0;
2569 s1 = s1->next;
2570 s2 = s2->next;
2572 if (s2)
2573 return 0;
2574 return 1;
2577 /* return true if type1 and type2 are the same. If unqualified is
2578 true, qualifiers on the types are ignored.
2580 - enums are not checked as gcc __builtin_types_compatible_p ()
2582 static int compare_types(CType *type1, CType *type2, int unqualified)
2584 int bt1, t1, t2;
2586 t1 = type1->t & VT_TYPE;
2587 t2 = type2->t & VT_TYPE;
2588 if (unqualified) {
2589 /* strip qualifiers before comparing */
2590 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2591 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2593 /* Default vs. explicit signedness only matters for char */
2594 if ((t1 & VT_BTYPE) != VT_BYTE) {
2595 t1 &= ~VT_DEFSIGN;
2596 t2 &= ~VT_DEFSIGN;
2598 /* An enum is compatible with (unsigned) int. Ideally we would
2599 store the enums signedness in type->ref.a.<some_bit> and
2600 only accept unsigned enums with unsigned int and vice versa.
2601 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2602 from pointer target types, so we can't add it here either. */
2603 if ((t1 & VT_BTYPE) == VT_ENUM) {
2604 t1 = VT_INT;
2605 if (type1->ref->a.unsigned_enum)
2606 t1 |= VT_UNSIGNED;
2608 if ((t2 & VT_BTYPE) == VT_ENUM) {
2609 t2 = VT_INT;
2610 if (type2->ref->a.unsigned_enum)
2611 t2 |= VT_UNSIGNED;
2613 /* XXX: bitfields ? */
2614 if (t1 != t2)
2615 return 0;
2616 /* test more complicated cases */
2617 bt1 = t1 & VT_BTYPE;
2618 if (bt1 == VT_PTR) {
2619 type1 = pointed_type(type1);
2620 type2 = pointed_type(type2);
2621 return is_compatible_types(type1, type2);
2622 } else if (bt1 == VT_STRUCT) {
2623 return (type1->ref == type2->ref);
2624 } else if (bt1 == VT_FUNC) {
2625 return is_compatible_func(type1, type2);
2626 } else {
2627 return 1;
2631 /* return true if type1 and type2 are exactly the same (including
2632 qualifiers).
2634 static int is_compatible_types(CType *type1, CType *type2)
2636 return compare_types(type1,type2,0);
2639 /* return true if type1 and type2 are the same (ignoring qualifiers).
2641 static int is_compatible_parameter_types(CType *type1, CType *type2)
2643 return compare_types(type1,type2,1);
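/* Illustrative examples (not part of the original source): for parameter
   compatibility, 'void f(const int)' and 'void f(int)' match because
   top-level qualifiers are stripped, whereas for exact compatibility
   'const char *' and 'char *' differ, since the pointed-to types are
   compared with their qualifiers included. */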
2646 /* print a type. If 'varstr' is not NULL, then the variable is also
2647 printed in the type */
2648 /* XXX: union */
2649 /* XXX: add array and function pointers */
2650 static void type_to_str(char *buf, int buf_size,
2651 CType *type, const char *varstr)
2653 int bt, v, t;
2654 Sym *s, *sa;
2655 char buf1[256];
2656 const char *tstr;
2658 t = type->t & VT_TYPE;
2659 bt = t & VT_BTYPE;
2660 buf[0] = '\0';
2661 if (t & VT_CONSTANT)
2662 pstrcat(buf, buf_size, "const ");
2663 if (t & VT_VOLATILE)
2664 pstrcat(buf, buf_size, "volatile ");
2665 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2666 pstrcat(buf, buf_size, "unsigned ");
2667 else if (t & VT_DEFSIGN)
2668 pstrcat(buf, buf_size, "signed ");
2669 switch(bt) {
2670 case VT_VOID:
2671 tstr = "void";
2672 goto add_tstr;
2673 case VT_BOOL:
2674 tstr = "_Bool";
2675 goto add_tstr;
2676 case VT_BYTE:
2677 tstr = "char";
2678 goto add_tstr;
2679 case VT_SHORT:
2680 tstr = "short";
2681 goto add_tstr;
2682 case VT_INT:
2683 tstr = "int";
2684 goto add_tstr;
2685 case VT_LONG:
2686 tstr = "long";
2687 goto add_tstr;
2688 case VT_LLONG:
2689 tstr = "long long";
2690 goto add_tstr;
2691 case VT_FLOAT:
2692 tstr = "float";
2693 goto add_tstr;
2694 case VT_DOUBLE:
2695 tstr = "double";
2696 goto add_tstr;
2697 case VT_LDOUBLE:
2698 tstr = "long double";
2699 add_tstr:
2700 pstrcat(buf, buf_size, tstr);
2701 break;
2702 case VT_ENUM:
2703 case VT_STRUCT:
2704 if (bt == VT_STRUCT)
2705 tstr = "struct ";
2706 else
2707 tstr = "enum ";
2708 pstrcat(buf, buf_size, tstr);
2709 v = type->ref->v & ~SYM_STRUCT;
2710 if (v >= SYM_FIRST_ANOM)
2711 pstrcat(buf, buf_size, "<anonymous>");
2712 else
2713 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2714 break;
2715 case VT_FUNC:
2716 s = type->ref;
2717 type_to_str(buf, buf_size, &s->type, varstr);
2718 pstrcat(buf, buf_size, "(");
2719 sa = s->next;
2720 while (sa != NULL) {
2721 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2722 pstrcat(buf, buf_size, buf1);
2723 sa = sa->next;
2724 if (sa)
2725 pstrcat(buf, buf_size, ", ");
2727 pstrcat(buf, buf_size, ")");
2728 goto no_var;
2729 case VT_PTR:
2730 s = type->ref;
2731 if (t & VT_ARRAY) {
2732 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2733 type_to_str(buf, buf_size, &s->type, buf1);
2734 goto no_var;
2736 pstrcpy(buf1, sizeof(buf1), "*");
2737 if (t & VT_CONSTANT)
2738 pstrcat(buf1, buf_size, "const ");
2739 if (t & VT_VOLATILE)
2740 pstrcat(buf1, buf_size, "volatile ");
2741 if (varstr)
2742 pstrcat(buf1, sizeof(buf1), varstr);
2743 type_to_str(buf, buf_size, &s->type, buf1);
2744 goto no_var;
2746 if (varstr) {
2747 pstrcat(buf, buf_size, " ");
2748 pstrcat(buf, buf_size, varstr);
2750 no_var: ;
2753 /* verify type compatibility to store vtop in 'dt' type, and generate
2754 casts if needed. */
2755 static void gen_assign_cast(CType *dt)
2757 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2758 char buf1[256], buf2[256];
2759 int dbt, sbt;
2761 st = &vtop->type; /* source type */
2762 dbt = dt->t & VT_BTYPE;
2763 sbt = st->t & VT_BTYPE;
2764 if (sbt == VT_VOID || dbt == VT_VOID) {
2765 if (sbt == VT_VOID && dbt == VT_VOID)
2766 ; /*
2767 It is Ok if both are void
2768 A test program:
2769 void func1() {}
2770 void func2() {
2771 return func1();
2773 gcc accepts this program
2775 else
2776 tcc_error("cannot cast from/to void");
2778 if (dt->t & VT_CONSTANT)
2779 tcc_warning("assignment of read-only location");
2780 switch(dbt) {
2781 case VT_PTR:
2782 /* special cases for pointers */
2783 /* '0' can also be a pointer */
2784 if (is_null_pointer(vtop))
2785 goto type_ok;
2786 /* accept implicit pointer to integer cast with warning */
2787 if (is_integer_btype(sbt)) {
2788 tcc_warning("assignment makes pointer from integer without a cast");
2789 goto type_ok;
2791 type1 = pointed_type(dt);
2792 /* a function is implicitly a function pointer */
2793 if (sbt == VT_FUNC) {
2794 if ((type1->t & VT_BTYPE) != VT_VOID &&
2795 !is_compatible_types(pointed_type(dt), st))
2796 tcc_warning("assignment from incompatible pointer type");
2797 goto type_ok;
2799 if (sbt != VT_PTR)
2800 goto error;
2801 type2 = pointed_type(st);
2802 if ((type1->t & VT_BTYPE) == VT_VOID ||
2803 (type2->t & VT_BTYPE) == VT_VOID) {
2804 /* void * can match anything */
2805 } else {
2806 /* exact type match, except for qualifiers */
2807 tmp_type1 = *type1;
2808 tmp_type2 = *type2;
2809 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2810 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2811 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2812 /* Like GCC, don't warn by default for mere changes
2813 in pointer target signedness. Do warn for different
2814 base types, though, in particular for unsigned enums
2815 and signed int targets. */
2816 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2817 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2818 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2820 else
2821 tcc_warning("assignment from incompatible pointer type");
2824 /* check const and volatile */
2825 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2826 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2827 tcc_warning("assignment discards qualifiers from pointer target type");
2828 break;
2829 case VT_BYTE:
2830 case VT_SHORT:
2831 case VT_INT:
2832 case VT_LLONG:
2833 if (sbt == VT_PTR || sbt == VT_FUNC) {
2834 tcc_warning("assignment makes integer from pointer without a cast");
2835 } else if (sbt == VT_STRUCT) {
2836 goto case_VT_STRUCT;
2838 /* XXX: more tests */
2839 break;
2840 case VT_STRUCT:
2841 case_VT_STRUCT:
2842 tmp_type1 = *dt;
2843 tmp_type2 = *st;
2844 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2846 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2847 error:
2848 type_to_str(buf1, sizeof(buf1), st, NULL);
2849 type_to_str(buf2, sizeof(buf2), dt, NULL);
2850 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2852 break;
2854 type_ok:
2855 gen_cast(dt);
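/* Illustrative examples for gen_assign_cast() above (not part of the
   original source): 'int *p; p = 3;' triggers "assignment makes pointer
   from integer without a cast", and 'char *q; const char *cq; q = cq;'
   triggers "assignment discards qualifiers from pointer target type";
   in both cases the assignment is still generated after the warning. */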
2858 /* store vtop in lvalue pushed on stack */
2859 ST_FUNC void vstore(void)
2861 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2863 ft = vtop[-1].type.t;
2864 sbt = vtop->type.t & VT_BTYPE;
2865 dbt = ft & VT_BTYPE;
2866 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2867 (sbt == VT_INT && dbt == VT_SHORT))
2868 && !(vtop->type.t & VT_BITFIELD)) {
2869 /* optimize char/short casts */
2870 delayed_cast = VT_MUSTCAST;
2871 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2872 ((1 << VT_STRUCT_SHIFT) - 1));
2873 /* XXX: factorize */
2874 if (ft & VT_CONSTANT)
2875 tcc_warning("assignment of read-only location");
2876 } else {
2877 delayed_cast = 0;
2878 if (!(ft & VT_BITFIELD))
2879 gen_assign_cast(&vtop[-1].type);
2882 if (sbt == VT_STRUCT) {
2883 /* if structure, only generate pointer */
2884 /* structure assignment : generate memcpy */
2885 /* XXX: optimize if small size */
2886 if (!nocode_wanted) {
2887 size = type_size(&vtop->type, &align);
2889 /* destination */
2890 vswap();
2891 vtop->type.t = VT_PTR;
2892 gaddrof();
2894 /* address of memcpy() */
2895 #ifdef TCC_ARM_EABI
2896 if(!(align & 7))
2897 vpush_global_sym(&func_old_type, TOK_memcpy8);
2898 else if(!(align & 3))
2899 vpush_global_sym(&func_old_type, TOK_memcpy4);
2900 else
2901 #endif
2902 /* Use memmove, rather than memcpy, as dest and src may be same: */
2903 vpush_global_sym(&func_old_type, TOK_memmove);
2905 vswap();
2906 /* source */
2907 vpushv(vtop - 2);
2908 vtop->type.t = VT_PTR;
2909 gaddrof();
2910 /* type size */
2911 vpushi(size);
2912 gfunc_call(3);
2913 } else {
2914 vswap();
2915 vpop();
2917 /* leave source on stack */
2918 } else if (ft & VT_BITFIELD) {
2919 /* bitfield store handling */
2921 /* save lvalue as expression result (example: s.b = s.a = n;) */
2922 vdup(), vtop[-1] = vtop[-2];
2924 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2925 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2926 /* remove bit field info to avoid loops */
2927 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2929 if((ft & VT_BTYPE) == VT_BOOL) {
2930 gen_cast(&vtop[-1].type);
2931 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2934 /* duplicate destination */
2935 vdup();
2936 vtop[-1] = vtop[-2];
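/* Illustrative note (not part of the original source): for a 3-bit field
   at bit_pos 2, the sequence below computes
   ((src & 7) << 2) | (dest & ~(7 << 2)) and stores it back, i.e. a plain
   read-modify-write of the containing word. */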
2938 /* mask and shift source */
2939 if((ft & VT_BTYPE) != VT_BOOL) {
2940 if((ft & VT_BTYPE) == VT_LLONG) {
2941 vpushll((1ULL << bit_size) - 1ULL);
2942 } else {
2943 vpushi((1 << bit_size) - 1);
2945 gen_op('&');
2947 vpushi(bit_pos);
2948 gen_op(TOK_SHL);
2949 /* load destination, mask and or with source */
2950 vswap();
2951 if((ft & VT_BTYPE) == VT_LLONG) {
2952 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2953 } else {
2954 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2956 gen_op('&');
2957 gen_op('|');
2958 /* store result */
2959 vstore();
2960 /* ... and discard */
2961 vpop();
2963 } else {
2964 if (!nocode_wanted) {
2965 #ifdef CONFIG_TCC_BCHECK
2966 /* bound check case */
2967 if (vtop[-1].r & VT_MUSTBOUND) {
2968 vswap();
2969 gbound();
2970 vswap();
2972 #endif
2973 rc = RC_INT;
2974 if (is_float(ft)) {
2975 rc = RC_FLOAT;
2976 #ifdef TCC_TARGET_X86_64
2977 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2978 rc = RC_ST0;
2979 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2980 rc = RC_FRET;
2982 #endif
2984 r = gv(rc); /* generate value */
2985 /* if lvalue was saved on stack, must read it */
2986 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2987 SValue sv;
2988 t = get_reg(RC_INT);
2989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2990 sv.type.t = VT_PTR;
2991 #else
2992 sv.type.t = VT_INT;
2993 #endif
2994 sv.r = VT_LOCAL | VT_LVAL;
2995 sv.c.i = vtop[-1].c.i;
2996 load(t, &sv);
2997 vtop[-1].r = t | VT_LVAL;
2999 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3000 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3001 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3002 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3003 #else
3004 if ((ft & VT_BTYPE) == VT_LLONG) {
3005 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3006 #endif
3007 vtop[-1].type.t = load_type;
3008 store(r, vtop - 1);
3009 vswap();
3010 /* convert to int to increment easily */
3011 vtop->type.t = addr_type;
3012 gaddrof();
3013 vpushi(load_size);
3014 gen_op('+');
3015 vtop->r |= VT_LVAL;
3016 vswap();
3017 vtop[-1].type.t = load_type;
3018 /* XXX: it works because r2 is spilled last ! */
3019 store(vtop->r2, vtop - 1);
3020 } else {
3021 store(r, vtop - 1);
3024 vswap();
3025 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3026 vtop->r |= delayed_cast;
3030 /* pre/post increment/decrement; 'post' selects the post form, c is the token ++ or -- */
3031 ST_FUNC void inc(int post, int c)
3033 test_lvalue();
3034 vdup(); /* save lvalue */
3035 if (post) {
3036 if (!nocode_wanted)
3037 gv_dup(); /* duplicate value */
3038 else
3039 vdup(); /* duplicate value */
3040 vrotb(3);
3041 vrotb(3);
3043 /* add constant */
3044 vpushi(c - TOK_MID);
3045 gen_op('+');
3046 vstore(); /* store value */
3047 if (post)
3048 vpop(); /* if post op, return saved value */
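/* Illustrative note (assumption, not in the original source): this relies
   on TOK_MID sitting numerically between TOK_DEC and TOK_INC, so that
   'c - TOK_MID' evaluates to +1 for '++' and -1 for '--'; for a post
   operation the duplicated original value is what remains on the value
   stack after the final vpop(). */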
3051 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3053 /* read the string */
3054 if (tok != TOK_STR)
3055 expect(msg);
3056 cstr_new(astr);
3057 while (tok == TOK_STR) {
3058 /* XXX: add \0 handling too ? */
3059 cstr_cat(astr, tokc.str.data, -1);
3060 next();
3062 cstr_ccat(astr, '\0');
3065 /* If I is >= 1 and a power of two, returns log2(i)+1.
3066 If I is 0 returns 0. */
3067 static int exact_log2p1(int i)
3069 int ret;
3070 if (!i)
3071 return 0;
3072 for (ret = 1; i >= 1 << 8; ret += 8)
3073 i >>= 8;
3074 if (i >= 1 << 4)
3075 ret += 4, i >>= 4;
3076 if (i >= 1 << 2)
3077 ret += 2, i >>= 2;
3078 if (i >= 1 << 1)
3079 ret++;
3080 return ret;
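/* Illustrative values (not part of the original source):
   exact_log2p1(0) == 0, exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(4096) == 13, i.e. log2(i) + 1 for powers of two. */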
3083 /* Parse GNUC __attribute__ extension. Currently, the following
3084 extensions are recognized:
3085 - aligned(n) : set data/function alignment.
3086 - packed : force data alignment to 1
3087 - section(x) : generate data/code in this section.
3088 - unused : currently ignored, but may be used someday.
3089 - regparm(n) : pass function parameters in registers (i386 only)
3091 static void parse_attribute(AttributeDef *ad)
3093 int t, n;
3094 CString astr;
3096 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3097 next();
3098 skip('(');
3099 skip('(');
3100 while (tok != ')') {
3101 if (tok < TOK_IDENT)
3102 expect("attribute name");
3103 t = tok;
3104 next();
3105 switch(t) {
3106 case TOK_SECTION1:
3107 case TOK_SECTION2:
3108 skip('(');
3109 parse_mult_str(&astr, "section name");
3110 ad->section = find_section(tcc_state, (char *)astr.data);
3111 skip(')');
3112 cstr_free(&astr);
3113 break;
3114 case TOK_ALIAS1:
3115 case TOK_ALIAS2:
3116 skip('(');
3117 parse_mult_str(&astr, "alias(\"target\")");
3118 ad->alias_target = /* save string as token, for later */
3119 tok_alloc((char*)astr.data, astr.size-1)->tok;
3120 skip(')');
3121 cstr_free(&astr);
3122 break;
3123 case TOK_VISIBILITY1:
3124 case TOK_VISIBILITY2:
3125 skip('(');
3126 parse_mult_str(&astr,
3127 "visibility(\"default|hidden|internal|protected\")");
3128 if (!strcmp (astr.data, "default"))
3129 ad->a.visibility = STV_DEFAULT;
3130 else if (!strcmp (astr.data, "hidden"))
3131 ad->a.visibility = STV_HIDDEN;
3132 else if (!strcmp (astr.data, "internal"))
3133 ad->a.visibility = STV_INTERNAL;
3134 else if (!strcmp (astr.data, "protected"))
3135 ad->a.visibility = STV_PROTECTED;
3136 else
3137 expect("visibility(\"default|hidden|internal|protected\")");
3138 skip(')');
3139 cstr_free(&astr);
3140 break;
3141 case TOK_ALIGNED1:
3142 case TOK_ALIGNED2:
3143 if (tok == '(') {
3144 next();
3145 n = expr_const();
3146 if (n <= 0 || (n & (n - 1)) != 0)
3147 tcc_error("alignment must be a positive power of two");
3148 skip(')');
3149 } else {
3150 n = MAX_ALIGN;
3152 ad->a.aligned = exact_log2p1(n);
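/* Illustrative note (not part of the original source): the alignment is
   stored in log2 form, e.g. aligned(16) stores 5 in ad->a.aligned, and
   struct_layout() later recovers it as 1 << (aligned - 1); the check
   below rejects values whose encoding does not round-trip, i.e. requests
   too large for the bit-field holding 'aligned'. */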
3153 if (n != 1 << (ad->a.aligned - 1))
3154 tcc_error("alignment of %d is larger than implemented", n);
3155 break;
3156 case TOK_PACKED1:
3157 case TOK_PACKED2:
3158 ad->a.packed = 1;
3159 break;
3160 case TOK_WEAK1:
3161 case TOK_WEAK2:
3162 ad->a.weak = 1;
3163 break;
3164 case TOK_UNUSED1:
3165 case TOK_UNUSED2:
3166 /* currently, no need to handle it because tcc does not
3167 track unused objects */
3168 break;
3169 case TOK_NORETURN1:
3170 case TOK_NORETURN2:
3171 /* currently ignored: tcc makes no use of the noreturn
3172 information */
3173 break;
3174 case TOK_CDECL1:
3175 case TOK_CDECL2:
3176 case TOK_CDECL3:
3177 ad->a.func_call = FUNC_CDECL;
3178 break;
3179 case TOK_STDCALL1:
3180 case TOK_STDCALL2:
3181 case TOK_STDCALL3:
3182 ad->a.func_call = FUNC_STDCALL;
3183 break;
3184 #ifdef TCC_TARGET_I386
3185 case TOK_REGPARM1:
3186 case TOK_REGPARM2:
3187 skip('(');
3188 n = expr_const();
3189 if (n > 3)
3190 n = 3;
3191 else if (n < 0)
3192 n = 0;
3193 if (n > 0)
3194 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3195 skip(')');
3196 break;
3197 case TOK_FASTCALL1:
3198 case TOK_FASTCALL2:
3199 case TOK_FASTCALL3:
3200 ad->a.func_call = FUNC_FASTCALLW;
3201 break;
3202 #endif
3203 case TOK_MODE:
3204 skip('(');
3205 switch(tok) {
3206 case TOK_MODE_DI:
3207 ad->a.mode = VT_LLONG + 1;
3208 break;
3209 case TOK_MODE_QI:
3210 ad->a.mode = VT_BYTE + 1;
3211 break;
3212 case TOK_MODE_HI:
3213 ad->a.mode = VT_SHORT + 1;
3214 break;
3215 case TOK_MODE_SI:
3216 case TOK_MODE_word:
3217 ad->a.mode = VT_INT + 1;
3218 break;
3219 default:
3220 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3221 break;
3223 next();
3224 skip(')');
3225 break;
3226 case TOK_DLLEXPORT:
3227 ad->a.func_export = 1;
3228 break;
3229 case TOK_DLLIMPORT:
3230 ad->a.func_import = 1;
3231 break;
3232 default:
3233 if (tcc_state->warn_unsupported)
3234 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3235 /* skip parameters */
3236 if (tok == '(') {
3237 int parenthesis = 0;
3238 do {
3239 if (tok == '(')
3240 parenthesis++;
3241 else if (tok == ')')
3242 parenthesis--;
3243 next();
3244 } while (parenthesis && tok != -1);
3246 break;
3248 if (tok != ',')
3249 break;
3250 next();
3252 skip(')');
3253 skip(')');
3257 static Sym * find_field (CType *type, int v)
3259 Sym *s = type->ref;
3260 v |= SYM_FIELD;
3261 while ((s = s->next) != NULL) {
3262 if ((s->v & SYM_FIELD) &&
3263 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3264 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3265 Sym *ret = find_field (&s->type, v);
3266 if (ret)
3267 return ret;
3269 if (s->v == v)
3270 break;
3272 return s;
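/* Illustrative note (not part of the original source): given
   'struct S { struct { int x; }; } s;', the loop above recurses into the
   anonymous struct member (whose name is an anonymous symbol
   >= SYM_FIRST_ANOM) while scanning S's members and finds 'x' there,
   so 's.x' resolves as expected. */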
3275 static void struct_add_offset (Sym *s, int offset)
3277 while ((s = s->next) != NULL) {
3278 if ((s->v & SYM_FIELD) &&
3279 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3280 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3281 struct_add_offset(s->type.ref, offset);
3282 } else
3283 s->c += offset;
3287 static void struct_layout(CType *type, AttributeDef *ad)
3289 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3290 int pcc = !tcc_state->ms_bitfields;
3291 Sym *f;
3292 if (ad->a.aligned)
3293 maxalign = 1 << (ad->a.aligned - 1);
3294 else
3295 maxalign = 1;
3296 offset = 0;
3297 c = 0;
3298 bit_pos = 0;
3299 prevbt = VT_STRUCT; /* make it never match */
3300 prev_bit_size = 0;
3301 for (f = type->ref->next; f; f = f->next) {
3302 int typealign, bit_size;
3303 int size = type_size(&f->type, &typealign);
3304 if (f->type.t & VT_BITFIELD)
3305 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3306 else
3307 bit_size = -1;
3308 if (bit_size == 0 && pcc) {
3309 /* Zero-width bit-fields in PCC mode aren't affected
3310 by any packing (attribute or pragma). */
3311 align = typealign;
3312 } else if (f->r > 1) {
3313 align = f->r;
3314 } else if (ad->a.packed || f->r == 1) {
3315 align = 1;
3316 /* Packed fields or packed records don't let the base type
3317 influence the records type alignment. */
3318 typealign = 1;
3319 } else {
3320 align = typealign;
3322 if (type->ref->type.t != TOK_STRUCT) {
3323 if (pcc && bit_size >= 0)
3324 size = (bit_size + 7) >> 3;
3325 /* Bit position is already zero from our caller. */
3326 offset = 0;
3327 if (size > c)
3328 c = size;
3329 } else if (bit_size < 0) {
3330 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3331 prevbt = VT_STRUCT;
3332 prev_bit_size = 0;
3333 c = (c + addbytes + align - 1) & -align;
3334 offset = c;
3335 if (size > 0)
3336 c += size;
3337 bit_pos = 0;
3338 } else {
3339 /* A bit-field. Layout is more complicated. There are two
3340 options TCC implements: PCC compatible and MS compatible
3341 (PCC compatible is what GCC uses for almost all targets).
3342 In PCC layout the overall size of the struct (in c) is
3343 _excluding_ the current run of bit-fields (that is,
3344 there's at least additional bit_pos bits after c). In
3345 MS layout c does include the current run of bit-fields.
3347 This matters for calculating the natural alignment buckets
3348 in PCC mode. */
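/* Illustrative example (assumption, not in the original source): for
   'struct { char a:4; int b:4; }', PCC layout places 'b' right after 'a'
   in the same storage unit (sizeof is typically 4, due to int alignment),
   while MS layout starts a new 'int' unit for 'b' because the base type
   changes (sizeof is typically 8). */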
3350 /* 'align' will be used to influence records alignment,
3351 so it's the max of specified and type alignment, except
3352 in certain cases that depend on the mode. */
3353 if (align < typealign)
3354 align = typealign;
3355 if (pcc) {
3356 /* In PCC layout a non-packed bit-field is placed adjacent
3357 to the preceding bit-fields, except if it would overflow
3358 its container (depending on base type) or it's a zero-width
3359 bit-field. Packed non-zero-width bit-fields always are
3360 placed adjacent. */
3361 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3362 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3363 if (bit_size == 0 ||
3364 (typealign != 1 &&
3365 (ofs2 / (typealign * 8)) > (size/typealign))) {
3366 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3367 bit_pos = 0;
3369 offset = c;
3370 /* In PCC layout named bit-fields influence the alignment
3371 of the containing struct using the base types alignment,
3372 except for packed fields (which here have correct
3373 align/typealign). */
3374 if ((f->v & SYM_FIRST_ANOM))
3375 align = 1;
3376 } else {
3377 bt = f->type.t & VT_BTYPE;
3378 if ((bit_pos + bit_size > size * 8) ||
3379 (bit_size > 0) == (bt != prevbt)) {
3380 c = (c + typealign - 1) & -typealign;
3381 offset = c;
3382 bit_pos = 0;
3383 /* In MS bitfield mode a bit-field run always uses
3384 at least as many bits as the underlying type.
3385 To start a new run it's also required that this
3386 or the last bit-field had non-zero width. */
3387 if (bit_size || prev_bit_size)
3388 c += size;
3390 /* In MS layout the records alignment is normally
3391 influenced by the field, except for a zero-width
3392 field at the start of a run (but by further zero-width
3393 fields it is again). */
3394 if (bit_size == 0 && prevbt != bt)
3395 align = 1;
3396 prevbt = bt;
3397 prev_bit_size = bit_size;
3399 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3400 | (bit_pos << VT_STRUCT_SHIFT);
3401 bit_pos += bit_size;
3402 if (pcc && bit_pos >= size * 8) {
3403 c += size;
3404 bit_pos -= size * 8;
3407 if (align > maxalign)
3408 maxalign = align;
3409 #if 0
3410 printf("set field %s offset=%d c=%d",
3411 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3412 if (f->type.t & VT_BITFIELD) {
3413 printf(" pos=%d size=%d",
3414 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3415 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3417 printf("\n");
3418 #endif
3420 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3421 Sym *ass;
3422 /* An anonymous struct/union. Adjust member offsets
3423 to reflect the real offset of our containing struct.
3424 Also set the offset of this anon member inside
3425 the outer struct to be zero. Via this it
3426 works when accessing the field offset directly
3427 (from base object), as well as when recursing
3428 members in initializer handling. */
3429 int v2 = f->type.ref->v;
3430 if (!(v2 & SYM_FIELD) &&
3431 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3432 Sym **pps;
3433 /* This happens only with MS extensions. The
3434 anon member has a named struct type, so it
3435 potentially is shared with other references.
3436 We need to unshare members so we can modify
3437 them. */
3438 ass = f->type.ref;
3439 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3440 &f->type.ref->type, 0,
3441 f->type.ref->c);
3442 pps = &f->type.ref->next;
3443 while ((ass = ass->next) != NULL) {
3444 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3445 pps = &((*pps)->next);
3447 *pps = NULL;
3449 struct_add_offset(f->type.ref, offset);
3450 f->c = 0;
3451 } else {
3452 f->c = offset;
3455 f->r = 0;
3457 /* store size and alignment */
3458 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3459 + maxalign - 1) & -maxalign;
3460 type->ref->r = maxalign;
3463 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3464 static void struct_decl(CType *type, AttributeDef *ad, int u)
3466 int a, v, size, align, flexible, alignoverride;
3467 long c;
3468 int bit_size, bsize, bt;
3469 Sym *s, *ss, **ps;
3470 AttributeDef ad1;
3471 CType type1, btype;
3473 a = tok; /* save decl type */
3474 next();
3475 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3476 parse_attribute(ad);
3477 if (tok != '{') {
3478 v = tok;
3479 next();
3480 /* struct already defined ? return it */
3481 if (v < TOK_IDENT)
3482 expect("struct/union/enum name");
3483 s = struct_find(v);
3484 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3485 if (s->type.t != a)
3486 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3487 goto do_decl;
3489 } else {
3490 v = anon_sym++;
3492 /* Record the original enum/struct/union token. */
3493 type1.t = a;
3494 type1.ref = NULL;
3495 /* we put an undefined size for struct/union */
3496 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3497 s->r = 0; /* default alignment is zero as gcc */
3498 /* put struct/union/enum name in type */
3499 do_decl:
3500 type->t = u;
3501 type->ref = s;
3503 if (tok == '{') {
3504 next();
3505 if (s->c != -1)
3506 tcc_error("struct/union/enum already defined");
3507 /* cannot be empty */
3508 c = 0;
3509 /* non empty enums are not allowed */
3510 if (a == TOK_ENUM) {
3511 int seen_neg = 0;
3512 int seen_wide = 0;
3513 for(;;) {
3514 CType *t = &int_type;
3515 v = tok;
3516 if (v < TOK_UIDENT)
3517 expect("identifier");
3518 ss = sym_find(v);
3519 if (ss && !local_stack)
3520 tcc_error("redefinition of enumerator '%s'",
3521 get_tok_str(v, NULL));
3522 next();
3523 if (tok == '=') {
3524 next();
3525 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3526 c = expr_const64();
3527 #else
3528 /* We really want to support long long enums
3529 on i386 as well, but the Sym structure only
3530 holds a 'long' for associated constants,
3531 and enlarging it would bump its size (no
3532 available padding). So punt for now. */
3533 c = expr_const();
3534 #endif
3536 if (c < 0)
3537 seen_neg = 1;
3538 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3539 seen_wide = 1, t = &size_type;
3540 /* enum symbols have static storage */
3541 ss = sym_push(v, t, VT_CONST, c);
3542 ss->type.t |= VT_STATIC;
3543 if (tok != ',')
3544 break;
3545 next();
3546 c++;
3547 /* NOTE: we accept a trailing comma */
3548 if (tok == '}')
3549 break;
3551 if (!seen_neg)
3552 s->a.unsigned_enum = 1;
3553 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3554 skip('}');
3555 } else {
3556 ps = &s->next;
3557 flexible = 0;
3558 while (tok != '}') {
3559 if (!parse_btype(&btype, &ad1)) {
3560 skip(';');
3561 continue;
3563 while (1) {
3564 if (flexible)
3565 tcc_error("flexible array member '%s' not at the end of struct",
3566 get_tok_str(v, NULL));
3567 bit_size = -1;
3568 v = 0;
3569 type1 = btype;
3570 if (tok != ':') {
3571 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3572 if (v == 0) {
3573 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3574 expect("identifier");
3575 else {
3576 int v = btype.ref->v;
3577 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3578 if (tcc_state->ms_extensions == 0)
3579 expect("identifier");
3583 if (type_size(&type1, &align) < 0) {
3584 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3585 flexible = 1;
3586 else
3587 tcc_error("field '%s' has incomplete type",
3588 get_tok_str(v, NULL));
3590 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3591 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3592 tcc_error("invalid type for '%s'",
3593 get_tok_str(v, NULL));
3595 if (tok == ':') {
3596 next();
3597 bit_size = expr_const();
3598 /* XXX: handle v = 0 case for messages */
3599 if (bit_size < 0)
3600 tcc_error("negative width in bit-field '%s'",
3601 get_tok_str(v, NULL));
3602 if (v && bit_size == 0)
3603 tcc_error("zero width for bit-field '%s'",
3604 get_tok_str(v, NULL));
3605 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3606 parse_attribute(&ad1);
3608 size = type_size(&type1, &align);
3609 /* Only remember non-default alignment. */
3610 alignoverride = 0;
3611 if (ad1.a.aligned) {
3612 int speca = 1 << (ad1.a.aligned - 1);
3613 if (align < speca)
3614 alignoverride = speca;
3615 } else if (ad1.a.packed || ad->a.packed) {
3616 alignoverride = 1;
3617 } else if (*tcc_state->pack_stack_ptr) {
3618 if (align > *tcc_state->pack_stack_ptr)
3619 alignoverride = *tcc_state->pack_stack_ptr;
3621 if (bit_size >= 0) {
3622 bt = type1.t & VT_BTYPE;
3623 if (bt != VT_INT &&
3624 bt != VT_BYTE &&
3625 bt != VT_SHORT &&
3626 bt != VT_BOOL &&
3627 bt != VT_ENUM &&
3628 bt != VT_LLONG)
3629 tcc_error("bitfields must have scalar type");
3630 bsize = size * 8;
3631 if (bit_size > bsize) {
3632 tcc_error("width of '%s' exceeds its type",
3633 get_tok_str(v, NULL));
3634 } else if (bit_size == bsize) {
3635 /* no need for bit fields */
3637 } else {
3638 type1.t |= VT_BITFIELD |
3639 (0 << VT_STRUCT_SHIFT) |
3640 (bit_size << (VT_STRUCT_SHIFT + 6));
3643 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3644 /* Remember we've seen a real field to check
3645 for placement of flexible array member. */
3646 c = 1;
3648 /* If member is a struct or bit-field, enforce
3649 placing into the struct (as anonymous). */
3650 if (v == 0 &&
3651 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3652 bit_size >= 0)) {
3653 v = anon_sym++;
3655 if (v) {
3656 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3657 *ps = ss;
3658 ps = &ss->next;
3660 if (tok == ';' || tok == TOK_EOF)
3661 break;
3662 skip(',');
3664 skip(';');
3666 skip('}');
3667 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3668 parse_attribute(ad);
3669 struct_layout(type, ad);
3674 /* return 1 if the basic type is a size modifier (short, long, long long) */
3675 ST_FUNC int is_btype_size(int bt)
3677 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3680 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3681 are added to the element type, copied because it could be a typedef. */
3682 static void parse_btype_qualify(CType *type, int qualifiers)
3684 while (type->t & VT_ARRAY) {
3685 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3686 type = &type->ref->type;
3688 type->t |= qualifiers;
3691 /* return 0 if no type declaration. Otherwise, return the basic type
3692 and skip it.
3694 static int parse_btype(CType *type, AttributeDef *ad)
3696 int t, u, bt_size, complete, type_found, typespec_found;
3697 Sym *s;
3698 CType type1;
3700 memset(ad, 0, sizeof(AttributeDef));
3701 complete = 0;
3702 type_found = 0;
3703 typespec_found = 0;
3704 t = 0;
3705 while(1) {
3706 switch(tok) {
3707 case TOK_EXTENSION:
3708 /* currently, we simply ignore __extension__ */
3709 next();
3710 continue;
3712 /* basic types */
3713 case TOK_CHAR:
3714 u = VT_BYTE;
3715 basic_type:
3716 next();
3717 basic_type1:
3718 if (complete)
3719 tcc_error("too many basic types");
3720 t |= u;
3721 bt_size = is_btype_size (u & VT_BTYPE);
3722 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3723 complete = 1;
3724 typespec_found = 1;
3725 break;
3726 case TOK_VOID:
3727 u = VT_VOID;
3728 goto basic_type;
3729 case TOK_SHORT:
3730 u = VT_SHORT;
3731 goto basic_type;
3732 case TOK_INT:
3733 u = VT_INT;
3734 goto basic_type;
3735 case TOK_LONG:
3736 next();
3737 if ((t & VT_BTYPE) == VT_DOUBLE) {
3738 #ifndef TCC_TARGET_PE
3739 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3740 #endif
3741 } else if ((t & VT_BTYPE) == VT_LONG) {
3742 t = (t & ~VT_BTYPE) | VT_LLONG;
3743 } else {
3744 u = VT_LONG;
3745 goto basic_type1;
3747 break;
3748 #ifdef TCC_TARGET_ARM64
3749 case TOK_UINT128:
3750 /* GCC's __uint128_t appears in some Linux header files. Make it a
3751 synonym for long double to get the size and alignment right. */
3752 u = VT_LDOUBLE;
3753 goto basic_type;
3754 #endif
3755 case TOK_BOOL:
3756 u = VT_BOOL;
3757 goto basic_type;
3758 case TOK_FLOAT:
3759 u = VT_FLOAT;
3760 goto basic_type;
3761 case TOK_DOUBLE:
3762 next();
3763 if ((t & VT_BTYPE) == VT_LONG) {
3764 #ifdef TCC_TARGET_PE
3765 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3766 #else
3767 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3768 #endif
3769 } else {
3770 u = VT_DOUBLE;
3771 goto basic_type1;
3773 break;
3774 case TOK_ENUM:
3775 struct_decl(&type1, ad, VT_ENUM);
3776 basic_type2:
3777 u = type1.t;
3778 type->ref = type1.ref;
3779 goto basic_type1;
3780 case TOK_STRUCT:
3781 case TOK_UNION:
3782 struct_decl(&type1, ad, VT_STRUCT);
3783 goto basic_type2;
3785 /* type modifiers */
3786 case TOK_CONST1:
3787 case TOK_CONST2:
3788 case TOK_CONST3:
3789 type->t = t;
3790 parse_btype_qualify(type, VT_CONSTANT);
3791 t = type->t;
3792 next();
3793 break;
3794 case TOK_VOLATILE1:
3795 case TOK_VOLATILE2:
3796 case TOK_VOLATILE3:
3797 type->t = t;
3798 parse_btype_qualify(type, VT_VOLATILE);
3799 t = type->t;
3800 next();
3801 break;
3802 case TOK_SIGNED1:
3803 case TOK_SIGNED2:
3804 case TOK_SIGNED3:
3805 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3806 tcc_error("signed and unsigned modifier");
3807 typespec_found = 1;
3808 t |= VT_DEFSIGN;
3809 next();
3810 break;
3811 case TOK_REGISTER:
3812 case TOK_AUTO:
3813 case TOK_RESTRICT1:
3814 case TOK_RESTRICT2:
3815 case TOK_RESTRICT3:
3816 next();
3817 break;
3818 case TOK_UNSIGNED:
3819 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3820 tcc_error("signed and unsigned modifier");
3821 t |= VT_DEFSIGN | VT_UNSIGNED;
3822 next();
3823 typespec_found = 1;
3824 break;
3826 /* storage */
3827 case TOK_EXTERN:
3828 t |= VT_EXTERN;
3829 next();
3830 break;
3831 case TOK_STATIC:
3832 t |= VT_STATIC;
3833 next();
3834 break;
3835 case TOK_TYPEDEF:
3836 t |= VT_TYPEDEF;
3837 next();
3838 break;
3839 case TOK_INLINE1:
3840 case TOK_INLINE2:
3841 case TOK_INLINE3:
3842 t |= VT_INLINE;
3843 next();
3844 break;
3846 /* GNUC attribute */
3847 case TOK_ATTRIBUTE1:
3848 case TOK_ATTRIBUTE2:
3849 parse_attribute(ad);
3850 if (ad->a.mode) {
3851 u = ad->a.mode -1;
3852 t = (t & ~VT_BTYPE) | u;
3854 break;
3855 /* GNUC typeof */
3856 case TOK_TYPEOF1:
3857 case TOK_TYPEOF2:
3858 case TOK_TYPEOF3:
3859 next();
3860 parse_expr_type(&type1);
3861 /* remove all storage modifiers except typedef */
3862 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3863 goto basic_type2;
3864 default:
3865 if (typespec_found)
3866 goto the_end;
3867 s = sym_find(tok);
3868 if (!s || !(s->type.t & VT_TYPEDEF))
3869 goto the_end;
3871 type->t = ((s->type.t & ~VT_TYPEDEF) |
3872 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3873 type->ref = s->type.ref;
3874 if (t & (VT_CONSTANT | VT_VOLATILE))
3875 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3876 t = type->t;
3878 if (s->r) {
3879 /* get attributes from typedef */
3880 if (0 == ad->a.aligned)
3881 ad->a.aligned = s->a.aligned;
3882 if (0 == ad->a.func_call)
3883 ad->a.func_call = s->a.func_call;
3884 ad->a.packed |= s->a.packed;
3886 next();
3887 typespec_found = 1;
3888 break;
3890 type_found = 1;
3892 the_end:
3893 if (tcc_state->char_is_unsigned) {
3894 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3895 t |= VT_UNSIGNED;
3898 /* long is never used as type */
3899 if ((t & VT_BTYPE) == VT_LONG)
3900 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3901 defined TCC_TARGET_PE
3902 t = (t & ~VT_BTYPE) | VT_INT;
3903 #else
3904 t = (t & ~VT_BTYPE) | VT_LLONG;
3905 #endif
3906 type->t = t;
3907 return type_found;
3910 /* convert a function parameter type (array to pointer and function to
3911 function pointer) */
3912 static inline void convert_parameter_type(CType *pt)
3914 /* remove const and volatile qualifiers (XXX: const could be used
3915 to indicate a const function parameter) */
3916 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3917 /* array must be transformed to pointer according to ANSI C */
3918 pt->t &= ~VT_ARRAY;
3919 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3920 mk_pointer(pt);
3924 ST_FUNC void parse_asm_str(CString *astr)
3926 skip('(');
3927 parse_mult_str(astr, "string constant");
3930 /* Parse an asm label and return the token */
3931 static int asm_label_instr(void)
3933 int v;
3934 CString astr;
3936 next();
3937 parse_asm_str(&astr);
3938 skip(')');
3939 #ifdef ASM_DEBUG
3940 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3941 #endif
3942 v = tok_alloc(astr.data, astr.size - 1)->tok;
3943 cstr_free(&astr);
3944 return v;
3947 static void post_type(CType *type, AttributeDef *ad, int storage)
3949 int n, l, t1, arg_size, align;
3950 Sym **plast, *s, *first;
3951 AttributeDef ad1;
3952 CType pt;
3954 if (tok == '(') {
3955 /* function declaration */
3956 next();
3957 l = 0;
3958 first = NULL;
3959 plast = &first;
3960 arg_size = 0;
3961 if (tok != ')') {
3962 for(;;) {
3963 /* read param name and compute offset */
3964 if (l != FUNC_OLD) {
3965 if (!parse_btype(&pt, &ad1)) {
3966 if (l) {
3967 tcc_error("invalid type");
3968 } else {
3969 l = FUNC_OLD;
3970 goto old_proto;
3973 l = FUNC_NEW;
3974 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3975 break;
3976 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3977 if ((pt.t & VT_BTYPE) == VT_VOID)
3978 tcc_error("parameter declared as void");
3979 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3980 } else {
3981 old_proto:
3982 n = tok;
3983 if (n < TOK_UIDENT)
3984 expect("identifier");
3985 pt.t = VT_INT;
3986 next();
3988 convert_parameter_type(&pt);
3989 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3990 *plast = s;
3991 plast = &s->next;
3992 if (tok == ')')
3993 break;
3994 skip(',');
3995 if (l == FUNC_NEW && tok == TOK_DOTS) {
3996 l = FUNC_ELLIPSIS;
3997 next();
3998 break;
4002 /* if no parameters, then old type prototype */
4003 if (l == 0)
4004 l = FUNC_OLD;
4005 skip(')');
4006 /* NOTE: const is ignored in returned type as it has a special
4007 meaning in gcc / C++ */
4008 type->t &= ~VT_CONSTANT;
4009 /* some ancient pre-K&R C allows a function to return an array
4010 and the array brackets to be put after the arguments, such
4011 that "int c()[]" means something like "int[] c()" */
4012 if (tok == '[') {
4013 next();
4014 skip(']'); /* only handle simple "[]" */
4015 type->t |= VT_PTR;
4017 /* we push an anonymous symbol which will contain the function prototype */
4018 ad->a.func_args = arg_size;
4019 s = sym_push(SYM_FIELD, type, 0, l);
4020 s->a = ad->a;
4021 s->next = first;
4022 type->t = VT_FUNC;
4023 type->ref = s;
4024 } else if (tok == '[') {
4025 int saved_nocode_wanted = nocode_wanted;
4026 /* array definition */
4027 next();
4028 if (tok == TOK_RESTRICT1)
4029 next();
4030 n = -1;
4031 t1 = 0;
4032 if (tok != ']') {
4033 if (!local_stack || (storage & VT_STATIC))
4034 vpushi(expr_const());
4035 else {
4036 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4037 length must always be evaluated, even under nocode_wanted,
4038 so that its size slot is initialized (e.g. under sizeof
4039 or typeof). */
4040 nocode_wanted = 0;
4041 gexpr();
4043 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4044 n = vtop->c.i;
4045 if (n < 0)
4046 tcc_error("invalid array size");
4047 } else {
4048 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4049 tcc_error("size of variable length array should be an integer");
4050 t1 = VT_VLA;
4053 skip(']');
4054 /* parse next post type */
4055 post_type(type, ad, storage);
4056 if (type->t == VT_FUNC)
4057 tcc_error("declaration of an array of functions");
4058 t1 |= type->t & VT_VLA;
4060 if (t1 & VT_VLA) {
4061 loc -= type_size(&int_type, &align);
4062 loc &= -align;
4063 n = loc;
4065 vla_runtime_type_size(type, &align);
4066 gen_op('*');
4067 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4068 vswap();
4069 vstore();
4071 if (n != -1)
4072 vpop();
4073 nocode_wanted = saved_nocode_wanted;
4075 /* we push an anonymous symbol which will contain the array
4076 element type */
4077 s = sym_push(SYM_FIELD, type, 0, n);
4078 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4079 type->ref = s;
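/* Illustrative note (assumption, not in the original source): for
   'int a[n]' with a non-constant n, the dimension expression stays on the
   value stack across the recursive post_type() call, is then multiplied by
   the element size pushed by vla_runtime_type_size(), and the product is
   stored into the hidden local slot reserved at 'loc'; the resulting type
   is marked VT_VLA | VT_PTR instead of VT_ARRAY | VT_PTR. */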
4083 /* Parse a type declaration (except basic type), and return the type
4084 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4085 expected. 'type' should contain the basic type. 'ad' is the
4086 attribute definition of the basic type. It can be modified by
4087 type_decl().
4089 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4091 Sym *s;
4092 CType type1, *type2;
4093 int qualifiers, storage;
4095 while (tok == '*') {
4096 qualifiers = 0;
4097 redo:
4098 next();
4099 switch(tok) {
4100 case TOK_CONST1:
4101 case TOK_CONST2:
4102 case TOK_CONST3:
4103 qualifiers |= VT_CONSTANT;
4104 goto redo;
4105 case TOK_VOLATILE1:
4106 case TOK_VOLATILE2:
4107 case TOK_VOLATILE3:
4108 qualifiers |= VT_VOLATILE;
4109 goto redo;
4110 case TOK_RESTRICT1:
4111 case TOK_RESTRICT2:
4112 case TOK_RESTRICT3:
4113 goto redo;
4114 /* XXX: clarify attribute handling */
4115 case TOK_ATTRIBUTE1:
4116 case TOK_ATTRIBUTE2:
4117 parse_attribute(ad);
4118 break;
4120 mk_pointer(type);
4121 type->t |= qualifiers;
4124 /* recursive type */
4125 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4126 type1.t = 0; /* XXX: same as int */
4127 if (tok == '(') {
4128 next();
4129 /* XXX: this is not correct to modify 'ad' at this point, but
4130 the syntax is not clear */
4131 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4132 parse_attribute(ad);
4133 type_decl(&type1, ad, v, td);
4134 skip(')');
4135 } else {
4136 /* type identifier */
4137 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4138 *v = tok;
4139 next();
4140 } else {
4141 if (!(td & TYPE_ABSTRACT))
4142 expect("identifier");
4143 *v = 0;
4146 storage = type->t & VT_STORAGE;
4147 type->t &= ~VT_STORAGE;
4148 post_type(type, ad, storage);
4149 type->t |= storage;
4150 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4151 parse_attribute(ad);
4153 if (!type1.t)
4154 return;
4155 /* append type at the end of type1 */
4156 type2 = &type1;
4157 for(;;) {
4158 s = type2->ref;
4159 type2 = &s->type;
4160 if (!type2->t) {
4161 *type2 = *type;
4162 break;
4165 *type = type1;
4168 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4169 ST_FUNC int lvalue_type(int t)
4171 int bt, r;
4172 r = VT_LVAL;
4173 bt = t & VT_BTYPE;
4174 if (bt == VT_BYTE || bt == VT_BOOL)
4175 r |= VT_LVAL_BYTE;
4176 else if (bt == VT_SHORT)
4177 r |= VT_LVAL_SHORT;
4178 else
4179 return r;
4180 if (t & VT_UNSIGNED)
4181 r |= VT_LVAL_UNSIGNED;
4182 return r;
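/* Illustrative values (not part of the original source): an
   'unsigned short' lvalue yields VT_LVAL | VT_LVAL_SHORT |
   VT_LVAL_UNSIGNED, while a plain 'int' lvalue yields just VT_LVAL,
   since a full word needs no size or sign adjustment on load. */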
4185 /* indirection with full error checking and bound check */
4186 ST_FUNC void indir(void)
4188 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4189 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4190 return;
4191 expect("pointer");
4193 if ((vtop->r & VT_LVAL) && !nocode_wanted)
4194 gv(RC_INT);
4195 vtop->type = *pointed_type(&vtop->type);
4196 /* Arrays and functions are never lvalues */
4197 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4198 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4199 vtop->r |= lvalue_type(vtop->type.t);
4200 /* if bound checking, the referenced pointer must be checked */
4201 #ifdef CONFIG_TCC_BCHECK
4202 if (tcc_state->do_bounds_check)
4203 vtop->r |= VT_MUSTBOUND;
4204 #endif
4208 /* pass a parameter to a function and do type checking and casting */
4209 static void gfunc_param_typed(Sym *func, Sym *arg)
4211 int func_type;
4212 CType type;
4214 func_type = func->c;
4215 if (func_type == FUNC_OLD ||
4216 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4217 /* default casting : only need to convert float to double */
4218 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4219 type.t = VT_DOUBLE;
4220 gen_cast(&type);
4221 } else if (vtop->type.t & VT_BITFIELD) {
4222 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4223 type.ref = vtop->type.ref;
4224 gen_cast(&type);
4226 } else if (arg == NULL) {
4227 tcc_error("too many arguments to function");
4228 } else {
4229 type = arg->type;
4230 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4231 gen_assign_cast(&type);
4235 /* parse an expression of the form '(type)' or '(expr)' and return its
4236 type */
4237 static void parse_expr_type(CType *type)
4239 int n;
4240 AttributeDef ad;
4242 skip('(');
4243 if (parse_btype(type, &ad)) {
4244 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4245 } else {
4246 expr_type(type);
4248 skip(')');
4251 static void parse_type(CType *type)
4253 AttributeDef ad;
4254 int n;
4256 if (!parse_btype(type, &ad)) {
4257 expect("type");
4259 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4262 static void vpush_tokc(int t)
4264 CType type;
4265 type.t = t;
4266 type.ref = 0;
4267 vsetc(&type, VT_CONST, &tokc);
4270 ST_FUNC void unary(void)
4272 int n, t, align, size, r, sizeof_caller;
4273 CType type;
4274 Sym *s;
4275 AttributeDef ad;
4277 sizeof_caller = in_sizeof;
4278 in_sizeof = 0;
4279 /* XXX: GCC 2.95.3 does not generate a jump table for this switch
4280 although it would be better */
4281 tok_next:
4282 switch(tok) {
4283 case TOK_EXTENSION:
4284 next();
4285 goto tok_next;
4286 case TOK_CINT:
4287 case TOK_CCHAR:
4288 case TOK_LCHAR:
4289 vpushi(tokc.i);
4290 next();
4291 break;
4292 case TOK_CUINT:
4293 vpush_tokc(VT_INT | VT_UNSIGNED);
4294 next();
4295 break;
4296 case TOK_CLLONG:
4297 vpush_tokc(VT_LLONG);
4298 next();
4299 break;
4300 case TOK_CULLONG:
4301 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4302 next();
4303 break;
4304 case TOK_CFLOAT:
4305 vpush_tokc(VT_FLOAT);
4306 next();
4307 break;
4308 case TOK_CDOUBLE:
4309 vpush_tokc(VT_DOUBLE);
4310 next();
4311 break;
4312 case TOK_CLDOUBLE:
4313 vpush_tokc(VT_LDOUBLE);
4314 next();
4315 break;
4316 case TOK___FUNCTION__:
4317 if (!gnu_ext)
4318 goto tok_identifier;
4319 /* fall thru */
4320 case TOK___FUNC__:
4322 void *ptr;
4323 int len;
4324 /* special function name identifier */
4325 len = strlen(funcname) + 1;
4326 /* generate char[len] type */
4327 type.t = VT_BYTE;
4328 mk_pointer(&type);
4329 type.t |= VT_ARRAY;
4330 type.ref->c = len;
4331 vpush_ref(&type, data_section, data_section->data_offset, len);
4332 ptr = section_ptr_add(data_section, len);
4333 memcpy(ptr, funcname, len);
4334 next();
4336 break;
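/* Example: inside
       void greet(void) { puts(__func__); }
   the case above stores "greet" (6 bytes including the terminating NUL)
   in data_section and pushes it as a char[6] array value. */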
4337 case TOK_LSTR:
4338 #ifdef TCC_TARGET_PE
4339 t = VT_SHORT | VT_UNSIGNED;
4340 #else
4341 t = VT_INT;
4342 #endif
4343 goto str_init;
4344 case TOK_STR:
4345 /* string parsing */
4346 t = VT_BYTE;
4347 str_init:
4348 if (tcc_state->warn_write_strings)
4349 t |= VT_CONSTANT;
4350 type.t = t;
4351 mk_pointer(&type);
4352 type.t |= VT_ARRAY;
4353 memset(&ad, 0, sizeof(AttributeDef));
4354 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4355 break;
4356 case '(':
4357 next();
4358 /* cast ? */
4359 if (parse_btype(&type, &ad)) {
4360 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4361 skip(')');
4362 /* check ISOC99 compound literal */
4363 if (tok == '{') {
4364 /* data is allocated locally by default */
4365 if (global_expr)
4366 r = VT_CONST;
4367 else
4368 r = VT_LOCAL;
4369 /* all except arrays are lvalues */
4370 if (!(type.t & VT_ARRAY))
4371 r |= lvalue_type(type.t);
4372 memset(&ad, 0, sizeof(AttributeDef));
4373 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4374 } else {
4375 if (sizeof_caller) {
4376 vpush(&type);
4377 return;
4379 unary();
4380 gen_cast(&type);
4382 } else if (tok == '{') {
4383 if (const_wanted)
4384 tcc_error("expected constant");
4385 /* save all registers */
4386 if (!nocode_wanted)
4387 save_regs(0);
4388 /* statement expression: we do not accept break/continue
4389 inside as GCC does */
4390 block(NULL, NULL, 1);
4391 skip(')');
4392 } else {
4393 gexpr();
4394 skip(')');
4396 break;
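/* The '(' handling above distinguishes several forms, e.g.:
       (int)x             cast: parse_btype() succeeds and no '{' follows
       (int[]){1, 2, 3}   ISOC99 compound literal: '{' after the type
       ({ f(); 42; })     GNU statement expression: '{' right after '('
       (a + b)            plain parenthesized expression otherwise */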
4397 case '*':
4398 next();
4399 unary();
4400 indir();
4401 break;
4402 case '&':
4403 next();
4404 unary();
4405 /* function names must be treated as function pointers,
4406 except for unary '&' and sizeof. Since we consider that
4407 functions are not lvalues, we only have to handle it
4408 there and in function calls. */
4409 /* arrays can also be used although they are not lvalues */
4410 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4411 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4412 test_lvalue();
4413 mk_pointer(&vtop->type);
4414 gaddrof();
4415 break;
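/* Address-of sketch: only plain object lvalues go through test_lvalue()
   above; arrays and function designators are accepted directly, e.g.
       int a[4];  int (*pa)[4] = &a;
       int (*pf)(int) = &putchar;
   both simply get their type wrapped in a pointer and gaddrof() applied. */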
4416 case '!':
4417 next();
4418 unary();
4419 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4420 CType boolean;
4421 boolean.t = VT_BOOL;
4422 gen_cast(&boolean);
4423 vtop->c.i = !vtop->c.i;
4424 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4425 vtop->c.i ^= 1;
4426 else {
4427 save_regs(1);
4428 vseti(VT_JMP, gvtst(1, 0));
4430 break;
4431 case '~':
4432 next();
4433 unary();
4434 vpushi(-1);
4435 gen_op('^');
4436 break;
4437 case '+':
4438 next();
4439 unary();
4440 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4441 tcc_error("pointer not accepted for unary plus");
4442 /* In order to force cast, we add zero, except for floating point
4443 where we really need a no-op (otherwise -0.0 will be transformed
4444 into +0.0). */
4445 if (!is_float(vtop->type.t)) {
4446 vpushi(0);
4447 gen_op('+');
4449 break;
4450 case TOK_SIZEOF:
4451 case TOK_ALIGNOF1:
4452 case TOK_ALIGNOF2:
4453 t = tok;
4454 next();
4455 in_sizeof++;
4456 unary_type(&type); // unary() resets in_sizeof back to 0
4457 size = type_size(&type, &align);
4458 if (t == TOK_SIZEOF) {
4459 if (!(type.t & VT_VLA)) {
4460 if (size < 0)
4461 tcc_error("sizeof applied to an incomplete type");
4462 vpushs(size);
4463 } else {
4464 vla_runtime_type_size(&type, &align);
4466 } else {
4467 vpushs(align);
4469 vtop->type.t |= VT_UNSIGNED;
4470 break;
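/* sizeof/alignof sketch: for a complete, non-VLA type the result is an
   unsigned constant; for a VLA it must be computed at run time, e.g.
       void f(int n) { char buf[n]; size_t s = sizeof buf; }
   takes the vla_runtime_type_size() branch above instead of vpushs(). */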
4472 case TOK_builtin_expect:
4474 /* __builtin_expect is a no-op for now */
4475 int saved_nocode_wanted;
4476 next();
4477 skip('(');
4478 expr_eq();
4479 skip(',');
4480 saved_nocode_wanted = nocode_wanted;
4481 nocode_wanted = 1;
4482 expr_lor_const();
4483 vpop();
4484 nocode_wanted = saved_nocode_wanted;
4485 skip(')');
4487 break;
4488 case TOK_builtin_types_compatible_p:
4490 CType type1, type2;
4491 next();
4492 skip('(');
4493 parse_type(&type1);
4494 skip(',');
4495 parse_type(&type2);
4496 skip(')');
4497 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4498 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4499 vpushi(is_compatible_types(&type1, &type2));
4501 break;
4502 case TOK_builtin_choose_expr:
4504 int saved_nocode_wanted;
4505 int64_t c;
4506 next();
4507 skip('(');
4508 c = expr_const64();
4509 skip(',');
4510 if (!c) {
4511 saved_nocode_wanted = nocode_wanted;
4512 nocode_wanted = 1;
4514 expr_eq();
4515 if (!c) {
4516 vpop();
4517 nocode_wanted = saved_nocode_wanted;
4519 skip(',');
4520 if (c) {
4521 saved_nocode_wanted = nocode_wanted;
4522 nocode_wanted = 1;
4524 expr_eq();
4525 if (c) {
4526 vpop();
4527 nocode_wanted = saved_nocode_wanted;
4529 skip(')');
4531 break;
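/* __builtin_choose_expr example: the selector must be an integer constant
   expression; only the chosen branch generates code, the other one is
   parsed with nocode_wanted set and popped, e.g. (hypothetical helpers)
       r = __builtin_choose_expr(sizeof(long) == 8, use64(), use32()); */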
4532 case TOK_builtin_constant_p:
4534 int saved_nocode_wanted, res;
4535 next();
4536 skip('(');
4537 saved_nocode_wanted = nocode_wanted;
4538 nocode_wanted = 1;
4539 gexpr();
4540 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4541 vpop();
4542 nocode_wanted = saved_nocode_wanted;
4543 skip(')');
4544 vpushi(res);
4546 break;
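/* __builtin_constant_p example: the operand is parsed with nocode_wanted
   set, tested for constness and discarded, so
       __builtin_constant_p(3 * 7)    evaluates to 1
       __builtin_constant_p(argc)     evaluates to 0 */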
4547 case TOK_builtin_frame_address:
4548 case TOK_builtin_return_address:
4550 int tok1 = tok;
4551 int level;
4552 CType type;
4553 next();
4554 skip('(');
4555 if (tok != TOK_CINT) {
4556 tcc_error("%s only takes positive integers",
4557 tok1 == TOK_builtin_return_address ?
4558 "__builtin_return_address" :
4559 "__builtin_frame_address");
4561 level = (uint32_t)tokc.i;
4562 next();
4563 skip(')');
4564 type.t = VT_VOID;
4565 mk_pointer(&type);
4566 vset(&type, VT_LOCAL, 0); /* local frame */
4567 while (level--) {
4568 mk_pointer(&vtop->type);
4569 indir(); /* -> parent frame */
4571 if (tok1 == TOK_builtin_return_address) {
4572 // assume return address is just above frame pointer on stack
4573 vpushi(PTR_SIZE);
4574 gen_op('+');
4575 mk_pointer(&vtop->type);
4576 indir();
4579 break;
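/* Sketch:
       void *fp = __builtin_frame_address(0);    current frame pointer
       void *ra = __builtin_return_address(0);   this function's return address
   each additional level follows one saved frame pointer via indir() above,
   and the return address is assumed to sit PTR_SIZE bytes above it. */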
4580 #ifdef TCC_TARGET_X86_64
4581 #ifdef TCC_TARGET_PE
4582 case TOK_builtin_va_start:
4584 next();
4585 skip('(');
4586 expr_eq();
4587 skip(',');
4588 expr_eq();
4589 skip(')');
4590 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4591 tcc_error("__builtin_va_start expects a local variable");
4592 vtop->r &= ~(VT_LVAL | VT_REF);
4593 vtop->type = char_pointer_type;
4594 vtop->c.i += 8;
4595 vstore();
4597 break;
4598 #else
4599 case TOK_builtin_va_arg_types:
4601 CType type;
4602 next();
4603 skip('(');
4604 parse_type(&type);
4605 skip(')');
4606 vpushi(classify_x86_64_va_arg(&type));
4608 break;
4609 #endif
4610 #endif
4612 #ifdef TCC_TARGET_ARM64
4613 case TOK___va_start: {
4614 if (nocode_wanted)
4615 tcc_error("statement in global scope");
4616 next();
4617 skip('(');
4618 expr_eq();
4619 skip(',');
4620 expr_eq();
4621 skip(')');
4622 //xx check types
4623 gen_va_start();
4624 vpushi(0);
4625 vtop->type.t = VT_VOID;
4626 break;
4628 case TOK___va_arg: {
4629 CType type;
4630 if (nocode_wanted)
4631 tcc_error("statement in global scope");
4632 next();
4633 skip('(');
4634 expr_eq();
4635 skip(',');
4636 parse_type(&type);
4637 skip(')');
4638 //xx check types
4639 gen_va_arg(&type);
4640 vtop->type = type;
4641 break;
4643 case TOK___arm64_clear_cache: {
4644 next();
4645 skip('(');
4646 expr_eq();
4647 skip(',');
4648 expr_eq();
4649 skip(')');
4650 gen_clear_cache();
4651 vpushi(0);
4652 vtop->type.t = VT_VOID;
4653 break;
4655 #endif
4656 /* pre operations */
4657 case TOK_INC:
4658 case TOK_DEC:
4659 t = tok;
4660 next();
4661 unary();
4662 inc(0, t);
4663 break;
4664 case '-':
4665 next();
4666 unary();
4667 t = vtop->type.t & VT_BTYPE;
4668 if (is_float(t)) {
4669 /* In IEEE negate(x) isn't subtract(0,x), but rather
4670 subtract(-0, x). */
4671 vpush(&vtop->type);
4672 if (t == VT_FLOAT)
4673 vtop->c.f = -0.0f;
4674 else if (t == VT_DOUBLE)
4675 vtop->c.d = -0.0;
4676 else
4677 vtop->c.ld = -0.0;
4678 } else
4679 vpushi(0);
4680 vswap();
4681 gen_op('-');
4682 break;
4683 case TOK_LAND:
4684 if (!gnu_ext)
4685 goto tok_identifier;
4686 next();
4687 /* allow to take the address of a label */
4688 if (tok < TOK_UIDENT)
4689 expect("label identifier");
4690 s = label_find(tok);
4691 if (!s) {
4692 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4693 } else {
4694 if (s->r == LABEL_DECLARED)
4695 s->r = LABEL_FORWARD;
4697 if (!s->type.t) {
4698 s->type.t = VT_VOID;
4699 mk_pointer(&s->type);
4700 s->type.t |= VT_STATIC;
4702 vpushsym(&s->type, s);
4703 next();
4704 break;
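/* Label-address sketch (GNU extension): together with computed goto this
   allows e.g.
       void *tgt = &&done;
       goto *tgt;
   done: ;
   The label symbol is given a void pointer type (marked VT_STATIC) so its
   address can be stored and compared. */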
4706 // special qnan, snan and infinity values
4707 case TOK___NAN__:
4708 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4709 next();
4710 break;
4711 case TOK___SNAN__:
4712 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4713 next();
4714 break;
4715 case TOK___INF__:
4716 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4717 next();
4718 break;
4720 default:
4721 tok_identifier:
4722 t = tok;
4723 next();
4724 if (t < TOK_UIDENT)
4725 expect("identifier");
4726 s = sym_find(t);
4727 if (!s) {
4728 const char *name = get_tok_str(t, NULL);
4729 if (tok != '(')
4730 tcc_error("'%s' undeclared", name);
4731 /* for simple function calls, we tolerate undeclared
4732 external reference to int() function */
4733 if (tcc_state->warn_implicit_function_declaration
4734 #ifdef TCC_TARGET_PE
4735 /* people must be warned about using undeclared WINAPI functions
4736 (which usually start with an uppercase letter) */
4737 || (name[0] >= 'A' && name[0] <= 'Z')
4738 #endif
4740 tcc_warning("implicit declaration of function '%s'", name);
4741 s = external_global_sym(t, &func_old_type, 0);
4743 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4744 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4745 /* if referencing an inline function, then we generate a
4746 symbol to it if not already done. It will have the
4747 effect to generate code for it at the end of the
4748 compilation unit. Inline functions are always
4749 generated in the text section. */
4750 if (!s->c && !nocode_wanted)
4751 put_extern_sym(s, text_section, 0, 0);
4752 r = VT_SYM | VT_CONST;
4753 } else {
4754 r = s->r;
4755 /* A symbol that has a register is a local register variable,
4756 which starts out as VT_LOCAL value. */
4757 if ((r & VT_VALMASK) < VT_CONST)
4758 r = (r & ~VT_VALMASK) | VT_LOCAL;
4760 vset(&s->type, r, s->c);
4761 /* Point to s as backpointer (even without r&VT_SYM).
4762 Will be used by at least the x86 inline asm parser for
4763 regvars. */
4764 vtop->sym = s;
4765 if (vtop->r & VT_SYM) {
4766 vtop->c.i = 0;
4768 break;
4771 /* post operations */
4772 while (1) {
4773 if (tok == TOK_INC || tok == TOK_DEC) {
4774 inc(1, tok);
4775 next();
4776 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4777 int qualifiers;
4778 /* field */
4779 if (tok == TOK_ARROW)
4780 indir();
4781 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4782 test_lvalue();
4783 gaddrof();
4784 /* expect pointer on structure */
4785 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4786 expect("struct or union");
4787 if (tok == TOK_CDOUBLE)
4788 expect("field name");
4789 next();
4790 if (tok == TOK_CINT || tok == TOK_CUINT)
4791 expect("field name");
4792 s = find_field(&vtop->type, tok);
4793 if (!s)
4794 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4795 /* add field offset to pointer */
4796 vtop->type = char_pointer_type; /* change type to 'char *' */
4797 vpushi(s->c);
4798 gen_op('+');
4799 /* change type to field type, and set to lvalue */
4800 vtop->type = s->type;
4801 vtop->type.t |= qualifiers;
4802 /* an array is never an lvalue */
4803 if (!(vtop->type.t & VT_ARRAY)) {
4804 vtop->r |= lvalue_type(vtop->type.t);
4805 #ifdef CONFIG_TCC_BCHECK
4806 /* if bound checking, the referenced pointer must be checked */
4807 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4808 vtop->r |= VT_MUSTBOUND;
4809 #endif
4811 next();
4812 } else if (tok == '[') {
4813 next();
4814 gexpr();
4815 gen_op('+');
4816 indir();
4817 skip(']');
4818 } else if (tok == '(') {
4819 SValue ret;
4820 Sym *sa;
4821 int nb_args, ret_nregs, ret_align, regsize, variadic;
4823 /* function call */
4824 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4825 /* pointer test (no array accepted) */
4826 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4827 vtop->type = *pointed_type(&vtop->type);
4828 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4829 goto error_func;
4830 } else {
4831 error_func:
4832 expect("function pointer");
4834 } else {
4835 vtop->r &= ~VT_LVAL; /* no lvalue */
4837 /* get return type */
4838 s = vtop->type.ref;
4839 next();
4840 sa = s->next; /* first parameter */
4841 nb_args = 0;
4842 ret.r2 = VT_CONST;
4843 /* compute first implicit argument if a structure is returned */
4844 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4845 variadic = (s->c == FUNC_ELLIPSIS);
4846 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4847 &ret_align, &regsize);
4848 if (!ret_nregs) {
4849 /* get some space for the returned structure */
4850 size = type_size(&s->type, &align);
4851 #ifdef TCC_TARGET_ARM64
4852 /* On arm64, a small struct is returned in registers.
4853 It is much easier to write it to memory if we know
4854 that we are allowed to write some extra bytes, so
4855 round the allocated space up to a power of 2: */
4856 if (size < 16)
4857 while (size & (size - 1))
4858 size = (size | (size - 1)) + 1;
4859 #endif
4860 loc = (loc - size) & -align;
4861 ret.type = s->type;
4862 ret.r = VT_LOCAL | VT_LVAL;
4863 /* pass it as 'int' to avoid structure arg passing
4864 problems */
4865 vseti(VT_LOCAL, loc);
4866 ret.c = vtop->c;
4867 nb_args++;
4869 } else {
4870 ret_nregs = 1;
4871 ret.type = s->type;
4874 if (ret_nregs) {
4875 /* return in register */
4876 if (is_float(ret.type.t)) {
4877 ret.r = reg_fret(ret.type.t);
4878 #ifdef TCC_TARGET_X86_64
4879 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4880 ret.r2 = REG_QRET;
4881 #endif
4882 } else {
4883 #ifndef TCC_TARGET_ARM64
4884 #ifdef TCC_TARGET_X86_64
4885 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4886 #else
4887 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4888 #endif
4889 ret.r2 = REG_LRET;
4890 #endif
4891 ret.r = REG_IRET;
4893 ret.c.i = 0;
4895 if (tok != ')') {
4896 for(;;) {
4897 expr_eq();
4898 gfunc_param_typed(s, sa);
4899 nb_args++;
4900 if (sa)
4901 sa = sa->next;
4902 if (tok == ')')
4903 break;
4904 skip(',');
4907 if (sa)
4908 tcc_error("too few arguments to function");
4909 skip(')');
4910 if (!nocode_wanted) {
4911 gfunc_call(nb_args);
4912 } else {
4913 vtop -= (nb_args + 1);
4916 /* return value */
4917 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4918 vsetc(&ret.type, r, &ret.c);
4919 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4922 /* handle packed struct return */
4923 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4924 int addr, offset;
4926 size = type_size(&s->type, &align);
4927 /* We're writing whole regs often, so make sure there's enough
4928 space. Assume register size is power of 2. */
4929 if (regsize > align)
4930 align = regsize;
4931 loc = (loc - size) & -align;
4932 addr = loc;
4933 offset = 0;
4934 for (;;) {
4935 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4936 vswap();
4937 vstore();
4938 vtop--;
4939 if (--ret_nregs == 0)
4940 break;
4941 offset += regsize;
4943 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4945 } else {
4946 break;
4951 ST_FUNC void expr_prod(void)
4953 int t;
4955 unary();
4956 while (tok == '*' || tok == '/' || tok == '%') {
4957 t = tok;
4958 next();
4959 unary();
4960 gen_op(t);
4964 ST_FUNC void expr_sum(void)
4966 int t;
4968 expr_prod();
4969 while (tok == '+' || tok == '-') {
4970 t = tok;
4971 next();
4972 expr_prod();
4973 gen_op(t);
4977 static void expr_shift(void)
4979 int t;
4981 expr_sum();
4982 while (tok == TOK_SHL || tok == TOK_SAR) {
4983 t = tok;
4984 next();
4985 expr_sum();
4986 gen_op(t);
4990 static void expr_cmp(void)
4992 int t;
4994 expr_shift();
4995 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4996 tok == TOK_ULT || tok == TOK_UGE) {
4997 t = tok;
4998 next();
4999 expr_shift();
5000 gen_op(t);
5004 static void expr_cmpeq(void)
5006 int t;
5008 expr_cmp();
5009 while (tok == TOK_EQ || tok == TOK_NE) {
5010 t = tok;
5011 next();
5012 expr_cmp();
5013 gen_op(t);
5017 static void expr_and(void)
5019 expr_cmpeq();
5020 while (tok == '&') {
5021 next();
5022 expr_cmpeq();
5023 gen_op('&');
5027 static void expr_xor(void)
5029 expr_and();
5030 while (tok == '^') {
5031 next();
5032 expr_and();
5033 gen_op('^');
5037 static void expr_or(void)
5039 expr_xor();
5040 while (tok == '|') {
5041 next();
5042 expr_xor();
5043 gen_op('|');
5047 /* XXX: fix this mess */
5048 static void expr_land_const(void)
5050 expr_or();
5051 while (tok == TOK_LAND) {
5052 next();
5053 expr_or();
5054 gen_op(TOK_LAND);
5057 static void expr_lor_const(void)
5059 expr_land_const();
5060 while (tok == TOK_LOR) {
5061 next();
5062 expr_land_const();
5063 gen_op(TOK_LOR);
5067 static void expr_land(void)
5069 expr_or();
5070 if (tok == TOK_LAND) {
5071 int t = 0;
5072 for(;;) {
5073 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5074 CType ctb;
5075 ctb.t = VT_BOOL;
5076 gen_cast(&ctb);
5077 if (vtop->c.i) {
5078 vpop();
5079 } else {
5080 int saved_nocode_wanted = nocode_wanted;
5081 nocode_wanted = 1;
5082 while (tok == TOK_LAND) {
5083 next();
5084 expr_or();
5085 vpop();
5087 if (t)
5088 gsym(t);
5089 nocode_wanted = saved_nocode_wanted;
5090 gen_cast(&int_type);
5091 break;
5093 } else {
5094 if (!t)
5095 save_regs(1);
5096 t = gvtst(1, t);
5098 if (tok != TOK_LAND) {
5099 if (t)
5100 vseti(VT_JMPI, t);
5101 else
5102 vpushi(1);
5103 break;
5105 next();
5106 expr_or();
5111 static void expr_lor(void)
5113 expr_land();
5114 if (tok == TOK_LOR) {
5115 int t = 0;
5116 for(;;) {
5117 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5118 CType ctb;
5119 ctb.t = VT_BOOL;
5120 gen_cast(&ctb);
5121 if (!vtop->c.i) {
5122 vpop();
5123 } else {
5124 int saved_nocode_wanted = nocode_wanted;
5125 nocode_wanted = 1;
5126 while (tok == TOK_LOR) {
5127 next();
5128 expr_land();
5129 vpop();
5131 if (t)
5132 gsym(t);
5133 nocode_wanted = saved_nocode_wanted;
5134 gen_cast(&int_type);
5135 break;
5137 } else {
5138 if (!t)
5139 save_regs(1);
5140 t = gvtst(0, t);
5142 if (tok != TOK_LOR) {
5143 if (t)
5144 vseti(VT_JMP, t);
5145 else
5146 vpushi(0);
5147 break;
5149 next();
5150 expr_land();
5155 /* Assuming vtop is a value used in a conditional context
5156 (i.e. compared with zero) return 0 if it's false, 1 if
5157 true and -1 if it can't be statically determined. */
5158 static int condition_3way(void)
5160 int c = -1;
5161 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5162 (!(vtop->r & VT_SYM) ||
5163 !(vtop->sym->type.t & VT_WEAK))) {
5164 CType boolean;
5165 boolean.t = VT_BOOL;
5166 vdup();
5167 gen_cast(&boolean);
5168 c = vtop->c.i;
5169 vpop();
5171 return c;
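/* condition_3way() usage sketch: for `if (0)` it returns 0, for `if (1)`
   it returns 1, and for a non-constant condition such as `if (n)` it
   returns -1, which lets callers like expr_cond() and block() keep or
   suppress dead branches via nocode_wanted. */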
5174 static void expr_cond(void)
5176 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv;
5177 int c;
5178 SValue sv;
5179 CType type, type1, type2;
5181 expr_lor();
5182 if (tok == '?') {
5183 next();
5184 c = condition_3way();
5185 if (c >= 0) {
5186 int saved_nocode_wanted = nocode_wanted;
5187 if (c) {
5188 if (tok != ':' || !gnu_ext) {
5189 vpop();
5190 gexpr();
5192 skip(':');
5193 nocode_wanted = 1;
5194 expr_cond();
5195 vpop();
5196 nocode_wanted = saved_nocode_wanted;
5197 } else {
5198 vpop();
5199 if (tok != ':' || !gnu_ext) {
5200 nocode_wanted = 1;
5201 gexpr();
5202 vpop();
5203 nocode_wanted = saved_nocode_wanted;
5205 skip(':');
5206 expr_cond();
5209 else {
5210 /* XXX This doesn't handle nocode_wanted correctly at all.
5211 It unconditionally calls gv/gvtst and friends. That's
5212 the case for many of the expr_ routines. Currently
5213 that should generate only useless code, but depending
5214 on other operand handling this might also generate
5215 pointer derefs for lvalue conversions whose result
5216 is useless, but nevertheless can lead to a segfault.
5218 At some point we need to overhaul the whole nocode_wanted
5219 handling. */
5220 if (vtop != vstack) {
5221 /* needed to avoid having different registers saved in
5222 each branch */
5223 if (is_float(vtop->type.t)) {
5224 rc = RC_FLOAT;
5225 #ifdef TCC_TARGET_X86_64
5226 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5227 rc = RC_ST0;
5229 #endif
5231 else
5232 rc = RC_INT;
5233 gv(rc);
5234 save_regs(1);
5236 if (tok == ':' && gnu_ext) {
5237 gv_dup();
5238 tt = gvtst(1, 0);
5239 } else {
5240 tt = gvtst(1, 0);
5241 gexpr();
5243 type1 = vtop->type;
5244 sv = *vtop; /* save value to handle it later */
5245 vtop--; /* no vpop so that FP stack is not flushed */
5246 skip(':');
5247 u = gjmp(0);
5248 gsym(tt);
5249 expr_cond();
5250 type2 = vtop->type;
5252 t1 = type1.t;
5253 bt1 = t1 & VT_BTYPE;
5254 t2 = type2.t;
5255 bt2 = t2 & VT_BTYPE;
5256 /* cast operands to correct type according to ISOC rules */
5257 if (is_float(bt1) || is_float(bt2)) {
5258 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5259 type.t = VT_LDOUBLE;
5260 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5261 type.t = VT_DOUBLE;
5262 } else {
5263 type.t = VT_FLOAT;
5265 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5266 /* cast to biggest op */
5267 type.t = VT_LLONG;
5268 /* convert to unsigned if it does not fit in a long long */
5269 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5270 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5271 type.t |= VT_UNSIGNED;
5272 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5273 /* If one is a null ptr constant the result type
5274 is the other. */
5275 if (is_null_pointer (vtop))
5276 type = type1;
5277 else if (is_null_pointer (&sv))
5278 type = type2;
5279 /* XXX: test pointer compatibility, C99 has more elaborate
5280 rules here. */
5281 else
5282 type = type1;
5283 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5284 /* XXX: test function pointer compatibility */
5285 type = bt1 == VT_FUNC ? type1 : type2;
5286 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5287 /* XXX: test structure compatibility */
5288 type = bt1 == VT_STRUCT ? type1 : type2;
5289 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5290 /* NOTE: as an extension, we accept void on only one side */
5291 type.t = VT_VOID;
5292 } else {
5293 /* integer operations */
5294 type.t = VT_INT;
5295 /* convert to unsigned if it does not fit in an integer */
5296 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5297 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5298 type.t |= VT_UNSIGNED;
5300 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5301 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5302 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5304 /* now we convert second operand */
5305 gen_cast(&type);
5306 if (islv) {
5307 mk_pointer(&vtop->type);
5308 gaddrof();
5310 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5311 gaddrof();
5312 rc = RC_INT;
5313 if (is_float(type.t)) {
5314 rc = RC_FLOAT;
5315 #ifdef TCC_TARGET_X86_64
5316 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5317 rc = RC_ST0;
5319 #endif
5320 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5321 /* for long longs, we use fixed registers to avoid having
5322 to handle a complicated move */
5323 rc = RC_IRET;
5326 r2 = gv(rc);
5327 /* this is horrible, but we must also convert first
5328 operand */
5329 tt = gjmp(0);
5330 gsym(u);
5331 /* put again first value and cast it */
5332 *vtop = sv;
5333 gen_cast(&type);
5334 if (islv) {
5335 mk_pointer(&vtop->type);
5336 gaddrof();
5338 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5339 gaddrof();
5340 r1 = gv(rc);
5341 move_reg(r2, r1, type.t);
5342 vtop->r = r2;
5343 gsym(tt);
5344 if (islv)
5345 indir();
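/* Conditional-operator examples of the conversion rules applied above:
       1 ? 2 : 3.0        has type double (usual arithmetic conversions)
       p ? p : 0          a null pointer constant adopts the other type
       c ? s1 : s2        struct operands stay lvalues via the &a/&b rewrite */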
5350 static void expr_eq(void)
5352 int t;
5354 expr_cond();
5355 if (tok == '=' ||
5356 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5357 tok == TOK_A_XOR || tok == TOK_A_OR ||
5358 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5359 test_lvalue();
5360 t = tok;
5361 next();
5362 if (t == '=') {
5363 expr_eq();
5364 } else {
5365 vdup();
5366 expr_eq();
5367 gen_op(t & 0x7f);
5369 vstore();
5373 ST_FUNC void gexpr(void)
5375 while (1) {
5376 expr_eq();
5377 if (tok != ',')
5378 break;
5379 vpop();
5380 next();
5384 /* parse an expression and return its type without any side effect. */
5385 static void expr_type(CType *type)
5387 int saved_nocode_wanted;
5389 saved_nocode_wanted = nocode_wanted;
5390 nocode_wanted = 1;
5391 gexpr();
5392 *type = vtop->type;
5393 vpop();
5394 nocode_wanted = saved_nocode_wanted;
5397 /* parse a unary expression and return its type without any side
5398 effect. */
5399 static void unary_type(CType *type)
5401 int a;
5403 a = nocode_wanted;
5404 nocode_wanted = 1;
5405 unary();
5406 *type = vtop->type;
5407 vpop();
5408 nocode_wanted = a;
5411 /* parse a constant expression and return value in vtop. */
5412 static void expr_const1(void)
5414 int a;
5415 a = const_wanted;
5416 const_wanted = 1;
5417 expr_cond();
5418 const_wanted = a;
5421 /* parse an integer constant and return its value. */
5422 static inline int64_t expr_const64(void)
5424 int64_t c;
5425 expr_const1();
5426 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5427 expect("constant expression");
5428 c = vtop->c.i;
5429 vpop();
5430 return c;
5433 /* parse an integer constant and return its value.
5434 Complain if it doesn't fit 32bit (signed or unsigned). */
5435 ST_FUNC int expr_const(void)
5437 int c;
5438 int64_t wc = expr_const64();
5439 c = wc;
5440 if (c != wc && (unsigned)c != wc)
5441 tcc_error("constant exceeds 32 bit");
5442 return c;
5445 /* return the label token if current token is a label, otherwise
5446 return zero */
5447 static int is_label(void)
5449 int last_tok;
5451 /* fast test first */
5452 if (tok < TOK_UIDENT)
5453 return 0;
5454 /* no need to save tokc because tok is an identifier */
5455 last_tok = tok;
5456 next();
5457 if (tok == ':') {
5458 next();
5459 return last_tok;
5460 } else {
5461 unget_tok(last_tok);
5462 return 0;
5466 static void label_or_decl(int l)
5468 int last_tok;
5470 /* fast test first */
5471 if (tok >= TOK_UIDENT)
5473 /* no need to save tokc because tok is an identifier */
5474 last_tok = tok;
5475 next();
5476 if (tok == ':') {
5477 unget_tok(last_tok);
5478 return;
5480 unget_tok(last_tok);
5482 decl(l);
5485 static int case_cmp(const void *pa, const void *pb)
5487 int64_t a = (*(struct case_t**) pa)->v1;
5488 int64_t b = (*(struct case_t**) pb)->v1;
5489 return a < b ? -1 : a > b;
5492 static void gcase(struct case_t **base, int len, int *bsym)
5494 struct case_t *p;
5495 int e;
5496 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5497 gv(RC_INT);
5498 while (len > 4) {
5499 /* binary search */
5500 p = base[len/2];
5501 vdup();
5502 if (ll)
5503 vpushll(p->v2);
5504 else
5505 vpushi(p->v2);
5506 gen_op(TOK_LE);
5507 e = gtst(1, 0);
5508 vdup();
5509 if (ll)
5510 vpushll(p->v1);
5511 else
5512 vpushi(p->v1);
5513 gen_op(TOK_GE);
5514 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5515 /* x < v1 */
5516 gcase(base, len/2, bsym);
5517 if (cur_switch->def_sym)
5518 gjmp_addr(cur_switch->def_sym);
5519 else
5520 *bsym = gjmp(*bsym);
5521 /* x > v2 */
5522 gsym(e);
5523 e = len/2 + 1;
5524 base += e; len -= e;
5526 /* linear scan */
5527 while (len--) {
5528 p = *base++;
5529 vdup();
5530 if (ll)
5531 vpushll(p->v2);
5532 else
5533 vpushi(p->v2);
5534 if (p->v1 == p->v2) {
5535 gen_op(TOK_EQ);
5536 gtst_addr(0, p->sym);
5537 } else {
5538 gen_op(TOK_LE);
5539 e = gtst(1, 0);
5540 vdup();
5541 if (ll)
5542 vpushll(p->v1);
5543 else
5544 vpushi(p->v1);
5545 gen_op(TOK_GE);
5546 gtst_addr(0, p->sym);
5547 gsym(e);
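/* gcase() strategy sketch: with more than four remaining cases the sorted
   list is bisected and the middle range is tested first; for a range like
   `case 10 ... 19:` the emitted test is roughly
       if (x <= 19 && x >= 10) goto case_label;
   then the lower half is handled recursively and the upper half falls
   through after gsym(e). */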
5552 static void block(int *bsym, int *csym, int is_expr)
5554 int a, b, c, d, cond;
5555 Sym *s;
5557 /* generate line number info */
5558 if (tcc_state->do_debug &&
5559 (last_line_num != file->line_num || last_ind != ind)) {
5560 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5561 last_ind = ind;
5562 last_line_num = file->line_num;
5565 if (is_expr) {
5566 /* default return value is (void) */
5567 vpushi(0);
5568 vtop->type.t = VT_VOID;
5571 if (tok == TOK_IF) {
5572 /* if test */
5573 int saved_nocode_wanted = nocode_wanted;
5574 next();
5575 skip('(');
5576 gexpr();
5577 skip(')');
5578 cond = condition_3way();
5579 if (cond == 0)
5580 nocode_wanted |= 2;
5581 a = gvtst(1, 0);
5582 block(bsym, csym, 0);
5583 if (cond != 1)
5584 nocode_wanted = saved_nocode_wanted;
5585 c = tok;
5586 if (c == TOK_ELSE) {
5587 next();
5588 if (cond == 1)
5589 nocode_wanted |= 2;
5590 d = gjmp(0);
5591 gsym(a);
5592 block(bsym, csym, 0);
5593 gsym(d); /* patch else jmp */
5594 if (cond != 0)
5595 nocode_wanted = saved_nocode_wanted;
5596 } else
5597 gsym(a);
5598 } else if (tok == TOK_WHILE) {
5599 int saved_nocode_wanted;
5600 nocode_wanted &= ~2;
5601 next();
5602 d = ind;
5603 vla_sp_restore();
5604 skip('(');
5605 gexpr();
5606 skip(')');
5607 a = gvtst(1, 0);
5608 b = 0;
5609 ++local_scope;
5610 saved_nocode_wanted = nocode_wanted;
5611 block(&a, &b, 0);
5612 nocode_wanted = saved_nocode_wanted;
5613 --local_scope;
5614 if(!nocode_wanted)
5615 gjmp_addr(d);
5616 gsym(a);
5617 gsym_addr(b, d);
5618 } else if (tok == '{') {
5619 Sym *llabel;
5620 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5622 next();
5623 /* record local declaration stack position */
5624 s = local_stack;
5625 llabel = local_label_stack;
5626 ++local_scope;
5628 /* handle local labels declarations */
5629 if (tok == TOK_LABEL) {
5630 next();
5631 for(;;) {
5632 if (tok < TOK_UIDENT)
5633 expect("label identifier");
5634 label_push(&local_label_stack, tok, LABEL_DECLARED);
5635 next();
5636 if (tok == ',') {
5637 next();
5638 } else {
5639 skip(';');
5640 break;
5644 while (tok != '}') {
5645 label_or_decl(VT_LOCAL);
5646 if (tok != '}') {
5647 if (is_expr)
5648 vpop();
5649 block(bsym, csym, is_expr);
5652 /* pop locally defined labels */
5653 label_pop(&local_label_stack, llabel);
5654 /* pop locally defined symbols */
5655 --local_scope;
5656 /* In the is_expr case (a statement expression is finished here),
5657 vtop might refer to symbols on the local_stack. Either via the
5658 type or via vtop->sym. We can't pop those nor any that in turn
5659 might be referred to. To make it easier we don't roll back
5660 any symbols in that case; some upper level call to block() will
5661 do that. We do have to remove such symbols from the lookup
5662 tables, though. sym_pop will do that. */
5663 sym_pop(&local_stack, s, is_expr);
5665 /* Pop VLA frames and restore stack pointer if required */
5666 if (vlas_in_scope > saved_vlas_in_scope) {
5667 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5668 vla_sp_restore();
5670 vlas_in_scope = saved_vlas_in_scope;
5672 next();
5673 } else if (tok == TOK_RETURN) {
5674 next();
5675 if (tok != ';') {
5676 gexpr();
5677 gen_assign_cast(&func_vt);
5678 #ifdef TCC_TARGET_ARM64
5679 // Perhaps it would be better to use this for all backends:
5680 greturn();
5681 #else
5682 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5683 CType type, ret_type;
5684 int ret_align, ret_nregs, regsize;
5685 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5686 &ret_align, &regsize);
5687 if (0 == ret_nregs) {
5688 /* if returning structure, must copy it to implicit
5689 first pointer arg location */
5690 type = func_vt;
5691 mk_pointer(&type);
5692 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5693 indir();
5694 vswap();
5695 /* copy structure value to pointer */
5696 vstore();
5697 } else {
5698 /* returning structure packed into registers */
5699 int r, size, addr, align;
5700 size = type_size(&func_vt,&align);
5701 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5702 (vtop->c.i & (ret_align-1)))
5703 && (align & (ret_align-1))) {
5704 loc = (loc - size) & -ret_align;
5705 addr = loc;
5706 type = func_vt;
5707 vset(&type, VT_LOCAL | VT_LVAL, addr);
5708 vswap();
5709 vstore();
5710 vpop();
5711 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5713 vtop->type = ret_type;
5714 if (is_float(ret_type.t))
5715 r = rc_fret(ret_type.t);
5716 else
5717 r = RC_IRET;
5719 if (ret_nregs == 1)
5720 gv(r);
5721 else {
5722 for (;;) {
5723 vdup();
5724 gv(r);
5725 vpop();
5726 if (--ret_nregs == 0)
5727 break;
5728 /* We assume that when a structure is returned in multiple
5729 registers, their classes are consecutive values of the
5730 sequence s(n) = 2^n */
5731 r <<= 1;
5732 vtop->c.i += regsize;
5736 } else if (is_float(func_vt.t)) {
5737 gv(rc_fret(func_vt.t));
5738 } else {
5739 gv(RC_IRET);
5741 #endif
5742 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5744 skip(';');
5745 /* jump unless last stmt in top-level block */
5746 if (tok != '}' || local_scope != 1)
5747 rsym = gjmp(rsym);
5748 nocode_wanted |= 2;
5749 } else if (tok == TOK_BREAK) {
5750 /* compute jump */
5751 if (!bsym)
5752 tcc_error("cannot break");
5753 *bsym = gjmp(*bsym);
5754 next();
5755 skip(';');
5756 nocode_wanted |= 2;
5757 } else if (tok == TOK_CONTINUE) {
5758 /* compute jump */
5759 if (!csym)
5760 tcc_error("cannot continue");
5761 vla_sp_restore_root();
5762 *csym = gjmp(*csym);
5763 next();
5764 skip(';');
5765 } else if (tok == TOK_FOR) {
5766 int e;
5767 int saved_nocode_wanted;
5768 nocode_wanted &= ~2;
5769 next();
5770 skip('(');
5771 s = local_stack;
5772 ++local_scope;
5773 if (tok != ';') {
5774 /* c99 for-loop init decl? */
5775 if (!decl0(VT_LOCAL, 1)) {
5776 /* no, regular for-loop init expr */
5777 gexpr();
5778 vpop();
5781 skip(';');
5782 d = ind;
5783 c = ind;
5784 vla_sp_restore();
5785 a = 0;
5786 b = 0;
5787 if (tok != ';') {
5788 gexpr();
5789 a = gvtst(1, 0);
5791 skip(';');
5792 if (tok != ')') {
5793 e = gjmp(0);
5794 c = ind;
5795 vla_sp_restore();
5796 gexpr();
5797 vpop();
5798 gjmp_addr(d);
5799 gsym(e);
5801 skip(')');
5802 saved_nocode_wanted = nocode_wanted;
5803 block(&a, &b, 0);
5804 nocode_wanted = saved_nocode_wanted;
5805 if(!nocode_wanted)
5806 gjmp_addr(c);
5807 gsym(a);
5808 gsym_addr(b, c);
5809 --local_scope;
5810 sym_pop(&local_stack, s, 0);
5812 } else
5813 if (tok == TOK_DO) {
5814 int saved_nocode_wanted;
5815 nocode_wanted &= ~2;
5816 next();
5817 a = 0;
5818 b = 0;
5819 d = ind;
5820 vla_sp_restore();
5821 saved_nocode_wanted = nocode_wanted;
5822 block(&a, &b, 0);
5823 nocode_wanted = saved_nocode_wanted;
5824 skip(TOK_WHILE);
5825 skip('(');
5826 gsym(b);
5827 gexpr();
5828 c = gvtst(0, 0);
5829 if (!nocode_wanted)
5830 gsym_addr(c, d);
5831 skip(')');
5832 gsym(a);
5833 skip(';');
5834 } else
5835 if (tok == TOK_SWITCH) {
5836 struct switch_t *saved, sw;
5837 int saved_nocode_wanted = nocode_wanted;
5838 SValue switchval;
5839 next();
5840 skip('(');
5841 gexpr();
5842 skip(')');
5843 switchval = *vtop--;
5844 a = 0;
5845 b = gjmp(0); /* jump to first case */
5846 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5847 saved = cur_switch;
5848 cur_switch = &sw;
5849 block(&a, csym, 0);
5850 nocode_wanted = saved_nocode_wanted;
5851 a = gjmp(a); /* add implicit break */
5852 /* case lookup */
5853 gsym(b);
5854 if (!nocode_wanted) {
5855 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5856 for (b = 1; b < sw.n; b++)
5857 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5858 tcc_error("duplicate case value");
5859 /* Our switch table sorting is signed, so the compared
5860 value needs to be as well when it's 64bit. */
5861 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5862 switchval.type.t &= ~VT_UNSIGNED;
5863 vpushv(&switchval);
5864 gcase(sw.p, sw.n, &a);
5865 vpop();
5866 if (sw.def_sym)
5867 gjmp_addr(sw.def_sym);
5869 dynarray_reset(&sw.p, &sw.n);
5870 cur_switch = saved;
5871 /* break label */
5872 gsym(a);
5873 } else
5874 if (tok == TOK_CASE) {
5875 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5876 if (!cur_switch)
5877 expect("switch");
5878 nocode_wanted &= ~2;
5879 next();
5880 cr->v1 = cr->v2 = expr_const64();
5881 if (gnu_ext && tok == TOK_DOTS) {
5882 next();
5883 cr->v2 = expr_const64();
5884 if (cr->v2 < cr->v1)
5885 tcc_warning("empty case range");
5887 cr->sym = ind;
5888 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5889 skip(':');
5890 is_expr = 0;
5891 goto block_after_label;
5892 } else
5893 if (tok == TOK_DEFAULT) {
5894 next();
5895 skip(':');
5896 if (!cur_switch)
5897 expect("switch");
5898 if (cur_switch->def_sym)
5899 tcc_error("too many 'default'");
5900 cur_switch->def_sym = ind;
5901 is_expr = 0;
5902 goto block_after_label;
5903 } else
5904 if (tok == TOK_GOTO) {
5905 next();
5906 if (tok == '*' && gnu_ext) {
5907 /* computed goto */
5908 next();
5909 gexpr();
5910 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5911 expect("pointer");
5912 if (!nocode_wanted)
5913 ggoto();
5914 else
5915 vtop--;
5916 } else if (tok >= TOK_UIDENT) {
5917 s = label_find(tok);
5918 /* put forward definition if needed */
5919 if (!s) {
5920 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5921 } else {
5922 if (s->r == LABEL_DECLARED)
5923 s->r = LABEL_FORWARD;
5925 vla_sp_restore_root();
5926 if (nocode_wanted)
5928 else if (s->r & LABEL_FORWARD)
5929 s->jnext = gjmp(s->jnext);
5930 else
5931 gjmp_addr(s->jnext);
5932 next();
5933 } else {
5934 expect("label identifier");
5936 skip(';');
5937 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5938 asm_instr();
5939 } else {
5940 b = is_label();
5941 if (b) {
5942 /* label case */
5943 s = label_find(b);
5944 if (s) {
5945 if (s->r == LABEL_DEFINED)
5946 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5947 gsym(s->jnext);
5948 s->r = LABEL_DEFINED;
5949 } else {
5950 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5952 s->jnext = ind;
5953 vla_sp_restore();
5954 /* we accept this, but it is a mistake */
5955 block_after_label:
5956 nocode_wanted &= ~2;
5957 if (tok == '}') {
5958 tcc_warning("deprecated use of label at end of compound statement");
5959 } else {
5960 if (is_expr)
5961 vpop();
5962 block(bsym, csym, is_expr);
5964 } else {
5965 /* expression case */
5966 if (tok != ';') {
5967 if (is_expr) {
5968 vpop();
5969 gexpr();
5970 } else {
5971 gexpr();
5972 vpop();
5975 skip(';');
5980 #define EXPR_CONST 1
5981 #define EXPR_ANY 2
5983 static void parse_init_elem(int expr_type)
5985 int saved_global_expr;
5986 switch(expr_type) {
5987 case EXPR_CONST:
5988 /* compound literals must be allocated globally in this case */
5989 saved_global_expr = global_expr;
5990 global_expr = 1;
5991 expr_const1();
5992 global_expr = saved_global_expr;
5993 /* NOTE: symbols are accepted */
5994 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5995 tcc_error("initializer element is not constant");
5996 break;
5997 case EXPR_ANY:
5998 expr_eq();
5999 break;
6003 /* t is the array or struct type. c is the array or struct
6004 address. cur_field is the pointer to the current
6005 value, for arrays the 'c' member contains the current start
6006 index and the 'r' contains the end index (in case of range init).
6007 'size_only' is true if only size info is needed (only used
6008 in arrays) */
6009 static void decl_designator(CType *type, Section *sec, unsigned long c,
6010 Sym **cur_field, int size_only)
6012 Sym *s, *f;
6013 int notfirst, index, index_last, align, l, nb_elems, elem_size;
6014 CType type1;
6016 notfirst = 0;
6017 elem_size = 0;
6018 nb_elems = 1;
6019 if (gnu_ext && (l = is_label()) != 0)
6020 goto struct_field;
6021 while (tok == '[' || tok == '.') {
6022 if (tok == '[') {
6023 if (!(type->t & VT_ARRAY))
6024 expect("array type");
6025 s = type->ref;
6026 next();
6027 index = expr_const();
6028 if (index < 0 || (s->c >= 0 && index >= s->c))
6029 tcc_error("invalid index");
6030 if (tok == TOK_DOTS && gnu_ext) {
6031 next();
6032 index_last = expr_const();
6033 if (index_last < 0 ||
6034 (s->c >= 0 && index_last >= s->c) ||
6035 index_last < index)
6036 tcc_error("invalid index");
6037 } else {
6038 index_last = index;
6040 skip(']');
6041 if (!notfirst) {
6042 (*cur_field)->c = index;
6043 (*cur_field)->r = index_last;
6045 type = pointed_type(type);
6046 elem_size = type_size(type, &align);
6047 c += index * elem_size;
6048 /* NOTE: we only support ranges for last designator */
6049 nb_elems = index_last - index + 1;
6050 if (nb_elems != 1) {
6051 notfirst = 1;
6052 break;
6054 } else {
6055 next();
6056 l = tok;
6057 next();
6058 struct_field:
6059 if ((type->t & VT_BTYPE) != VT_STRUCT)
6060 expect("struct/union type");
6061 f = find_field(type, l);
6062 if (!f)
6063 expect("field");
6064 if (!notfirst)
6065 *cur_field = f;
6066 /* XXX: fix this mess by using explicit storage field */
6067 type1 = f->type;
6068 type1.t |= (type->t & ~VT_TYPE);
6069 type = &type1;
6070 c += f->c;
6072 notfirst = 1;
6074 if (notfirst) {
6075 if (tok == '=') {
6076 next();
6077 } else {
6078 if (!gnu_ext)
6079 expect("=");
6081 } else {
6082 if (type->t & VT_ARRAY) {
6083 index = (*cur_field)->c;
6084 if (type->ref->c >= 0 && index >= type->ref->c)
6085 tcc_error("index too large");
6086 type = pointed_type(type);
6087 c += index * type_size(type, &align);
6088 } else {
6089 f = *cur_field;
6090 if (!f)
6091 tcc_error("too many field init");
6092 /* XXX: fix this mess by using explicit storage field */
6093 type1 = f->type;
6094 type1.t |= (type->t & ~VT_TYPE);
6095 type = &type1;
6096 c += f->c;
6099 decl_initializer(type, sec, c, 0, size_only);
6101 /* XXX: make it more general */
6102 if (!size_only && nb_elems > 1) {
6103 unsigned long c_end;
6104 uint8_t *src, *dst;
6105 int i;
6107 if (!sec) {
6108 vset(type, VT_LOCAL|VT_LVAL, c);
6109 for (i = 1; i < nb_elems; i++) {
6110 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6111 vswap();
6112 vstore();
6114 vpop();
6115 } else {
6116 c_end = c + nb_elems * elem_size;
6117 if (c_end > sec->data_allocated)
6118 section_realloc(sec, c_end);
6119 src = sec->data + c;
6120 dst = src;
6121 for(i = 1; i < nb_elems; i++) {
6122 dst += elem_size;
6123 memcpy(dst, src, elem_size);
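/* Range-designator sketch (GNU extension): for
       int t[6] = { [1 ... 3] = 7 };
   the value is stored once at index 1 by decl_initializer() and then
   replicated above, by vstore() for locals or memcpy() for static data. */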
6129 /* store a value or an expression directly in global data or in local array */
6130 static void init_putv(CType *type, Section *sec, unsigned long c)
6132 int bt, bit_pos, bit_size;
6133 void *ptr;
6134 unsigned long long bit_mask;
6135 CType dtype;
6137 dtype = *type;
6138 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6140 if (sec) {
6141 int size, align;
6142 /* XXX: not portable */
6143 /* XXX: generate error if incorrect relocation */
6144 gen_assign_cast(&dtype);
6145 bt = type->t & VT_BTYPE;
6146 size = type_size(type, &align);
6147 if (c + size > sec->data_allocated) {
6148 section_realloc(sec, c + size);
6150 ptr = sec->data + c;
6151 /* XXX: make code faster ? */
6152 if (!(type->t & VT_BITFIELD)) {
6153 bit_pos = 0;
6154 bit_size = PTR_SIZE * 8;
6155 bit_mask = -1LL;
6156 } else {
6157 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6158 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6159 bit_mask = (1LL << bit_size) - 1;
6161 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6162 vtop->sym->v >= SYM_FIRST_ANOM &&
6163 /* XXX This rejects compound literals like
6164 '(void *){ptr}'. The problem is that '&sym' is
6165 represented the same way, which would be ruled out
6166 by the SYM_FIRST_ANOM check above, but also '"string"'
6167 in 'char *p = "string"' is represented the same
6168 with the type being VT_PTR and the symbol being an
6169 anonymous one. That is, there's no difference in vtop
6170 between '(void *){x}' and '&(void *){x}'. Ignore
6171 pointer typed entities here. Hopefully no real code
6172 will ever use compound literals with scalar type. */
6173 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6174 /* These come from compound literals, memcpy stuff over. */
6175 Section *ssec;
6176 ElfW(Sym) *esym;
6177 ElfW_Rel *rel;
6178 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6179 ssec = tcc_state->sections[esym->st_shndx];
6180 memmove (ptr, ssec->data + esym->st_value, size);
6181 if (ssec->reloc) {
6182 /* We need to copy over all memory contents, and that
6183 includes relocations. Use the fact that relocs are
6184 created in order, so look from the end of relocs
6185 until we hit one before the copied region. */
6186 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6187 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6188 while (num_relocs--) {
6189 rel--;
6190 if (rel->r_offset >= esym->st_value + size)
6191 continue;
6192 if (rel->r_offset < esym->st_value)
6193 break;
6194 /* Note: if the same fields are initialized multiple
6195 times (possible with designators) then we possibly
6196 add multiple relocations for the same offset here.
6197 That would lead to wrong code; the last reloc needs
6198 to win. We clean this up later after the whole
6199 initializer is parsed. */
6200 put_elf_reloca(symtab_section, sec,
6201 c + rel->r_offset - esym->st_value,
6202 ELFW(R_TYPE)(rel->r_info),
6203 ELFW(R_SYM)(rel->r_info),
6204 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6205 rel->r_addend
6206 #else
6208 #endif
6212 } else {
6213 if ((vtop->r & VT_SYM) &&
6214 (bt == VT_BYTE ||
6215 bt == VT_SHORT ||
6216 bt == VT_DOUBLE ||
6217 bt == VT_LDOUBLE ||
6218 #if PTR_SIZE == 8
6219 (bt == VT_LLONG && bit_size != 64) ||
6220 bt == VT_INT
6221 #else
6222 bt == VT_LLONG ||
6223 (bt == VT_INT && bit_size != 32)
6224 #endif
6226 tcc_error("initializer element is not computable at load time");
6227 switch(bt) {
6228 /* XXX: when cross-compiling we assume that each type has the
6229 same representation on host and target, which is likely to
6230 be wrong in the case of long double */
6231 case VT_BOOL:
6232 vtop->c.i = (vtop->c.i != 0);
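/* fall through */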
6233 case VT_BYTE:
6234 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6235 break;
6236 case VT_SHORT:
6237 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6238 break;
6239 case VT_DOUBLE:
6240 *(double *)ptr = vtop->c.d;
6241 break;
6242 case VT_LDOUBLE:
6243 if (sizeof(long double) == LDOUBLE_SIZE)
6244 *(long double *)ptr = vtop->c.ld;
6245 else if (sizeof(double) == LDOUBLE_SIZE)
6246 *(double *)ptr = vtop->c.ld;
6247 else
6248 tcc_error("can't cross compile long double constants");
6249 break;
6250 #if PTR_SIZE != 8
6251 case VT_LLONG:
6252 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6253 break;
6254 #else
6255 case VT_LLONG:
6256 #endif
6257 case VT_PTR:
6259 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6260 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6261 if (vtop->r & VT_SYM)
6262 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6263 else
6264 *(addr_t *)ptr |= val;
6265 #else
6266 if (vtop->r & VT_SYM)
6267 greloc(sec, vtop->sym, c, R_DATA_PTR);
6268 *(addr_t *)ptr |= val;
6269 #endif
6270 break;
6272 default:
6274 int val = (vtop->c.i & bit_mask) << bit_pos;
6275 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6276 if (vtop->r & VT_SYM)
6277 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6278 else
6279 *(int *)ptr |= val;
6280 #else
6281 if (vtop->r & VT_SYM)
6282 greloc(sec, vtop->sym, c, R_DATA_PTR);
6283 *(int *)ptr |= val;
6284 #endif
6285 break;
6289 vtop--;
6290 } else {
6291 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6292 vswap();
6293 vstore();
6294 vpop();
6298 /* put zeros for variable based init */
6299 static void init_putz(Section *sec, unsigned long c, int size)
6301 if (sec) {
6302 /* nothing to do because globals are already set to zero */
6303 } else {
6304 vpush_global_sym(&func_old_type, TOK_memset);
6305 vseti(VT_LOCAL, c);
6306 #ifdef TCC_TARGET_ARM
6307 vpushs(size);
6308 vpushi(0);
6309 #else
6310 vpushi(0);
6311 vpushs(size);
6312 #endif
6313 gfunc_call(3);
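/* init_putz() sketch: a local initializer with holes, e.g.
       int v[8] = { [0] = 1, [7] = 2 };
   zero-fills the gap with a generated call roughly equivalent to
       memset(&v[1], 0, 6 * sizeof(int));
   whereas static storage needs nothing, .data/.bss already being zeroed. */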
6317 /* 't' contains the type and storage info. 'c' is the offset of the
6318 object in section 'sec'. If 'sec' is NULL, it means stack based
6319 allocation. 'first' is true if array '{' must be read (multi
6320 dimension implicit array init handling). 'size_only' is true if
6321 size only evaluation is wanted (only for arrays). */
6322 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6323 int first, int size_only)
6325 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6326 int size1, align1;
6327 int have_elem;
6328 Sym *s, *f;
6329 Sym indexsym;
6330 CType *t1;
6332 /* If we are currently at a '}' or ',' we have read an initializer
6333 element in one of our callers, and not yet consumed it. */
6334 have_elem = tok == '}' || tok == ',';
6335 if (!have_elem && tok != '{' &&
6336 /* In case of strings we have special handling for arrays, so
6337 don't consume them as initializer value (which would commit them
6338 to some anonymous symbol). */
6339 tok != TOK_LSTR && tok != TOK_STR &&
6340 !size_only) {
6341 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6342 have_elem = 1;
6345 if (have_elem &&
6346 !(type->t & VT_ARRAY) &&
6347 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6348 The source type might have VT_CONSTANT set, which is
6349 of course assignable to non-const elements. */
6350 is_compatible_parameter_types(type, &vtop->type)) {
6351 init_putv(type, sec, c);
6352 } else if (type->t & VT_ARRAY) {
6353 s = type->ref;
6354 n = s->c;
6355 array_length = 0;
6356 t1 = pointed_type(type);
6357 size1 = type_size(t1, &align1);
6359 no_oblock = 1;
6360 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6361 tok == '{') {
6362 if (tok != '{')
6363 tcc_error("character array initializer must be a literal,"
6364 " optionally enclosed in braces");
6365 skip('{');
6366 no_oblock = 0;
6369 /* only parse strings here if correct type (otherwise: handle
6370 them as ((w)char *) expressions) */
6371 if ((tok == TOK_LSTR &&
6372 #ifdef TCC_TARGET_PE
6373 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6374 #else
6375 (t1->t & VT_BTYPE) == VT_INT
6376 #endif
6377 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6378 while (tok == TOK_STR || tok == TOK_LSTR) {
6379 int cstr_len, ch;
6381 /* compute maximum number of chars wanted */
6382 if (tok == TOK_STR)
6383 cstr_len = tokc.str.size;
6384 else
6385 cstr_len = tokc.str.size / sizeof(nwchar_t);
6386 cstr_len--;
6387 nb = cstr_len;
6388 if (n >= 0 && nb > (n - array_length))
6389 nb = n - array_length;
6390 if (!size_only) {
6391 if (cstr_len > nb)
6392 tcc_warning("initializer-string for array is too long");
6393 /* in order to go faster for the common case (char
6394 string in a global variable), we handle it
6395 specially */
6396 if (sec && tok == TOK_STR && size1 == 1) {
6397 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6398 } else {
6399 for(i=0;i<nb;i++) {
6400 if (tok == TOK_STR)
6401 ch = ((unsigned char *)tokc.str.data)[i];
6402 else
6403 ch = ((nwchar_t *)tokc.str.data)[i];
6404 vpushi(ch);
6405 init_putv(t1, sec, c + (array_length + i) * size1);
6409 array_length += nb;
6410 next();
6412 /* only add trailing zero if enough storage (no
6413 warning in this case since it is standard) */
6414 if (n < 0 || array_length < n) {
6415 if (!size_only) {
6416 vpushi(0);
6417 init_putv(t1, sec, c + (array_length * size1));
6419 array_length++;
6421 } else {
6422 indexsym.c = 0;
6423 indexsym.r = 0;
6424 f = &indexsym;
6426 do_init_list:
6427 while (tok != '}' || have_elem) {
6428 decl_designator(type, sec, c, &f, size_only);
6429 have_elem = 0;
6430 index = f->c;
6431 /* must put zero in holes (note that doing it that way
6432 ensures that it even works with designators) */
6433 if (!size_only && array_length < index) {
6434 init_putz(sec, c + array_length * size1,
6435 (index - array_length) * size1);
6437 if (type->t & VT_ARRAY) {
6438 index = indexsym.c = ++indexsym.r;
6439 } else {
6440 index = index + type_size(&f->type, &align1);
6441 if (s->type.t == TOK_UNION)
6442 f = NULL;
6443 else
6444 f = f->next;
6446 if (index > array_length)
6447 array_length = index;
6449 if (type->t & VT_ARRAY) {
6450 /* special test for multi dimensional arrays (may not
6451 be strictly correct if designators are used at the
6452 same time) */
6453 if (no_oblock && index >= n)
6454 break;
6455 } else {
6456 if (no_oblock && f == NULL)
6457 break;
6459 if (tok == '}')
6460 break;
6461 skip(',');
6464 /* put zeros at the end */
6465 if (!size_only && array_length < n) {
6466 init_putz(sec, c + array_length * size1,
6467 (n - array_length) * size1);
6469 if (!no_oblock)
6470 skip('}');
6471 /* patch type size if needed, which happens only for array types */
6472 if (n < 0)
6473 s->c = array_length;
6474 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6475 size1 = 1;
6476 no_oblock = 1;
6477 if (first || tok == '{') {
6478 skip('{');
6479 no_oblock = 0;
6481 s = type->ref;
6482 f = s->next;
6483 array_length = 0;
6484 n = s->c;
6485 goto do_init_list;
6486 } else if (tok == '{') {
6487 next();
6488 decl_initializer(type, sec, c, first, size_only);
6489 skip('}');
6490 } else if (size_only) {
6491 /* If we supported only ISO C we wouldn't have to accept calling
6492 this on anything other than an array with size_only==1 (and even then
6493 only on the outermost level, so no recursion would be needed),
6494 because initializing a flex array member isn't supported.
6495 But GNU C supports it, so we need to recurse even into
6496 subfields of structs and arrays when size_only is set. */
6497 /* just skip expression */
6498 parlevel = parlevel1 = 0;
6499 while ((parlevel > 0 || parlevel1 > 0 ||
6500 (tok != '}' && tok != ',')) && tok != -1) {
6501 if (tok == '(')
6502 parlevel++;
6503 else if (tok == ')') {
6504 if (parlevel == 0 && parlevel1 == 0)
6505 break;
6506 parlevel--;
6508 else if (tok == '{')
6509 parlevel1++;
6510 else if (tok == '}') {
6511 if (parlevel == 0 && parlevel1 == 0)
6512 break;
6513 parlevel1--;
6515 next();
6517 } else {
6518 if (!have_elem) {
6519 /* This should happen only when we haven't parsed
6520 the init element above for fear of committing a
6521 string constant to memory too early. */
6522 if (tok != TOK_STR && tok != TOK_LSTR)
6523 expect("string constant");
6524 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6526 init_putv(type, sec, c);
6530 /* parse an initializer for type 't' if 'has_init' is non zero, and
6531 allocate space in local or global data space ('r' is either
6532 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6533 variable 'v' of scope 'scope' is declared before initializers
6534 are parsed. If 'v' is zero, then a reference to the new object
6535 is put in the value stack. If 'has_init' is 2, a special parsing
6536 is done to handle string constants. */
6537 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6538 int has_init, int v, int scope)
6540 int size, align, addr, data_offset;
6541 int level;
6542 ParseState saved_parse_state = {0};
6543 TokenString *init_str = NULL;
6544 Section *sec;
6545 Sym *flexible_array;
6547 flexible_array = NULL;
6548 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6549 Sym *field = type->ref->next;
6550 if (field) {
6551 while (field->next)
6552 field = field->next;
6553 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6554 flexible_array = field;
6558 size = type_size(type, &align);
6559 /* If unknown size, we must evaluate it before
6560 evaluating initializers because
6561 initializers can generate global data too
6562 (e.g. string pointers or ISOC99 compound
6563 literals). It also simplifies local
6564 initializers handling */
6565 if (size < 0 || (flexible_array && has_init)) {
6566 if (!has_init)
6567 tcc_error("unknown type size");
6568 /* get all init string */
6569 init_str = tok_str_alloc();
6570 if (has_init == 2) {
6571 /* only get strings */
6572 while (tok == TOK_STR || tok == TOK_LSTR) {
6573 tok_str_add_tok(init_str);
6574 next();
6576 } else {
6577 level = 0;
6578 while (level > 0 || (tok != ',' && tok != ';')) {
6579 if (tok < 0)
6580 tcc_error("unexpected end of file in initializer");
6581 tok_str_add_tok(init_str);
6582 if (tok == '{')
6583 level++;
6584 else if (tok == '}') {
6585 level--;
6586 if (level <= 0) {
6587 next();
6588 break;
6591 next();
6594 tok_str_add(init_str, -1);
6595 tok_str_add(init_str, 0);
6597 /* compute size */
6598 save_parse_state(&saved_parse_state);
6600 begin_macro(init_str, 1);
6601 next();
6602 decl_initializer(type, NULL, 0, 1, 1);
6603 /* prepare second initializer parsing */
6604 macro_ptr = init_str->str;
6605 next();
6607 /* if still unknown size, error */
6608 size = type_size(type, &align);
6609 if (size < 0)
6610 tcc_error("unknown type size");
6612 /* If there's a flex member and it was used in the initializer
6613 adjust size. */
6614 if (flexible_array &&
6615 flexible_array->type.ref->c > 0)
6616 size += flexible_array->type.ref->c
6617 * pointed_size(&flexible_array->type);
6618 /* take into account specified alignment if bigger */
6619 if (ad->a.aligned) {
6620 int speca = 1 << (ad->a.aligned - 1);
6621 if (speca > align)
6622 align = speca;
6623 } else if (ad->a.packed) {
6624 align = 1;
6626 if ((r & VT_VALMASK) == VT_LOCAL) {
6627 sec = NULL;
6628 #ifdef CONFIG_TCC_BCHECK
6629 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6630 loc--;
6631 }
6632 #endif
6633 loc = (loc - size) & -align;
6634 addr = loc;
6635 #ifdef CONFIG_TCC_BCHECK
6636 /* handles bounds */
6637 /* XXX: currently, since we do only one pass, we cannot track
6638 '&' operators, so we add only arrays */
6639 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6640 addr_t *bounds_ptr;
6641 /* add padding between regions */
6642 loc--;
6643 /* then add local bound info */
6644 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6645 bounds_ptr[0] = addr;
6646 bounds_ptr[1] = size;
6647 }
6648 #endif
6649 if (v) {
6650 /* local variable */
6651 #ifdef CONFIG_TCC_ASM
6652 if (ad->asm_label) {
6653 int reg = asm_parse_regvar(ad->asm_label);
6654 if (reg >= 0)
6655 r = (r & ~VT_VALMASK) | reg;
6656 }
6657 #endif
6658 sym_push(v, type, r, addr);
6659 } else {
6660 /* push local reference */
6661 vset(type, r, addr);
6662 }
6663 } else {
6664 Sym *sym;
6666 sym = NULL;
6667 if (v && scope == VT_CONST) {
6668 /* see if the symbol was already defined */
6669 sym = sym_find(v);
6670 if (sym) {
6671 if (!is_compatible_types(&sym->type, type))
6672 tcc_error("incompatible types for redefinition of '%s'",
6673 get_tok_str(v, NULL));
6674 if (sym->type.t & VT_EXTERN) {
6675 /* if the variable is extern, it was not allocated */
6676 sym->type.t &= ~VT_EXTERN;
6677 /* set array size if it was omitted in extern
6678 declaration */
6679 if ((sym->type.t & VT_ARRAY) &&
6680 sym->type.ref->c < 0 &&
6681 type->ref->c >= 0)
6682 sym->type.ref->c = type->ref->c;
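/* Illustration (added): after 'extern int tab[];' a later definition
   'int tab[10];' lands here with sym->type.ref->c < 0, and the real
   size (10) is copied from the defining declaration. */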
6683 } else {
6684 /* we accept several definitions of the same
6685 global variable. this is tricky, because we
6686 must play with the SHN_COMMON type of the symbol */
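/* Illustration (added): two file-scope 'int x;' tentative definitions in
   the same unit are tolerated; with no initializer the symbol is emitted
   further down as a common symbol (SHN_COMMON) so the linker merges the
   copies rather than reporting a duplicate. */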
6687 /* XXX: should check if the variable was already
6688 initialized. It is incorrect to initialize it
6689 twice */
6690 /* no init data, we won't add more to the symbol */
6691 if (!has_init)
6692 goto no_alloc;
6693 }
6694 }
6695 }
6697 /* allocate symbol in corresponding section */
6698 sec = ad->section;
6699 if (!sec) {
6700 if (has_init)
6701 sec = data_section;
6702 else if (tcc_state->nocommon)
6703 sec = bss_section;
6704 }
6705 if (sec) {
6706 data_offset = sec->data_offset;
6707 data_offset = (data_offset + align - 1) & -align;
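/* Illustration (added): '(data_offset + align - 1) & -align' rounds the
   offset up to the next multiple of align, e.g. an offset of 13 with
   align == 8 becomes 16. */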
6708 addr = data_offset;
6709 /* very important to increment global pointer at this time
6710 because initializers themselves can create new initializers */
6711 data_offset += size;
6712 #ifdef CONFIG_TCC_BCHECK
6713 /* add padding if bound check */
6714 if (tcc_state->do_bounds_check)
6715 data_offset++;
6716 #endif
6717 sec->data_offset = data_offset;
6718 /* allocate section space to put the data */
6719 if (sec->sh_type != SHT_NOBITS &&
6720 data_offset > sec->data_allocated)
6721 section_realloc(sec, data_offset);
6722 /* align section if needed */
6723 if (align > sec->sh_addralign)
6724 sec->sh_addralign = align;
6725 } else {
6726 addr = 0; /* avoid warning */
6727 }
6729 if (v) {
6730 if (scope != VT_CONST || !sym) {
6731 sym = sym_push(v, type, r | VT_SYM, 0);
6732 sym->asm_label = ad->asm_label;
6733 }
6734 /* update symbol definition */
6735 if (sec) {
6736 put_extern_sym(sym, sec, addr, size);
6737 } else {
6738 ElfW(Sym) *esym;
6739 /* put a common area */
6740 put_extern_sym(sym, NULL, align, size);
6741 /* XXX: find a nicer way */
6742 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6743 esym->st_shndx = SHN_COMMON;
6744 }
6745 } else {
6746 /* push global reference */
6747 sym = get_sym_ref(type, sec, addr, size);
6748 vpushsym(type, sym);
6749 }
6750 /* patch symbol weakness */
6751 if (type->t & VT_WEAK)
6752 weaken_symbol(sym);
6753 apply_visibility(sym, type);
6754 #ifdef CONFIG_TCC_BCHECK
6755 /* handles bounds now because the symbol must be defined
6756 before for the relocation */
6757 if (tcc_state->do_bounds_check) {
6758 addr_t *bounds_ptr;
6760 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6761 /* then add global bound info */
6762 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6763 bounds_ptr[0] = 0; /* relocated */
6764 bounds_ptr[1] = size;
6765 }
6766 #endif
6768 if (type->t & VT_VLA) {
6769 int a;
6771 /* save current stack pointer */
6772 if (vlas_in_scope == 0) {
6773 if (vla_sp_root_loc == -1)
6774 vla_sp_root_loc = (loc -= PTR_SIZE);
6775 gen_vla_sp_save(vla_sp_root_loc);
6776 }
6778 vla_runtime_type_size(type, &a);
6779 gen_vla_alloc(type, a);
6780 gen_vla_sp_save(addr);
6781 vla_sp_loc = addr;
6782 vlas_in_scope++;
6783 } else if (has_init) {
6784 size_t oldreloc_offset = 0;
6785 if (sec && sec->reloc)
6786 oldreloc_offset = sec->reloc->data_offset;
6787 decl_initializer(type, sec, addr, 1, 0);
6788 if (sec && sec->reloc)
6789 squeeze_multi_relocs(sec, oldreloc_offset);
6790 /* patch flexible array member size back to -1, */
6791 /* for possible subsequent similar declarations */
6792 if (flexible_array)
6793 flexible_array->type.ref->c = -1;
6794 }
6795 no_alloc: ;
6796 /* restore parse state if needed */
6797 if (init_str) {
6798 end_macro();
6799 restore_parse_state(&saved_parse_state);
6800 }
6801 }
6803 static void put_func_debug(Sym *sym)
6804 {
6805 char buf[512];
6807 /* stabs info */
6808 /* XXX: we put here a dummy type */
6809 snprintf(buf, sizeof(buf), "%s:%c1",
6810 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
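/* Illustration (added): for 'static int foo(void)' the stab string built
   above is "foo:f1"; a non-static function produces "foo:F1". */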
6811 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6812 cur_text_section, sym->c);
6813 /* //gr gdb wants a line at the function */
6814 put_stabn(N_SLINE, 0, file->line_num, 0);
6815 last_ind = 0;
6816 last_line_num = 0;
6817 }
6819 /* parse an old style function declaration list */
6820 /* XXX: check multiple parameter */
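/* Illustration (added): this handles K&R-style definitions such as
       int max(a, b)
       int a, b;
       { return a > b ? a : b; }
   where the parameter types are declared between the parameter list and
   the function body. */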
6821 static void func_decl_list(Sym *func_sym)
6822 {
6823 AttributeDef ad;
6824 int v;
6825 Sym *s;
6826 CType btype, type;
6828 /* parse each declaration */
6829 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6830 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6831 if (!parse_btype(&btype, &ad))
6832 expect("declaration list");
6833 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6834 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6835 tok == ';') {
6836 /* we accept no variable after */
6837 } else {
6838 for(;;) {
6839 type = btype;
6840 type_decl(&type, &ad, &v, TYPE_DIRECT);
6841 /* find parameter in function parameter list */
6842 s = func_sym->next;
6843 while (s != NULL) {
6844 if ((s->v & ~SYM_FIELD) == v)
6845 goto found;
6846 s = s->next;
6847 }
6848 tcc_error("declaration for parameter '%s' but no such parameter",
6849 get_tok_str(v, NULL));
6850 found:
6851 /* check that no storage specifier except 'register' was given */
6852 if (type.t & VT_STORAGE)
6853 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6854 convert_parameter_type(&type);
6855 /* we can add the type (NOTE: it could be local to the function) */
6856 s->type = type;
6857 /* accept other parameters */
6858 if (tok == ',')
6859 next();
6860 else
6861 break;
6862 }
6863 }
6864 skip(';');
6865 }
6866 }
6868 /* parse a function defined by symbol 'sym' and generate its code in
6869 'cur_text_section' */
6870 static void gen_function(Sym *sym)
6871 {
6872 int saved_nocode_wanted = nocode_wanted;
6874 nocode_wanted = 0;
6875 ind = cur_text_section->data_offset;
6876 /* NOTE: we patch the symbol size later */
6877 put_extern_sym(sym, cur_text_section, ind, 0);
6878 funcname = get_tok_str(sym->v, NULL);
6879 func_ind = ind;
6880 /* Initialize VLA state */
6881 vla_sp_loc = -1;
6882 vla_sp_root_loc = -1;
6883 /* put debug symbol */
6884 if (tcc_state->do_debug)
6885 put_func_debug(sym);
6887 /* push a dummy symbol to enable local sym storage */
6888 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6889 local_scope = 1; /* for function parameters */
6890 gfunc_prolog(&sym->type);
6891 local_scope = 0;
6893 rsym = 0;
6894 block(NULL, NULL, 0);
6895 gsym(rsym);
6896 gfunc_epilog();
6897 cur_text_section->data_offset = ind;
6898 label_pop(&global_label_stack, NULL);
6899 /* reset local stack */
6900 local_scope = 0;
6901 sym_pop(&local_stack, NULL, 0);
6902 /* end of function */
6903 /* patch symbol size */
6904 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6905 ind - func_ind;
6906 /* patch symbol weakness (this definition overrules any prototype) */
6907 if (sym->type.t & VT_WEAK)
6908 weaken_symbol(sym);
6909 apply_visibility(sym, &sym->type);
6910 if (tcc_state->do_debug) {
6911 put_stabn(N_FUN, 0, 0, ind - func_ind);
6912 }
6913 /* It's better to crash than to generate wrong code */
6914 cur_text_section = NULL;
6915 funcname = ""; /* for safety */
6916 func_vt.t = VT_VOID; /* for safety */
6917 func_var = 0; /* for safety */
6918 ind = 0; /* for safety */
6919 nocode_wanted = saved_nocode_wanted;
6920 check_vstack();
6921 }
6923 static void gen_inline_functions(TCCState *s)
6924 {
6925 Sym *sym;
6926 int inline_generated, i, ln;
6927 struct InlineFunc *fn;
6929 ln = file->line_num;
6930 /* iterate while inline functions are referenced */
6931 for(;;) {
6932 inline_generated = 0;
6933 for (i = 0; i < s->nb_inline_fns; ++i) {
6934 fn = s->inline_fns[i];
6935 sym = fn->sym;
6936 if (sym && sym->c) {
6937 /* the function was used: generate its code and
6938 convert it to a normal function */
6939 fn->sym = NULL;
6940 if (file)
6941 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6942 sym->r = VT_SYM | VT_CONST;
6943 sym->type.t &= ~VT_INLINE;
6945 begin_macro(fn->func_str, 1);
6946 next();
6947 cur_text_section = text_section;
6948 gen_function(sym);
6949 end_macro();
6951 inline_generated = 1;
6952 }
6953 }
6954 if (!inline_generated)
6955 break;
6956 }
6957 file->line_num = ln;
6958 }
6960 ST_FUNC void free_inline_functions(TCCState *s)
6961 {
6962 int i;
6963 /* free tokens of unused inline functions */
6964 for (i = 0; i < s->nb_inline_fns; ++i) {
6965 struct InlineFunc *fn = s->inline_fns[i];
6966 if (fn->sym)
6967 tok_str_free(fn->func_str);
6968 }
6969 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6970 }
6972 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6973 static int decl0(int l, int is_for_loop_init)
6974 {
6975 int v, has_init, r;
6976 CType type, btype;
6977 Sym *sym;
6978 AttributeDef ad;
6980 while (1) {
6981 if (!parse_btype(&btype, &ad)) {
6982 if (is_for_loop_init)
6983 return 0;
6984 /* skip redundant ';' */
6985 /* XXX: find more elegant solution */
6986 if (tok == ';') {
6987 next();
6988 continue;
6989 }
6990 if (l == VT_CONST &&
6991 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6992 /* global asm block */
6993 asm_global_instr();
6994 continue;
6995 }
6996 /* special test for old K&R protos without explicit int
6997 type. Only accepted when defining global data */
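/* Illustration (added): a file-scope line such as 'foo();' or
   'foo() { return 0; }' carries no type specifier, so an implicit
   'int' is assumed just below. */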
6998 if (l == VT_LOCAL || tok < TOK_UIDENT)
6999 break;
7000 btype.t = VT_INT;
7001 }
7002 if (((btype.t & VT_BTYPE) == VT_ENUM ||
7003 (btype.t & VT_BTYPE) == VT_STRUCT) &&
7004 tok == ';') {
7005 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7006 int v = btype.ref->v;
7007 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7008 tcc_warning("unnamed struct/union that defines no instances");
7009 }
7010 next();
7011 continue;
7012 }
7013 while (1) { /* iterate thru each declaration */
7014 type = btype;
7015 /* If the base type itself was an array type of unspecified
7016 size (like in 'typedef int arr[]; arr x = {1};') then
7017 we will overwrite the unknown size by the real one for
7018 this decl. We need to unshare the ref symbol holding
7019 that size. */
7020 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7021 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7022 }
7023 type_decl(&type, &ad, &v, TYPE_DIRECT);
7024 #if 0
7025 {
7026 char buf[500];
7027 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
7028 printf("type = '%s'\n", buf);
7029 }
7030 #endif
7031 if ((type.t & VT_BTYPE) == VT_FUNC) {
7032 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7033 tcc_error("function without file scope cannot be static");
7034 }
7035 /* if old style function prototype, we accept a
7036 declaration list */
7037 sym = type.ref;
7038 if (sym->c == FUNC_OLD)
7039 func_decl_list(sym);
7040 }
7042 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7043 ad.asm_label = asm_label_instr();
7044 /* parse one last attribute list, after asm label */
7045 parse_attribute(&ad);
7046 if (tok == '{')
7047 expect(";");
7048 }
7050 if (ad.a.weak)
7051 type.t |= VT_WEAK;
7052 #ifdef TCC_TARGET_PE
7053 if (ad.a.func_import)
7054 type.t |= VT_IMPORT;
7055 if (ad.a.func_export)
7056 type.t |= VT_EXPORT;
7057 #endif
7058 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7060 if (tok == '{') {
7061 if (l == VT_LOCAL)
7062 tcc_error("cannot use local functions");
7063 if ((type.t & VT_BTYPE) != VT_FUNC)
7064 expect("function definition");
7066 /* reject abstract declarators in function definition */
7067 sym = type.ref;
7068 while ((sym = sym->next) != NULL)
7069 if (!(sym->v & ~SYM_FIELD))
7070 expect("identifier");
7072 /* XXX: cannot do better now: convert extern inline to static inline */
7073 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7074 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7076 sym = sym_find(v);
7077 if (sym) {
7078 Sym *ref;
7079 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7080 goto func_error1;
7082 ref = sym->type.ref;
7083 if (0 == ref->a.func_proto)
7084 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7086 /* use func_call from prototype if not defined */
7087 if (ref->a.func_call != FUNC_CDECL
7088 && type.ref->a.func_call == FUNC_CDECL)
7089 type.ref->a.func_call = ref->a.func_call;
7091 /* use export from prototype */
7092 if (ref->a.func_export)
7093 type.ref->a.func_export = 1;
7095 /* use static from prototype */
7096 if (sym->type.t & VT_STATIC)
7097 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7099 /* If the definition has no visibility use the
7100 one from prototype. */
7101 if (! (type.t & VT_VIS_MASK))
7102 type.t |= sym->type.t & VT_VIS_MASK;
7104 if (!is_compatible_types(&sym->type, &type)) {
7105 func_error1:
7106 tcc_error("incompatible types for redefinition of '%s'",
7107 get_tok_str(v, NULL));
7108 }
7109 type.ref->a.func_proto = 0;
7110 /* if symbol is already defined, then put complete type */
7111 sym->type = type;
7112 } else {
7113 /* put function symbol */
7114 sym = global_identifier_push(v, type.t, 0);
7115 sym->type.ref = type.ref;
7116 }
7118 /* static inline functions are just recorded as a kind
7119 of macro. Their code will be emitted at the end of
7120 the compilation unit only if they are used */
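/* Illustration (added): for 'static inline int twice(int x) { return 2 * x; }'
   the body tokens are stashed in fn->func_str below; code is emitted later
   by gen_inline_functions() only if the function is actually referenced. */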
7121 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7122 (VT_INLINE | VT_STATIC)) {
7123 int block_level;
7124 struct InlineFunc *fn;
7125 const char *filename;
7127 filename = file ? file->filename : "";
7128 fn = tcc_malloc(sizeof *fn + strlen(filename));
7129 strcpy(fn->filename, filename);
7130 fn->sym = sym;
7131 fn->func_str = tok_str_alloc();
7133 block_level = 0;
7134 for(;;) {
7135 int t;
7136 if (tok == TOK_EOF)
7137 tcc_error("unexpected end of file");
7138 tok_str_add_tok(fn->func_str);
7139 t = tok;
7140 next();
7141 if (t == '{') {
7142 block_level++;
7143 } else if (t == '}') {
7144 block_level--;
7145 if (block_level == 0)
7146 break;
7147 }
7148 }
7149 tok_str_add(fn->func_str, -1);
7150 tok_str_add(fn->func_str, 0);
7151 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7153 } else {
7154 /* compute text section */
7155 cur_text_section = ad.section;
7156 if (!cur_text_section)
7157 cur_text_section = text_section;
7158 sym->r = VT_SYM | VT_CONST;
7159 gen_function(sym);
7160 }
7161 break;
7162 } else {
7163 if (btype.t & VT_TYPEDEF) {
7164 /* save typedefed type */
7165 /* XXX: test storage specifiers ? */
7166 sym = sym_find(v);
7167 if (sym && sym->scope == local_scope) {
7168 if (!is_compatible_types(&sym->type, &type)
7169 || !(sym->type.t & VT_TYPEDEF))
7170 tcc_error("incompatible redefinition of '%s'",
7171 get_tok_str(v, NULL));
7172 sym->type = type;
7173 } else {
7174 sym = sym_push(v, &type, 0, 0);
7176 sym->a = ad.a;
7177 sym->type.t |= VT_TYPEDEF;
7178 } else {
7179 r = 0;
7180 if ((type.t & VT_BTYPE) == VT_FUNC) {
7181 /* external function definition */
7182 /* specific case for func_call attribute */
7183 ad.a.func_proto = 1;
7184 type.ref->a = ad.a;
7185 } else if (!(type.t & VT_ARRAY)) {
7186 /* not lvalue if array */
7187 r |= lvalue_type(type.t);
7188 }
7189 has_init = (tok == '=');
7190 if (has_init && (type.t & VT_VLA))
7191 tcc_error("variable length array cannot be initialized");
7192 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7193 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7194 !has_init && l == VT_CONST && type.ref->c < 0)) {
7195 /* external variable or function */
7196 /* NOTE: as GCC, uninitialized global static
7197 arrays of null size are considered as
7198 extern */
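/* Illustration (added): 'static int tab[];' at file scope matches this
   case and is treated like an extern declaration, so no storage is
   reserved until a sized definition appears. */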
7199 sym = external_sym(v, &type, r);
7200 sym->asm_label = ad.asm_label;
7202 if (ad.alias_target) {
7203 Section tsec;
7204 ElfW(Sym) *esym;
7205 Sym *alias_target;
7207 alias_target = sym_find(ad.alias_target);
7208 if (!alias_target || !alias_target->c)
7209 tcc_error("unsupported forward __alias__ attribute");
7210 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7211 tsec.sh_num = esym->st_shndx;
7212 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7213 }
7214 } else {
7215 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7216 if (type.t & VT_STATIC)
7217 r |= VT_CONST;
7218 else
7219 r |= l;
7220 if (has_init)
7221 next();
7222 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7223 }
7224 }
7225 if (tok != ',') {
7226 if (is_for_loop_init)
7227 return 1;
7228 skip(';');
7229 break;
7230 }
7231 next();
7232 }
7233 ad.a.aligned = 0;
7234 }
7235 }
7236 return 0;
7237 }
7239 ST_FUNC void decl(int l)
7240 {
7241 decl0(l, 0);
7242 }
7244 /* ------------------------------------------------------------------------- */