tinycc.git / tccgen.c - blob 0576ded6c015b0e75c1a36948dd96a13df8b7920 (commit: bitfields: Implement MS compatible layout)
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack location where the stack pointer is saved whenever it is modified (by VLAs) */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
72 static void gen_cast(CType *type);
73 static inline CType *pointed_type(CType *type);
74 static int is_compatible_types(CType *type1, CType *type2);
75 static int parse_btype(CType *type, AttributeDef *ad);
76 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
77 static void parse_expr_type(CType *type);
78 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
79 static void block(int *bsym, int *csym, int is_expr);
80 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
81 static int decl0(int l, int is_for_loop_init);
82 static void expr_eq(void);
83 static void expr_lor_const(void);
84 static void unary_type(CType *type);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static void expr_type(CType *type);
90 static inline int64_t expr_const64(void);
91 ST_FUNC void vpush64(int ty, unsigned long long v);
92 ST_FUNC void vpush(CType *type);
93 ST_FUNC int gvtst(int inv, int t);
94 ST_FUNC int is_btype_size(int bt);
95 static void gen_inline_functions(TCCState *s);
97 ST_INLN int is_float(int t)
99 int bt;
100 bt = t & VT_BTYPE;
101 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
104 /* we use our own 'finite' function to avoid potential problems with
105 non-standard math libs */
106 /* XXX: endianness dependent */
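/* How the check below works (assuming the little-endian layout flagged
   above): p[1] holds the sign bit, the 11 exponent bits and the top 20
   mantissa bits of the double.  OR-ing with 0x800fffff sets every bit
   except the exponent field, so adding 1 carries all the way out of
   bit 31 only when the exponent is all ones (Inf/NaN); the final >> 31
   therefore yields 1 for finite values and 0 otherwise. */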
107 ST_FUNC int ieee_finite(double d)
109 int p[4];
110 memcpy(p, &d, sizeof(double));
111 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
114 ST_FUNC void test_lvalue(void)
116 if (!(vtop->r & VT_LVAL))
117 expect("lvalue");
120 ST_FUNC void check_vstack(void)
122 if (pvtop != vtop)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
129 #if 0
130 void pv (const char *lbl, int a, int b)
132 int i;
133 for (i = a; i < a + b; ++i) {
134 SValue *p = &vtop[-i];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
139 #endif
141 /* ------------------------------------------------------------------------- */
142 ST_FUNC void tccgen_start(TCCState *s1)
144 cur_text_section = NULL;
145 funcname = "";
146 anon_sym = SYM_FIRST_ANOM;
147 section_sym = 0;
148 nocode_wanted = 1;
150 /* define some often used types */
151 int_type.t = VT_INT;
152 char_pointer_type.t = VT_BYTE;
153 mk_pointer(&char_pointer_type);
154 #if PTR_SIZE == 4
155 size_type.t = VT_INT;
156 #else
157 size_type.t = VT_LLONG;
158 #endif
159 func_old_type.t = VT_FUNC;
160 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
162 if (s1->do_debug) {
163 char buf[512];
165 /* file info: full path + filename */
166 section_sym = put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
168 text_section->sh_num, NULL);
169 getcwd(buf, sizeof(buf));
170 #ifdef _WIN32
171 normalize_slashes(buf);
172 #endif
173 pstrcat(buf, sizeof(buf), "/");
174 put_stabs_r(buf, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
176 put_stabs_r(file->filename, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
179 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
180 symbols can be safely used */
181 put_elf_sym(symtab_section, 0, 0,
182 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
183 SHN_ABS, file->filename);
185 #ifdef TCC_TARGET_ARM
186 arm_init(s1);
187 #endif
190 ST_FUNC void tccgen_end(TCCState *s1)
192 gen_inline_functions(s1);
193 check_vstack();
194 /* end of translation unit info */
195 if (s1->do_debug) {
196 put_stabs_r(NULL, N_SO, 0, 0,
197 text_section->data_offset, text_section, section_sym);
201 /* ------------------------------------------------------------------------- */
202 /* update sym->c so that it points to an external symbol in section
203 'section' with value 'value' */
205 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
206 addr_t value, unsigned long size,
207 int can_add_underscore)
209 int sym_type, sym_bind, sh_num, info, other;
210 ElfW(Sym) *esym;
211 const char *name;
212 char buf1[256];
214 #ifdef CONFIG_TCC_BCHECK
215 char buf[32];
216 #endif
218 if (section == NULL)
219 sh_num = SHN_UNDEF;
220 else if (section == SECTION_ABS)
221 sh_num = SHN_ABS;
222 else
223 sh_num = section->sh_num;
225 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
226 sym_type = STT_FUNC;
227 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
228 sym_type = STT_NOTYPE;
229 } else {
230 sym_type = STT_OBJECT;
233 if (sym->type.t & VT_STATIC)
234 sym_bind = STB_LOCAL;
235 else {
236 if (sym->type.t & VT_WEAK)
237 sym_bind = STB_WEAK;
238 else
239 sym_bind = STB_GLOBAL;
242 if (!sym->c) {
243 name = get_tok_str(sym->v, NULL);
244 #ifdef CONFIG_TCC_BCHECK
245 if (tcc_state->do_bounds_check) {
246 /* XXX: avoid doing that for statics ? */
247 /* if bound checking is activated, we change some function
248 names by adding the "__bound_" prefix */
249 switch(sym->v) {
250 #ifdef TCC_TARGET_PE
251 /* XXX: we rely only on malloc hooks */
252 case TOK_malloc:
253 case TOK_free:
254 case TOK_realloc:
255 case TOK_memalign:
256 case TOK_calloc:
257 #endif
258 case TOK_memcpy:
259 case TOK_memmove:
260 case TOK_memset:
261 case TOK_strlen:
262 case TOK_strcpy:
263 case TOK_alloca:
264 strcpy(buf, "__bound_");
265 strcat(buf, name);
266 name = buf;
267 break;
270 #endif
271 other = 0;
273 #ifdef TCC_TARGET_PE
274 if (sym->type.t & VT_EXPORT)
275 other |= ST_PE_EXPORT;
276 if (sym_type == STT_FUNC && sym->type.ref) {
277 Sym *ref = sym->type.ref;
278 if (ref->a.func_export)
279 other |= ST_PE_EXPORT;
280 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
281 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
282 name = buf1;
283 other |= ST_PE_STDCALL;
284 can_add_underscore = 0;
286 } else {
287 if (find_elf_sym(tcc_state->dynsymtab_section, name))
288 other |= ST_PE_IMPORT;
289 if (sym->type.t & VT_IMPORT)
290 other |= ST_PE_IMPORT;
292 #else
293 if (! (sym->type.t & VT_STATIC))
294 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
295 #endif
296 if (tcc_state->leading_underscore && can_add_underscore) {
297 buf1[0] = '_';
298 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
299 name = buf1;
301 if (sym->asm_label) {
302 name = get_tok_str(sym->asm_label, NULL);
304 info = ELFW(ST_INFO)(sym_bind, sym_type);
305 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
306 } else {
307 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
308 esym->st_value = value;
309 esym->st_size = size;
310 esym->st_shndx = sh_num;
314 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
315 addr_t value, unsigned long size)
317 put_extern_sym2(sym, section, value, size, 1);
320 /* add a new relocation entry to symbol 'sym' in section 's' */
321 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
322 addr_t addend)
324 int c = 0;
325 if (sym) {
326 if (0 == sym->c)
327 put_extern_sym(sym, NULL, 0, 0);
328 c = sym->c;
330 /* now we can add ELF relocation info */
331 put_elf_reloca(symtab_section, s, offset, type, c, addend);
334 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
336 greloca(s, sym, offset, type, 0);
339 /* ------------------------------------------------------------------------- */
340 /* symbol allocator */
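/* Syms are carved out of SYM_POOL_NB-sized pools threaded on a single
   free list (sym_free_first); sym_free() just pushes a Sym back on that
   list, so allocation is O(1), and the pools themselves are recorded in
   the sym_pools dynarray so they can be released in bulk later. */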
341 static Sym *__sym_malloc(void)
343 Sym *sym_pool, *sym, *last_sym;
344 int i;
346 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
347 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
349 last_sym = sym_free_first;
350 sym = sym_pool;
351 for(i = 0; i < SYM_POOL_NB; i++) {
352 sym->next = last_sym;
353 last_sym = sym;
354 sym++;
356 sym_free_first = last_sym;
357 return last_sym;
360 static inline Sym *sym_malloc(void)
362 Sym *sym;
363 #ifndef SYM_DEBUG
364 sym = sym_free_first;
365 if (!sym)
366 sym = __sym_malloc();
367 sym_free_first = sym->next;
368 return sym;
369 #else
370 sym = tcc_malloc(sizeof(Sym));
371 return sym;
372 #endif
375 ST_INLN void sym_free(Sym *sym)
377 #ifndef SYM_DEBUG
378 sym->next = sym_free_first;
379 sym_free_first = sym;
380 #else
381 tcc_free(sym);
382 #endif
385 /* push, without hashing */
386 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
388 Sym *s;
390 s = sym_malloc();
391 s->scope = 0;
392 s->v = v;
393 s->type.t = t;
394 s->type.ref = NULL;
395 #ifdef _WIN64
396 s->d = NULL;
397 #endif
398 s->c = c;
399 s->next = NULL;
400 /* add in stack */
401 s->prev = *ps;
402 *ps = s;
403 return s;
406 /* find a symbol and return its associated structure. 's' is the top
407 of the symbol stack */
408 ST_FUNC Sym *sym_find2(Sym *s, int v)
410 while (s) {
411 if (s->v == v)
412 return s;
413 else if (s->v == -1)
414 return NULL;
415 s = s->prev;
417 return NULL;
420 /* structure lookup */
421 ST_INLN Sym *struct_find(int v)
423 v -= TOK_IDENT;
424 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
425 return NULL;
426 return table_ident[v]->sym_struct;
429 /* find an identifier */
430 ST_INLN Sym *sym_find(int v)
432 v -= TOK_IDENT;
433 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
434 return NULL;
435 return table_ident[v]->sym_identifier;
438 /* push a given symbol on the symbol stack */
439 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
441 Sym *s, **ps;
442 TokenSym *ts;
444 if (local_stack)
445 ps = &local_stack;
446 else
447 ps = &global_stack;
448 s = sym_push2(ps, v, type->t, c);
449 s->type.ref = type->ref;
450 s->r = r;
451 /* don't record fields or anonymous symbols */
452 /* XXX: simplify */
453 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
454 /* record symbol in token array */
455 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
456 if (v & SYM_STRUCT)
457 ps = &ts->sym_struct;
458 else
459 ps = &ts->sym_identifier;
460 s->prev_tok = *ps;
461 *ps = s;
462 s->scope = local_scope;
463 if (s->prev_tok && s->prev_tok->scope == s->scope)
464 tcc_error("redeclaration of '%s'",
465 get_tok_str(v & ~SYM_STRUCT, NULL));
467 return s;
470 /* push a global identifier */
471 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
473 Sym *s, **ps;
474 s = sym_push2(&global_stack, v, t, c);
475 /* don't record anonymous symbol */
476 if (v < SYM_FIRST_ANOM) {
477 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
478 /* modify the top most local identifier, so that
479 sym_identifier will point to 's' when popped */
480 while (*ps != NULL)
481 ps = &(*ps)->prev_tok;
482 s->prev_tok = NULL;
483 *ps = s;
485 return s;
488 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
489 pop them yet from the list, but do remove them from the token array. */
490 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
492 Sym *s, *ss, **ps;
493 TokenSym *ts;
494 int v;
496 s = *ptop;
497 while(s != b) {
498 ss = s->prev;
499 v = s->v;
500 /* remove symbol in token array */
501 /* XXX: simplify */
502 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
503 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
504 if (v & SYM_STRUCT)
505 ps = &ts->sym_struct;
506 else
507 ps = &ts->sym_identifier;
508 *ps = s->prev_tok;
510 if (!keep)
511 sym_free(s);
512 s = ss;
514 if (!keep)
515 *ptop = b;
518 static void weaken_symbol(Sym *sym)
520 sym->type.t |= VT_WEAK;
521 if (sym->c > 0) {
522 int esym_type;
523 ElfW(Sym) *esym;
525 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
526 esym_type = ELFW(ST_TYPE)(esym->st_info);
527 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
531 static void apply_visibility(Sym *sym, CType *type)
533 int vis = sym->type.t & VT_VIS_MASK;
534 int vis2 = type->t & VT_VIS_MASK;
535 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
536 vis = vis2;
537 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
539 else
540 vis = (vis < vis2) ? vis : vis2;
541 sym->type.t &= ~VT_VIS_MASK;
542 sym->type.t |= vis;
544 if (sym->c > 0) {
545 ElfW(Sym) *esym;
547 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
548 vis >>= VT_VIS_SHIFT;
549 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
553 /* ------------------------------------------------------------------------- */
555 ST_FUNC void swap(int *p, int *q)
557 int t;
558 t = *p;
559 *p = *q;
560 *q = t;
563 static void vsetc(CType *type, int r, CValue *vc)
565 int v;
567 if (vtop >= vstack + (VSTACK_SIZE - 1))
568 tcc_error("memory full (vstack)");
569 /* we cannot leave the value in the CPU flags if other instructions are
570 generated. Also avoid leaving VT_JMP anywhere except on the top of the
571 stack because it would complicate the code generator. */
572 if (vtop >= vstack) {
573 v = vtop->r & VT_VALMASK;
574 if (v == VT_CMP || (v & ~1) == VT_JMP)
575 gv(RC_INT);
577 vtop++;
578 vtop->type = *type;
579 vtop->r = r;
580 vtop->r2 = VT_CONST;
581 vtop->c = *vc;
582 vtop->sym = NULL;
585 /* push a constant of type "type" with an unspecified (dummy) value */
586 ST_FUNC void vpush(CType *type)
588 CValue cval;
589 vsetc(type, VT_CONST, &cval);
592 /* push integer constant */
593 ST_FUNC void vpushi(int v)
595 CValue cval;
596 cval.i = v;
597 vsetc(&int_type, VT_CONST, &cval);
600 /* push a pointer sized constant */
601 static void vpushs(addr_t v)
603 CValue cval;
604 cval.i = v;
605 vsetc(&size_type, VT_CONST, &cval);
608 /* push arbitrary 64bit constant */
609 ST_FUNC void vpush64(int ty, unsigned long long v)
611 CValue cval;
612 CType ctype;
613 ctype.t = ty;
614 ctype.ref = NULL;
615 cval.i = v;
616 vsetc(&ctype, VT_CONST, &cval);
619 /* push long long constant */
620 static inline void vpushll(long long v)
622 vpush64(VT_LLONG, v);
625 /* push a symbol value of TYPE */
626 static inline void vpushsym(CType *type, Sym *sym)
628 CValue cval;
629 cval.i = 0;
630 vsetc(type, VT_CONST | VT_SYM, &cval);
631 vtop->sym = sym;
634 /* Return a static symbol pointing to a section */
635 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
637 int v;
638 Sym *sym;
640 v = anon_sym++;
641 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
642 sym->type.ref = type->ref;
643 sym->r = VT_CONST | VT_SYM;
644 put_extern_sym(sym, sec, offset, size);
645 return sym;
648 /* push a reference to a section offset by adding a dummy symbol */
649 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
651 vpushsym(type, get_sym_ref(type, sec, offset, size));
654 /* define a new external reference to a symbol 'v' of type 'u' */
655 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
657 Sym *s;
659 s = sym_find(v);
660 if (!s) {
661 /* push forward reference */
662 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
663 s->type.ref = type->ref;
664 s->r = r | VT_CONST | VT_SYM;
666 return s;
669 /* define a new external reference to a symbol 'v' */
670 static Sym *external_sym(int v, CType *type, int r)
672 Sym *s;
674 s = sym_find(v);
675 if (!s) {
676 /* push forward reference */
677 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
678 s->type.t |= VT_EXTERN;
679 } else if (s->type.ref == func_old_type.ref) {
680 s->type.ref = type->ref;
681 s->r = r | VT_CONST | VT_SYM;
682 s->type.t |= VT_EXTERN;
683 } else if (!is_compatible_types(&s->type, type)) {
684 tcc_error("incompatible types for redefinition of '%s'",
685 get_tok_str(v, NULL));
687 /* Merge some storage attributes. */
688 if (type->t & VT_WEAK)
689 weaken_symbol(s);
691 if (type->t & VT_VIS_MASK)
692 apply_visibility(s, type);
694 return s;
697 /* push a reference to global symbol v */
698 ST_FUNC void vpush_global_sym(CType *type, int v)
700 vpushsym(type, external_global_sym(v, type, 0));
703 ST_FUNC void vset(CType *type, int r, long v)
705 CValue cval;
707 cval.i = v;
708 vsetc(type, r, &cval);
711 static void vseti(int r, int v)
713 CType type;
714 type.t = VT_INT;
715 type.ref = 0;
716 vset(&type, r, v);
719 ST_FUNC void vswap(void)
721 SValue tmp;
722 /* we cannot leave the value in the CPU flags if other instructions are
723 generated. Also avoid leaving VT_JMP anywhere except on the top of the
724 stack because it would complicate the code generator. */
725 if (vtop >= vstack) {
726 int v = vtop->r & VT_VALMASK;
727 if (v == VT_CMP || (v & ~1) == VT_JMP)
728 gv(RC_INT);
730 tmp = vtop[0];
731 vtop[0] = vtop[-1];
732 vtop[-1] = tmp;
734 /* XXX: +2% overall speed possible with optimized memswap
736 * memswap(&vtop[0], &vtop[1], sizeof *vtop); */
740 ST_FUNC void vpushv(SValue *v)
742 if (vtop >= vstack + (VSTACK_SIZE - 1))
743 tcc_error("memory full (vstack)");
744 vtop++;
745 *vtop = *v;
748 static void vdup(void)
750 vpushv(vtop);
753 /* save registers up to (vtop - n) stack entry */
754 ST_FUNC void save_regs(int n)
756 SValue *p, *p1;
757 for(p = vstack, p1 = vtop - n; p <= p1; p++)
758 save_reg(p->r);
761 /* save r to the memory stack, and mark it as being free */
762 ST_FUNC void save_reg(int r)
764 save_reg_upstack(r, 0);
767 /* save r to the memory stack, and mark it as being free,
768 if seen up to (vtop - n) stack entry */
769 ST_FUNC void save_reg_upstack(int r, int n)
771 int l, saved, size, align;
772 SValue *p, *p1, sv;
773 CType *type;
775 if ((r &= VT_VALMASK) >= VT_CONST)
776 return;
778 /* modify all stack values */
779 saved = 0;
780 l = 0;
781 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
782 if ((p->r & VT_VALMASK) == r ||
783 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
784 /* must save value on stack if not already done */
785 if (!saved) {
786 /* NOTE: must reload 'r' because r might be equal to r2 */
787 r = p->r & VT_VALMASK;
788 /* store register in the stack */
789 type = &p->type;
790 if ((p->r & VT_LVAL) ||
791 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
792 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
793 type = &char_pointer_type;
794 #else
795 type = &int_type;
796 #endif
797 size = type_size(type, &align);
798 loc = (loc - size) & -align;
799 sv.type.t = type->t;
800 sv.r = VT_LOCAL | VT_LVAL;
801 sv.c.i = loc;
802 store(r, &sv);
803 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
804 /* x86 specific: need to pop fp register ST0 if saved */
805 if (r == TREG_ST0) {
806 o(0xd8dd); /* fstp %st(0) */
808 #endif
809 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
810 /* special long long case */
811 if ((type->t & VT_BTYPE) == VT_LLONG) {
812 sv.c.i += 4;
813 store(p->r2, &sv);
815 #endif
816 l = loc;
817 saved = 1;
819 /* mark that stack entry as being saved on the stack */
820 if (p->r & VT_LVAL) {
821 /* also clear the bounded flag because the
822 relocation address of the function was stored in
823 p->c.i */
824 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
825 } else {
826 p->r = lvalue_type(p->type.t) | VT_LOCAL;
828 p->r2 = VT_CONST;
829 p->c.i = l;
834 #ifdef TCC_TARGET_ARM
835 /* find a register of class 'rc2' with at most one reference on stack.
836 * If none, call get_reg(rc) */
837 ST_FUNC int get_reg_ex(int rc, int rc2)
839 int r;
840 SValue *p;
842 for(r=0;r<NB_REGS;r++) {
843 if (reg_classes[r] & rc2) {
844 int n;
845 n=0;
846 for(p = vstack; p <= vtop; p++) {
847 if ((p->r & VT_VALMASK) == r ||
848 (p->r2 & VT_VALMASK) == r)
849 n++;
851 if (n <= 1)
852 return r;
855 return get_reg(rc);
857 #endif
859 /* find a free register of class 'rc'. If none, save one register */
860 ST_FUNC int get_reg(int rc)
862 int r;
863 SValue *p;
865 /* find a free register */
866 for(r=0;r<NB_REGS;r++) {
867 if (reg_classes[r] & rc) {
868 for(p=vstack;p<=vtop;p++) {
869 if ((p->r & VT_VALMASK) == r ||
870 (p->r2 & VT_VALMASK) == r)
871 goto notfound;
873 return r;
875 notfound: ;
878 /* no register left : free the first one on the stack (VERY
879 IMPORTANT to start from the bottom to ensure that we don't
880 spill registers used in gen_opi()) */
881 for(p=vstack;p<=vtop;p++) {
882 /* look at second register (if long long) */
883 r = p->r2 & VT_VALMASK;
884 if (r < VT_CONST && (reg_classes[r] & rc))
885 goto save_found;
886 r = p->r & VT_VALMASK;
887 if (r < VT_CONST && (reg_classes[r] & rc)) {
888 save_found:
889 save_reg(r);
890 return r;
893 /* Should never come here */
894 return -1;
897 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
898 if needed */
899 static void move_reg(int r, int s, int t)
901 SValue sv;
903 if (r != s) {
904 save_reg(r);
905 sv.type.t = t;
906 sv.type.ref = NULL;
907 sv.r = s;
908 sv.c.i = 0;
909 load(r, &sv);
913 /* get address of vtop (vtop MUST BE an lvalue) */
914 ST_FUNC void gaddrof(void)
916 if (vtop->r & VT_REF && !nocode_wanted)
917 gv(RC_INT);
918 vtop->r &= ~VT_LVAL;
919 /* tricky: if saved lvalue, then we can go back to lvalue */
920 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
921 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
926 #ifdef CONFIG_TCC_BCHECK
927 /* generate lvalue bound code */
928 static void gbound(void)
930 int lval_type;
931 CType type1;
933 vtop->r &= ~VT_MUSTBOUND;
934 /* if lvalue, then use checking code before dereferencing */
935 if (vtop->r & VT_LVAL) {
936 /* if not VT_BOUNDED value, then make one */
937 if (!(vtop->r & VT_BOUNDED)) {
938 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
939 /* must save type because we must set it to int to get pointer */
940 type1 = vtop->type;
941 vtop->type.t = VT_PTR;
942 gaddrof();
943 vpushi(0);
944 gen_bounded_ptr_add();
945 vtop->r |= lval_type;
946 vtop->type = type1;
948 /* then check for dereferencing */
949 gen_bounded_ptr_deref();
952 #endif
954 /* store vtop in a register belonging to class 'rc'. lvalues are
955 converted to values. Cannot be used on values that cannot be
956 converted to a register value (such as structures). */
957 ST_FUNC int gv(int rc)
959 int r, bit_pos, bit_size, size, align, i;
960 int rc2;
962 /* NOTE: get_reg can modify vstack[] */
963 if (vtop->type.t & VT_BITFIELD) {
964 CType type;
965 int bits = 32;
966 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
967 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
968 /* remove bit field info to avoid loops */
969 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
970 /* cast to int to propagate signedness in following ops */
971 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
972 type.t = VT_LLONG;
973 bits = 64;
974 } else
975 type.t = VT_INT;
976 if((vtop->type.t & VT_UNSIGNED) ||
977 (vtop->type.t & VT_BTYPE) == VT_BOOL)
978 type.t |= VT_UNSIGNED;
979 gen_cast(&type);
980 /* generate shifts */
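/* e.g. a signed field with bit_pos=3, bit_size=5 in a 32-bit word:
   shift left by 32-(3+5)=24 to bring the field to the top, then
   arithmetic-shift right by 32-5=27 to sign-extend it down to bit 0
   (the SAR becomes a SHR for unsigned fields, i.e. zero extension) */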
981 vpushi(bits - (bit_pos + bit_size));
982 gen_op(TOK_SHL);
983 vpushi(bits - bit_size);
984 /* NOTE: transformed to SHR if unsigned */
985 gen_op(TOK_SAR);
986 r = gv(rc);
987 } else {
988 if (is_float(vtop->type.t) &&
989 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
990 Sym *sym;
991 int *ptr;
992 unsigned long offset;
993 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
994 CValue check;
995 #endif
997 /* XXX: unify with initializers handling ? */
998 /* CPUs usually cannot use float constants, so we store them
999 generically in data segment */
1000 size = type_size(&vtop->type, &align);
1001 offset = (data_section->data_offset + align - 1) & -align;
1002 data_section->data_offset = offset;
1003 /* XXX: not portable yet */
1004 #if defined(__i386__) || defined(__x86_64__)
1005 /* Zero pad x87 tenbyte long doubles */
1006 if (size == LDOUBLE_SIZE) {
1007 vtop->c.tab[2] &= 0xffff;
1008 #if LDOUBLE_SIZE == 16
1009 vtop->c.tab[3] = 0;
1010 #endif
1012 #endif
1013 ptr = section_ptr_add(data_section, size);
1014 size = size >> 2;
1015 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1016 check.d = 1;
1017 if(check.tab[0])
1018 for(i=0;i<size;i++)
1019 ptr[i] = vtop->c.tab[size-1-i];
1020 else
1021 #endif
1022 for(i=0;i<size;i++)
1023 ptr[i] = vtop->c.tab[i];
1024 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1025 vtop->r |= VT_LVAL | VT_SYM;
1026 vtop->sym = sym;
1027 vtop->c.i = 0;
1029 #ifdef CONFIG_TCC_BCHECK
1030 if (vtop->r & VT_MUSTBOUND)
1031 gbound();
1032 #endif
1034 r = vtop->r & VT_VALMASK;
1035 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1036 #ifndef TCC_TARGET_ARM64
1037 if (rc == RC_IRET)
1038 rc2 = RC_LRET;
1039 #ifdef TCC_TARGET_X86_64
1040 else if (rc == RC_FRET)
1041 rc2 = RC_QRET;
1042 #endif
1043 #endif
1045 /* need to reload if:
1046 - constant
1047 - lvalue (need to dereference pointer)
1048 - already a register, but not in the right class */
1049 if (r >= VT_CONST
1050 || (vtop->r & VT_LVAL)
1051 || !(reg_classes[r] & rc)
1052 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1053 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1054 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1055 #else
1056 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1057 #endif
1060 r = get_reg(rc);
1061 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1062 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1063 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1064 #else
1065 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1066 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1067 unsigned long long ll;
1068 #endif
1069 int r2, original_type;
1070 original_type = vtop->type.t;
1071 /* two register type load : expand to two words
1072 temporarily */
1073 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1074 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1075 /* load constant */
1076 ll = vtop->c.i;
1077 vtop->c.i = ll; /* first word */
1078 load(r, vtop);
1079 vtop->r = r; /* save register value */
1080 vpushi(ll >> 32); /* second word */
1081 } else
1082 #endif
1083 if (vtop->r & VT_LVAL) {
1084 /* We do not want to modify the long long
1085 pointer here, so the safest (and least
1086 efficient) way is to save all the other registers
1087 on the stack. XXX: totally inefficient. */
1088 #if 0
1089 save_regs(1);
1090 #else
1091 /* lvalue_save: save only if used further down the stack */
1092 save_reg_upstack(vtop->r, 1);
1093 #endif
1094 /* load from memory */
1095 vtop->type.t = load_type;
1096 load(r, vtop);
1097 vdup();
1098 vtop[-1].r = r; /* save register value */
1099 /* increment pointer to get second word */
1100 vtop->type.t = addr_type;
1101 gaddrof();
1102 vpushi(load_size);
1103 gen_op('+');
1104 vtop->r |= VT_LVAL;
1105 vtop->type.t = load_type;
1106 } else {
1107 /* move registers */
1108 load(r, vtop);
1109 vdup();
1110 vtop[-1].r = r; /* save register value */
1111 vtop->r = vtop[-1].r2;
1113 /* Allocate second register. Here we rely on the fact that
1114 get_reg() tries first to free r2 of an SValue. */
1115 r2 = get_reg(rc2);
1116 load(r2, vtop);
1117 vpop();
1118 /* write second register */
1119 vtop->r2 = r2;
1120 vtop->type.t = original_type;
1121 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1122 int t1, t;
1123 /* lvalue of scalar type : need to use lvalue type
1124 because of possible cast */
1125 t = vtop->type.t;
1126 t1 = t;
1127 /* compute memory access type */
1128 if (vtop->r & VT_REF)
1129 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1130 t = VT_PTR;
1131 #else
1132 t = VT_INT;
1133 #endif
1134 else if (vtop->r & VT_LVAL_BYTE)
1135 t = VT_BYTE;
1136 else if (vtop->r & VT_LVAL_SHORT)
1137 t = VT_SHORT;
1138 if (vtop->r & VT_LVAL_UNSIGNED)
1139 t |= VT_UNSIGNED;
1140 vtop->type.t = t;
1141 load(r, vtop);
1142 /* restore wanted type */
1143 vtop->type.t = t1;
1144 } else {
1145 /* one register type load */
1146 load(r, vtop);
1149 vtop->r = r;
1150 #ifdef TCC_TARGET_C67
1151 /* uses register pairs for doubles */
1152 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1153 vtop->r2 = r+1;
1154 #endif
1156 return r;
1159 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1160 ST_FUNC void gv2(int rc1, int rc2)
1162 int v;
1164 /* generate more generic register first. But VT_JMP or VT_CMP
1165 values must be generated first in all cases to avoid possible
1166 reload errors */
1167 v = vtop[0].r & VT_VALMASK;
1168 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1169 vswap();
1170 gv(rc1);
1171 vswap();
1172 gv(rc2);
1173 /* test if reload is needed for first register */
1174 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1175 vswap();
1176 gv(rc1);
1177 vswap();
1179 } else {
1180 gv(rc2);
1181 vswap();
1182 gv(rc1);
1183 vswap();
1184 /* test if reload is needed for first register */
1185 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1186 gv(rc2);
1191 #ifndef TCC_TARGET_ARM64
1192 /* wrapper around RC_FRET to return a register by type */
1193 static int rc_fret(int t)
1195 #ifdef TCC_TARGET_X86_64
1196 if (t == VT_LDOUBLE) {
1197 return RC_ST0;
1199 #endif
1200 return RC_FRET;
1202 #endif
1204 /* wrapper around REG_FRET to return a register by type */
1205 static int reg_fret(int t)
1207 #ifdef TCC_TARGET_X86_64
1208 if (t == VT_LDOUBLE) {
1209 return TREG_ST0;
1211 #endif
1212 return REG_FRET;
1215 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1216 /* expand 64bit on stack in two ints */
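/* the 64-bit value on top of the stack is replaced by two 32-bit
   entries with the low word below the high word; constants and
   lvalues are split without generating code, anything else is first
   loaded into a register pair */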
1217 static void lexpand(void)
1219 int u, v;
1220 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1221 v = vtop->r & (VT_VALMASK | VT_LVAL);
1222 if (v == VT_CONST) {
1223 vdup();
1224 vtop[0].c.i >>= 32;
1225 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1226 vdup();
1227 vtop[0].c.i += 4;
1228 } else {
1229 gv(RC_INT);
1230 vdup();
1231 vtop[0].r = vtop[-1].r2;
1232 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1234 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1236 #endif
1238 #ifdef TCC_TARGET_ARM
1239 /* expand long long on stack */
1240 ST_FUNC void lexpand_nr(void)
1242 int u,v;
1244 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1245 vdup();
1246 vtop->r2 = VT_CONST;
1247 vtop->type.t = VT_INT | u;
1248 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1249 if (v == VT_CONST) {
1250 vtop[-1].c.i = vtop->c.i;
1251 vtop->c.i = vtop->c.i >> 32;
1252 vtop->r = VT_CONST;
1253 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1254 vtop->c.i += 4;
1255 vtop->r = vtop[-1].r;
1256 } else if (v > VT_CONST) {
1257 vtop--;
1258 lexpand();
1259 } else
1260 vtop->r = vtop[-1].r2;
1261 vtop[-1].r2 = VT_CONST;
1262 vtop[-1].type.t = VT_INT | u;
1264 #endif
1266 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1267 /* build a long long from two ints */
1268 static void lbuild(int t)
1270 gv2(RC_INT, RC_INT);
1271 vtop[-1].r2 = vtop[0].r;
1272 vtop[-1].type.t = t;
1273 vpop();
1275 #endif
1277 /* rotate n first stack elements to the bottom
1278 I1 ... In -> I2 ... In I1 [top is right]
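   e.g. with the stack a b c (c on top), vrotb(3) yields b c a
*/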
1280 ST_FUNC void vrotb(int n)
1282 int i;
1283 SValue tmp;
1285 tmp = vtop[-n + 1];
1286 for(i=-n+1;i!=0;i++)
1287 vtop[i] = vtop[i+1];
1288 vtop[0] = tmp;
1291 /* rotate the n elements before entry e towards the top
1292 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
1294 ST_FUNC void vrote(SValue *e, int n)
1296 int i;
1297 SValue tmp;
1299 tmp = *e;
1300 for(i = 0;i < n - 1; i++)
1301 e[-i] = e[-i - 1];
1302 e[-n + 1] = tmp;
1305 /* rotate n first stack elements to the top
1306 I1 ... In -> In I1 ... I(n-1) [top is right] */
1308 ST_FUNC void vrott(int n)
1310 vrote(vtop, n);
1313 /* pop stack value */
1314 ST_FUNC void vpop(void)
1316 int v;
1317 v = vtop->r & VT_VALMASK;
1318 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1319 /* for x86, we need to pop the FP stack */
1320 if (v == TREG_ST0 && !nocode_wanted) {
1321 o(0xd8dd); /* fstp %st(0) */
1322 } else
1323 #endif
1324 if (v == VT_JMP || v == VT_JMPI) {
1325 /* need to put correct jump if && or || without test */
1326 gsym(vtop->c.i);
1328 vtop--;
1331 /* convert stack entry to register and duplicate its value in another
1332 register */
1333 static void gv_dup(void)
1335 int rc, t, r, r1;
1336 SValue sv;
1338 t = vtop->type.t;
1339 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1340 if ((t & VT_BTYPE) == VT_LLONG) {
1341 lexpand();
1342 gv_dup();
1343 vswap();
1344 vrotb(3);
1345 gv_dup();
1346 vrotb(4);
1347 /* stack: H L L1 H1 */
1348 lbuild(t);
1349 vrotb(3);
1350 vrotb(3);
1351 vswap();
1352 lbuild(t);
1353 vswap();
1354 } else
1355 #endif
1357 /* duplicate value */
1358 rc = RC_INT;
1359 sv.type.t = VT_INT;
1360 if (is_float(t)) {
1361 rc = RC_FLOAT;
1362 #ifdef TCC_TARGET_X86_64
1363 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1364 rc = RC_ST0;
1366 #endif
1367 sv.type.t = t;
1369 r = gv(rc);
1370 r1 = get_reg(rc);
1371 sv.r = r;
1372 sv.c.i = 0;
1373 load(r1, &sv); /* move r to r1 */
1374 vdup();
1375 /* duplicates value */
1376 if (r != r1)
1377 vtop->r = r1;
1381 /* Generate value test
1383 * Generate a test for any value (jump, comparison and integers) */
1384 ST_FUNC int gvtst(int inv, int t)
1386 int v = vtop->r & VT_VALMASK;
1387 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1388 vpushi(0);
1389 gen_op(TOK_NE);
1391 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1392 /* constant jmp optimization */
1393 if ((vtop->c.i != 0) != inv)
1394 t = gjmp(t);
1395 vtop--;
1396 return t;
1398 return gtst(inv, t);
1401 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1402 /* generate CPU independent (unsigned) long long operations */
1403 static void gen_opl(int op)
1405 int t, a, b, op1, c, i;
1406 int func;
1407 unsigned short reg_iret = REG_IRET;
1408 unsigned short reg_lret = REG_LRET;
1409 SValue tmp;
1411 switch(op) {
1412 case '/':
1413 case TOK_PDIV:
1414 func = TOK___divdi3;
1415 goto gen_func;
1416 case TOK_UDIV:
1417 func = TOK___udivdi3;
1418 goto gen_func;
1419 case '%':
1420 func = TOK___moddi3;
1421 goto gen_mod_func;
1422 case TOK_UMOD:
1423 func = TOK___umoddi3;
1424 gen_mod_func:
1425 #ifdef TCC_ARM_EABI
1426 reg_iret = TREG_R2;
1427 reg_lret = TREG_R3;
1428 #endif
1429 gen_func:
1430 /* call generic long long function */
1431 vpush_global_sym(&func_old_type, func);
1432 vrott(3);
1433 gfunc_call(2);
1434 vpushi(0);
1435 vtop->r = reg_iret;
1436 vtop->r2 = reg_lret;
1437 break;
1438 case '^':
1439 case '&':
1440 case '|':
1441 case '*':
1442 case '+':
1443 case '-':
1444 //pv("gen_opl A",0,2);
1445 t = vtop->type.t;
1446 vswap();
1447 lexpand();
1448 vrotb(3);
1449 lexpand();
1450 /* stack: L1 H1 L2 H2 */
1451 tmp = vtop[0];
1452 vtop[0] = vtop[-3];
1453 vtop[-3] = tmp;
1454 tmp = vtop[-2];
1455 vtop[-2] = vtop[-3];
1456 vtop[-3] = tmp;
1457 vswap();
1458 /* stack: H1 H2 L1 L2 */
1459 //pv("gen_opl B",0,4);
1460 if (op == '*') {
1461 vpushv(vtop - 1);
1462 vpushv(vtop - 1);
1463 gen_op(TOK_UMULL);
1464 lexpand();
1465 /* stack: H1 H2 L1 L2 ML MH */
1466 for(i=0;i<4;i++)
1467 vrotb(6);
1468 /* stack: ML MH H1 H2 L1 L2 */
1469 tmp = vtop[0];
1470 vtop[0] = vtop[-2];
1471 vtop[-2] = tmp;
1472 /* stack: ML MH H1 L2 H2 L1 */
1473 gen_op('*');
1474 vrotb(3);
1475 vrotb(3);
1476 gen_op('*');
1477 /* stack: ML MH M1 M2 */
1478 gen_op('+');
1479 gen_op('+');
1480 } else if (op == '+' || op == '-') {
1481 /* XXX: add non carry method too (for MIPS or alpha) */
1482 if (op == '+')
1483 op1 = TOK_ADDC1;
1484 else
1485 op1 = TOK_SUBC1;
1486 gen_op(op1);
1487 /* stack: H1 H2 (L1 op L2) */
1488 vrotb(3);
1489 vrotb(3);
1490 gen_op(op1 + 1); /* TOK_xxxC2 */
1491 } else {
1492 gen_op(op);
1493 /* stack: H1 H2 (L1 op L2) */
1494 vrotb(3);
1495 vrotb(3);
1496 /* stack: (L1 op L2) H1 H2 */
1497 gen_op(op);
1498 /* stack: (L1 op L2) (H1 op H2) */
1500 /* stack: L H */
1501 lbuild(t);
1502 break;
1503 case TOK_SAR:
1504 case TOK_SHR:
1505 case TOK_SHL:
1506 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1507 t = vtop[-1].type.t;
1508 vswap();
1509 lexpand();
1510 vrotb(3);
1511 /* stack: L H shift */
1512 c = (int)vtop->c.i;
1513 /* constant: simpler */
1514 /* NOTE: all comments are for SHL. The other cases are
1515 done by swapping words */
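/* e.g. SHL by a constant c < 32 is essentially
   H = (H << c) | (L >> (32 - c)), L = L << c; for c >= 32 the low
   word, shifted by c - 32, becomes the new high word and the new low
   word is 0 (mirrored for SHR, with sign fill for SAR) */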
1516 vpop();
1517 if (op != TOK_SHL)
1518 vswap();
1519 if (c >= 32) {
1520 /* stack: L H */
1521 vpop();
1522 if (c > 32) {
1523 vpushi(c - 32);
1524 gen_op(op);
1526 if (op != TOK_SAR) {
1527 vpushi(0);
1528 } else {
1529 gv_dup();
1530 vpushi(31);
1531 gen_op(TOK_SAR);
1533 vswap();
1534 } else {
1535 vswap();
1536 gv_dup();
1537 /* stack: H L L */
1538 vpushi(c);
1539 gen_op(op);
1540 vswap();
1541 vpushi(32 - c);
1542 if (op == TOK_SHL)
1543 gen_op(TOK_SHR);
1544 else
1545 gen_op(TOK_SHL);
1546 vrotb(3);
1547 /* stack: L L H */
1548 vpushi(c);
1549 if (op == TOK_SHL)
1550 gen_op(TOK_SHL);
1551 else
1552 gen_op(TOK_SHR);
1553 gen_op('|');
1555 if (op != TOK_SHL)
1556 vswap();
1557 lbuild(t);
1558 } else {
1559 /* XXX: should provide a faster fallback on x86 ? */
1560 switch(op) {
1561 case TOK_SAR:
1562 func = TOK___ashrdi3;
1563 goto gen_func;
1564 case TOK_SHR:
1565 func = TOK___lshrdi3;
1566 goto gen_func;
1567 case TOK_SHL:
1568 func = TOK___ashldi3;
1569 goto gen_func;
1572 break;
1573 default:
1574 /* compare operations */
1575 t = vtop->type.t;
1576 vswap();
1577 lexpand();
1578 vrotb(3);
1579 lexpand();
1580 /* stack: L1 H1 L2 H2 */
1581 tmp = vtop[-1];
1582 vtop[-1] = vtop[-2];
1583 vtop[-2] = tmp;
1584 /* stack: L1 L2 H1 H2 */
1585 /* compare high */
1586 op1 = op;
1587 /* when values are equal, we need to compare low words. since
1588 the jump is inverted, we invert the test too. */
1589 if (op1 == TOK_LT)
1590 op1 = TOK_LE;
1591 else if (op1 == TOK_GT)
1592 op1 = TOK_GE;
1593 else if (op1 == TOK_ULT)
1594 op1 = TOK_ULE;
1595 else if (op1 == TOK_UGT)
1596 op1 = TOK_UGE;
1597 a = 0;
1598 b = 0;
1599 gen_op(op1);
1600 if (op1 != TOK_NE) {
1601 a = gvtst(1, 0);
1603 if (op != TOK_EQ) {
1604 /* generate non equal test */
1605 /* XXX: NOT PORTABLE yet */
1606 if (a == 0) {
1607 b = gvtst(0, 0);
1608 } else {
1609 #if defined(TCC_TARGET_I386)
1610 b = psym(0x850f, 0);
1611 #elif defined(TCC_TARGET_ARM)
1612 b = ind;
1613 o(0x1A000000 | encbranch(ind, 0, 1));
1614 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1615 tcc_error("not implemented");
1616 #else
1617 #error not supported
1618 #endif
1621 /* compare low. Always unsigned */
1622 op1 = op;
1623 if (op1 == TOK_LT)
1624 op1 = TOK_ULT;
1625 else if (op1 == TOK_LE)
1626 op1 = TOK_ULE;
1627 else if (op1 == TOK_GT)
1628 op1 = TOK_UGT;
1629 else if (op1 == TOK_GE)
1630 op1 = TOK_UGE;
1631 gen_op(op1);
1632 a = gvtst(1, a);
1633 gsym(b);
1634 vseti(VT_JMPI, a);
1635 break;
1638 #endif
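/* constant-folding helpers working on uint64_t: gen_opic_sdiv performs
   C-style truncating signed division by dividing the magnitudes and
   restoring the sign, and gen_opic_lt implements signed '<' by flipping
   the sign bit of both operands before an unsigned compare */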
1640 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1642 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1643 return (a ^ b) >> 63 ? -x : x;
1646 static int gen_opic_lt(uint64_t a, uint64_t b)
1648 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1651 /* handle integer constant folding and various machine-independent
1652 optimizations */
1653 static void gen_opic(int op)
1655 SValue *v1 = vtop - 1;
1656 SValue *v2 = vtop;
1657 int t1 = v1->type.t & VT_BTYPE;
1658 int t2 = v2->type.t & VT_BTYPE;
1659 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1660 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1661 uint64_t l1 = c1 ? v1->c.i : 0;
1662 uint64_t l2 = c2 ? v2->c.i : 0;
1663 int shm = (t1 == VT_LLONG) ? 63 : 31;
1665 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1666 l1 = ((uint32_t)l1 |
1667 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1668 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1669 l2 = ((uint32_t)l2 |
1670 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1672 if (c1 && c2) {
1673 switch(op) {
1674 case '+': l1 += l2; break;
1675 case '-': l1 -= l2; break;
1676 case '&': l1 &= l2; break;
1677 case '^': l1 ^= l2; break;
1678 case '|': l1 |= l2; break;
1679 case '*': l1 *= l2; break;
1681 case TOK_PDIV:
1682 case '/':
1683 case '%':
1684 case TOK_UDIV:
1685 case TOK_UMOD:
1686 /* if division by zero, generate explicit division */
1687 if (l2 == 0) {
1688 if (const_wanted)
1689 tcc_error("division by zero in constant");
1690 goto general_case;
1692 switch(op) {
1693 default: l1 = gen_opic_sdiv(l1, l2); break;
1694 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1695 case TOK_UDIV: l1 = l1 / l2; break;
1696 case TOK_UMOD: l1 = l1 % l2; break;
1698 break;
1699 case TOK_SHL: l1 <<= (l2 & shm); break;
1700 case TOK_SHR: l1 >>= (l2 & shm); break;
1701 case TOK_SAR:
1702 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1703 break;
1704 /* tests */
1705 case TOK_ULT: l1 = l1 < l2; break;
1706 case TOK_UGE: l1 = l1 >= l2; break;
1707 case TOK_EQ: l1 = l1 == l2; break;
1708 case TOK_NE: l1 = l1 != l2; break;
1709 case TOK_ULE: l1 = l1 <= l2; break;
1710 case TOK_UGT: l1 = l1 > l2; break;
1711 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1712 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1713 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1714 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1715 /* logical */
1716 case TOK_LAND: l1 = l1 && l2; break;
1717 case TOK_LOR: l1 = l1 || l2; break;
1718 default:
1719 goto general_case;
1721 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1722 l1 = ((uint32_t)l1 |
1723 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1724 v1->c.i = l1;
1725 vtop--;
1726 } else {
1727 /* if commutative ops, put c2 as constant */
1728 if (c1 && (op == '+' || op == '&' || op == '^' ||
1729 op == '|' || op == '*')) {
1730 vswap();
1731 c2 = c1; //c = c1, c1 = c2, c2 = c;
1732 l2 = l1; //l = l1, l1 = l2, l2 = l;
1734 if (!const_wanted &&
1735 c1 && ((l1 == 0 &&
1736 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1737 (l1 == -1 && op == TOK_SAR))) {
1738 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1739 vtop--;
1740 } else if (!const_wanted &&
1741 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1742 (l2 == -1 && op == '|') ||
1743 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1744 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1745 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1746 if (l2 == 1)
1747 vtop->c.i = 0;
1748 vswap();
1749 vtop--;
1750 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1751 op == TOK_PDIV) &&
1752 l2 == 1) ||
1753 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1754 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1755 l2 == 0) ||
1756 (op == '&' &&
1757 l2 == -1))) {
1758 /* filter out NOP operations like x*1, x-0, x&-1... */
1759 vtop--;
1760 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1761 /* try to use shifts instead of muls or divs */
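/* if l2 is a power of two ((l2 & (l2 - 1)) == 0), the loop below
   computes n = log2(l2) and the operation is strength-reduced, e.g.
   x * 8 -> x << 3, exact pointer division (TOK_PDIV) -> arithmetic
   shift right, unsigned division -> logical shift right */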
1762 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1763 int n = -1;
1764 while (l2) {
1765 l2 >>= 1;
1766 n++;
1768 vtop->c.i = n;
1769 if (op == '*')
1770 op = TOK_SHL;
1771 else if (op == TOK_PDIV)
1772 op = TOK_SAR;
1773 else
1774 op = TOK_SHR;
1776 goto general_case;
1777 } else if (c2 && (op == '+' || op == '-') &&
1778 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1779 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1780 /* symbol + constant case */
1781 if (op == '-')
1782 l2 = -l2;
1783 l2 += vtop[-1].c.i;
1784 /* The backends can't always deal with addends to symbols
1785 larger than +-1<<31. Don't construct such. */
1786 if ((int)l2 != l2)
1787 goto general_case;
1788 vtop--;
1789 vtop->c.i = l2;
1790 } else {
1791 general_case:
1792 if (!nocode_wanted) {
1793 /* call low level op generator */
1794 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1795 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1796 gen_opl(op);
1797 else
1798 gen_opi(op);
1799 } else {
1800 vtop--;
1801 /* Ensure vtop isn't marked VT_CONST in case something
1802 up our callchain is interested in const-ness of the
1803 expression. Also make it a non-LVAL if it was,
1804 so that further code can't accidentally generate
1805 a deref (happens only for buggy uses of e.g.
1806 gv() under nocode_wanted). */
1807 vtop->r &= ~(VT_VALMASK | VT_LVAL);
1813 /* generate a floating point operation with constant propagation */
1814 static void gen_opif(int op)
1816 int c1, c2;
1817 SValue *v1, *v2;
1818 long double f1, f2;
1820 v1 = vtop - 1;
1821 v2 = vtop;
1822 /* currently, we cannot do computations with forward symbols */
1823 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1824 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 if (c1 && c2) {
1826 if (v1->type.t == VT_FLOAT) {
1827 f1 = v1->c.f;
1828 f2 = v2->c.f;
1829 } else if (v1->type.t == VT_DOUBLE) {
1830 f1 = v1->c.d;
1831 f2 = v2->c.d;
1832 } else {
1833 f1 = v1->c.ld;
1834 f2 = v2->c.ld;
1837 /* NOTE: we only do constant propagation on finite numbers (not
1838 NaN or infinity) (ANSI spec) */
1839 if (!ieee_finite(f1) || !ieee_finite(f2))
1840 goto general_case;
1842 switch(op) {
1843 case '+': f1 += f2; break;
1844 case '-': f1 -= f2; break;
1845 case '*': f1 *= f2; break;
1846 case '/':
1847 if (f2 == 0.0) {
1848 if (const_wanted)
1849 tcc_error("division by zero in constant");
1850 goto general_case;
1852 f1 /= f2;
1853 break;
1854 /* XXX: also handles tests ? */
1855 default:
1856 goto general_case;
1858 /* XXX: overflow test ? */
1859 if (v1->type.t == VT_FLOAT) {
1860 v1->c.f = f1;
1861 } else if (v1->type.t == VT_DOUBLE) {
1862 v1->c.d = f1;
1863 } else {
1864 v1->c.ld = f1;
1866 vtop--;
1867 } else {
1868 general_case:
1869 if (!nocode_wanted) {
1870 gen_opf(op);
1871 } else {
1872 vtop--;
1877 static int pointed_size(CType *type)
1879 int align;
1880 return type_size(pointed_type(type), &align);
1883 static void vla_runtime_pointed_size(CType *type)
1885 int align;
1886 vla_runtime_type_size(pointed_type(type), &align);
1889 static inline int is_null_pointer(SValue *p)
1891 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1892 return 0;
1893 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1894 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1895 ((p->type.t & VT_BTYPE) == VT_PTR &&
1896 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1899 static inline int is_integer_btype(int bt)
1901 return (bt == VT_BYTE || bt == VT_SHORT ||
1902 bt == VT_INT || bt == VT_LLONG);
1905 /* check types for comparison or subtraction of pointers */
1906 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1908 CType *type1, *type2, tmp_type1, tmp_type2;
1909 int bt1, bt2;
1911 /* null pointers are accepted for all comparisons, as gcc does */
1912 if (is_null_pointer(p1) || is_null_pointer(p2))
1913 return;
1914 type1 = &p1->type;
1915 type2 = &p2->type;
1916 bt1 = type1->t & VT_BTYPE;
1917 bt2 = type2->t & VT_BTYPE;
1918 /* accept comparison between pointer and integer with a warning */
1919 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1920 if (op != TOK_LOR && op != TOK_LAND )
1921 tcc_warning("comparison between pointer and integer");
1922 return;
1925 /* both must be pointers or implicit function pointers */
1926 if (bt1 == VT_PTR) {
1927 type1 = pointed_type(type1);
1928 } else if (bt1 != VT_FUNC)
1929 goto invalid_operands;
1931 if (bt2 == VT_PTR) {
1932 type2 = pointed_type(type2);
1933 } else if (bt2 != VT_FUNC) {
1934 invalid_operands:
1935 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1937 if ((type1->t & VT_BTYPE) == VT_VOID ||
1938 (type2->t & VT_BTYPE) == VT_VOID)
1939 return;
1940 tmp_type1 = *type1;
1941 tmp_type2 = *type2;
1942 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1943 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1944 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1945 /* gcc-like error if '-' is used */
1946 if (op == '-')
1947 goto invalid_operands;
1948 else
1949 tcc_warning("comparison of distinct pointer types lacks a cast");
1953 /* generic gen_op: handles types problems */
1954 ST_FUNC void gen_op(int op)
1956 int u, t1, t2, bt1, bt2, t;
1957 CType type1;
1959 redo:
1960 t1 = vtop[-1].type.t;
1961 t2 = vtop[0].type.t;
1962 bt1 = t1 & VT_BTYPE;
1963 bt2 = t2 & VT_BTYPE;
1965 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1966 tcc_error("operation on a struct");
1967 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1968 if (bt2 == VT_FUNC) {
1969 mk_pointer(&vtop->type);
1970 gaddrof();
1972 if (bt1 == VT_FUNC) {
1973 vswap();
1974 mk_pointer(&vtop->type);
1975 gaddrof();
1976 vswap();
1978 goto redo;
1979 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1980 /* at least one operand is a pointer */
1981 /* relational op: both operands must be pointers */
1982 if (op >= TOK_ULT && op <= TOK_LOR) {
1983 check_comparison_pointer_types(vtop - 1, vtop, op);
1984 /* pointers are handled as unsigned */
1985 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1986 t = VT_LLONG | VT_UNSIGNED;
1987 #else
1988 t = VT_INT | VT_UNSIGNED;
1989 #endif
1990 goto std_op;
1992 /* if both pointers, then it must be the '-' op */
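/* p1 - p2: subtract the raw addresses, then divide the byte difference
   exactly by the pointed-to size (the TOK_PDIV below), giving an
   element count of integer (ptrdiff_t-like) type */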
1993 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1994 if (op != '-')
1995 tcc_error("cannot use pointers here");
1996 check_comparison_pointer_types(vtop - 1, vtop, op);
1997 /* XXX: check that types are compatible */
1998 if (vtop[-1].type.t & VT_VLA) {
1999 vla_runtime_pointed_size(&vtop[-1].type);
2000 } else {
2001 vpushi(pointed_size(&vtop[-1].type));
2003 vrott(3);
2004 gen_opic(op);
2005 /* set to integer type */
2006 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2007 vtop->type.t = VT_LLONG;
2008 #else
2009 vtop->type.t = VT_INT;
2010 #endif
2011 vswap();
2012 gen_op(TOK_PDIV);
2013 } else {
2014 /* exactly one pointer : must be '+' or '-'. */
2015 if (op != '-' && op != '+')
2016 tcc_error("cannot use pointers here");
2017 /* Put pointer as first operand */
2018 if (bt2 == VT_PTR) {
2019 vswap();
2020 swap(&t1, &t2);
2022 #if PTR_SIZE == 4
2023 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2024 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2025 gen_cast(&int_type);
2026 #endif
2027 type1 = vtop[-1].type;
2028 type1.t &= ~VT_ARRAY;
2029 if (vtop[-1].type.t & VT_VLA)
2030 vla_runtime_pointed_size(&vtop[-1].type);
2031 else {
2032 u = pointed_size(&vtop[-1].type);
2033 if (u < 0)
2034 tcc_error("unknown array element size");
2035 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2036 vpushll(u);
2037 #else
2038 /* XXX: cast to int ? (long long case) */
2039 vpushi(u);
2040 #endif
2042 gen_op('*');
2043 #if 0
2044 /* #ifdef CONFIG_TCC_BCHECK
2045 The main reason for removing this code:
2046 #include <stdio.h>
2047 int main ()
2049 int v[10];
2050 int i = 10;
2051 int j = 9;
2052 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2053 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2055 When this code is enabled, the output looks like
2056 v+i-j = 0xfffffffe
2057 v+(i-j) = 0xbff84000
2059 /* if evaluating constant expression, no code should be
2060 generated, so no bound check */
2061 if (tcc_state->do_bounds_check && !const_wanted) {
2062 /* if bounded pointers, we generate a special code to
2063 test bounds */
2064 if (op == '-') {
2065 vpushi(0);
2066 vswap();
2067 gen_op('-');
2069 gen_bounded_ptr_add();
2070 } else
2071 #endif
2073 gen_opic(op);
2075 /* restore the type in case gen_opic() swapped the operands */
2076 vtop->type = type1;
2078 } else if (is_float(bt1) || is_float(bt2)) {
2079 /* compute bigger type and do implicit casts */
2080 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2081 t = VT_LDOUBLE;
2082 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2083 t = VT_DOUBLE;
2084 } else {
2085 t = VT_FLOAT;
2087 /* floats can only be used for a few operations */
2088 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2089 (op < TOK_ULT || op > TOK_GT))
2090 tcc_error("invalid operands for binary operation");
2091 goto std_op;
2092 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2093 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2094 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2095 t |= VT_UNSIGNED;
2096 goto std_op;
2097 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2098 /* cast to biggest op */
2099 t = VT_LLONG;
2100 /* convert to unsigned if it does not fit in a long long */
2101 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2102 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2103 t |= VT_UNSIGNED;
2104 goto std_op;
2105 } else {
2106 /* integer operations */
2107 t = VT_INT;
2108 /* convert to unsigned if it does not fit in an integer */
2109 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2110 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2111 t |= VT_UNSIGNED;
2112 std_op:
2113 /* XXX: currently, some unsigned operations are explicit, so
2114 we modify them here */
2115 if (t & VT_UNSIGNED) {
2116 if (op == TOK_SAR)
2117 op = TOK_SHR;
2118 else if (op == '/')
2119 op = TOK_UDIV;
2120 else if (op == '%')
2121 op = TOK_UMOD;
2122 else if (op == TOK_LT)
2123 op = TOK_ULT;
2124 else if (op == TOK_GT)
2125 op = TOK_UGT;
2126 else if (op == TOK_LE)
2127 op = TOK_ULE;
2128 else if (op == TOK_GE)
2129 op = TOK_UGE;
2131 vswap();
2132 type1.t = t;
2133 gen_cast(&type1);
2134 vswap();
2135 /* special case for shifts and long long: we keep the shift as
2136 an integer */
2137 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2138 type1.t = VT_INT;
2139 gen_cast(&type1);
2140 if (is_float(t))
2141 gen_opif(op);
2142 else
2143 gen_opic(op);
2144 if (op >= TOK_ULT && op <= TOK_GT) {
2145 /* relational op: the result is an int */
2146 vtop->type.t = VT_INT;
2147 } else {
2148 vtop->type.t = t;
2151 // Make sure that we have converted to an rvalue:
2152 if (vtop->r & VT_LVAL && !nocode_wanted)
2153 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2156 #ifndef TCC_TARGET_ARM
2157 /* generic itof for unsigned long long case */
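/* conversions from unsigned long long are routed through the
   libgcc-style helpers __floatundisf / __floatundidf / __floatundixf
   rather than the backend's gen_cvt_itof() (arm64 excepted, see the
   #ifdef below); everything else converts natively */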
2158 static void gen_cvt_itof1(int t)
2160 #ifdef TCC_TARGET_ARM64
2161 gen_cvt_itof(t);
2162 #else
2163 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2164 (VT_LLONG | VT_UNSIGNED)) {
2166 if (t == VT_FLOAT)
2167 vpush_global_sym(&func_old_type, TOK___floatundisf);
2168 #if LDOUBLE_SIZE != 8
2169 else if (t == VT_LDOUBLE)
2170 vpush_global_sym(&func_old_type, TOK___floatundixf);
2171 #endif
2172 else
2173 vpush_global_sym(&func_old_type, TOK___floatundidf);
2174 vrott(2);
2175 gfunc_call(1);
2176 vpushi(0);
2177 vtop->r = reg_fret(t);
2178 } else {
2179 gen_cvt_itof(t);
2181 #endif
2183 #endif
2185 /* generic ftoi for unsigned long long case */
2186 static void gen_cvt_ftoi1(int t)
2188 #ifdef TCC_TARGET_ARM64
2189 gen_cvt_ftoi(t);
2190 #else
2191 int st;
2193 if (t == (VT_LLONG | VT_UNSIGNED)) {
2194 /* not handled natively */
2195 st = vtop->type.t & VT_BTYPE;
2196 if (st == VT_FLOAT)
2197 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2198 #if LDOUBLE_SIZE != 8
2199 else if (st == VT_LDOUBLE)
2200 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2201 #endif
2202 else
2203 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2204 vrott(2);
2205 gfunc_call(1);
2206 vpushi(0);
2207 vtop->r = REG_IRET;
2208 vtop->r2 = REG_LRET;
2209 } else {
2210 gen_cvt_ftoi(t);
2212 #endif
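/* Illustrative note (added, not in the original sources): on targets
   without a native unsigned 64-bit float conversion the two helpers
   above fall back to the libgcc-style runtime routines referenced via
   the TOK___float.../TOK___fixuns... symbols, e.g. for

       unsigned long long x = 0x8000000000000000ULL;
       double d = x;              /- calls __floatundidf -/
       unsigned long long y = d;  /- calls __fixunsdfdi  -/

   A plain signed conversion would mishandle values with the top bit
   set, which is why the helper call is generated instead. */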
2215 /* force char or short cast */
2216 static void force_charshort_cast(int t)
2218 int bits, dbt;
2219 dbt = t & VT_BTYPE;
2220 /* XXX: add optimization if lvalue : just change type and offset */
2221 if (dbt == VT_BYTE)
2222 bits = 8;
2223 else
2224 bits = 16;
2225 if (t & VT_UNSIGNED) {
2226 vpushi((1 << bits) - 1);
2227 gen_op('&');
2228 } else {
2229 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2230 bits = 64 - bits;
2231 else
2232 bits = 32 - bits;
2233 vpushi(bits);
2234 gen_op(TOK_SHL);
2235 /* the result must be signed, otherwise the SAR is converted to an SHR.
2236 This was not the case when "t" was a signed short
2237 and the last value on the stack was an unsigned int */
2238 vtop->type.t &= ~VT_UNSIGNED;
2239 vpushi(bits);
2240 gen_op(TOK_SAR);
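/* Illustrative note (added, not in the original sources): the delayed
   char/short cast is done with ordinary integer ops on the full word.
   Narrowing the int value 511 (0x1ff), for instance, computes

       (unsigned char)511  ->  511 & 0xff          == 255
       (signed char)511    ->  (511 << 24) >> 24   == -1   (SAR)

   i.e. a mask for unsigned targets, and a shift-left / arithmetic
   shift-right pair to sign-extend for signed targets. */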
2244 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2245 static void gen_cast(CType *type)
2247 int sbt, dbt, sf, df, c, p;
2249 /* special delayed cast for char/short */
2250 /* XXX: in some cases (multiple cascaded casts), it may still
2251 be incorrect */
2252 if (vtop->r & VT_MUSTCAST) {
2253 vtop->r &= ~VT_MUSTCAST;
2254 force_charshort_cast(vtop->type.t);
2257 /* bitfields first get cast to ints */
2258 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2259 gv(RC_INT);
2262 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2263 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2265 if (sbt != dbt) {
2266 sf = is_float(sbt);
2267 df = is_float(dbt);
2268 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2269 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2270 if (c) {
2271 /* constant case: we can do it now */
2272 /* XXX: in ISOC, cannot do it if error in convert */
2273 if (sbt == VT_FLOAT)
2274 vtop->c.ld = vtop->c.f;
2275 else if (sbt == VT_DOUBLE)
2276 vtop->c.ld = vtop->c.d;
2278 if (df) {
2279 if ((sbt & VT_BTYPE) == VT_LLONG) {
2280 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2281 vtop->c.ld = vtop->c.i;
2282 else
2283 vtop->c.ld = -(long double)-vtop->c.i;
2284 } else if(!sf) {
2285 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2286 vtop->c.ld = (uint32_t)vtop->c.i;
2287 else
2288 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2291 if (dbt == VT_FLOAT)
2292 vtop->c.f = (float)vtop->c.ld;
2293 else if (dbt == VT_DOUBLE)
2294 vtop->c.d = (double)vtop->c.ld;
2295 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2296 vtop->c.i = vtop->c.ld;
2297 } else if (sf && dbt == VT_BOOL) {
2298 vtop->c.i = (vtop->c.ld != 0);
2299 } else {
2300 if(sf)
2301 vtop->c.i = vtop->c.ld;
2302 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2304 else if (sbt & VT_UNSIGNED)
2305 vtop->c.i = (uint32_t)vtop->c.i;
2306 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2307 else if (sbt == VT_PTR)
2309 #endif
2310 else if (sbt != VT_LLONG)
2311 vtop->c.i = ((uint32_t)vtop->c.i |
2312 -(vtop->c.i & 0x80000000));
2314 if (dbt == (VT_LLONG|VT_UNSIGNED))
2316 else if (dbt == VT_BOOL)
2317 vtop->c.i = (vtop->c.i != 0);
2318 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2319 else if (dbt == VT_PTR)
2321 #endif
2322 else if (dbt != VT_LLONG) {
2323 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2324 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2325 0xffffffff);
2326 vtop->c.i &= m;
2327 if (!(dbt & VT_UNSIGNED))
2328 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2331 } else if (p && dbt == VT_BOOL) {
2332 vtop->r = VT_CONST;
2333 vtop->c.i = 1;
2334 } else if (!nocode_wanted) {
2335 /* non constant case: generate code */
2336 if (sf && df) {
2337 /* convert from fp to fp */
2338 gen_cvt_ftof(dbt);
2339 } else if (df) {
2340 /* convert int to fp */
2341 gen_cvt_itof1(dbt);
2342 } else if (sf) {
2343 /* convert fp to int */
2344 if (dbt == VT_BOOL) {
2345 vpushi(0);
2346 gen_op(TOK_NE);
2347 } else {
2348 /* we handle char/short/etc... with generic code */
2349 if (dbt != (VT_INT | VT_UNSIGNED) &&
2350 dbt != (VT_LLONG | VT_UNSIGNED) &&
2351 dbt != VT_LLONG)
2352 dbt = VT_INT;
2353 gen_cvt_ftoi1(dbt);
2354 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2355 /* additional cast for char/short... */
2356 vtop->type.t = dbt;
2357 gen_cast(type);
2360 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2361 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2362 if ((sbt & VT_BTYPE) != VT_LLONG) {
2363 /* scalar to long long */
2364 /* machine independent conversion */
2365 gv(RC_INT);
2366 /* generate high word */
2367 if (sbt == (VT_INT | VT_UNSIGNED)) {
2368 vpushi(0);
2369 gv(RC_INT);
2370 } else {
2371 if (sbt == VT_PTR) {
2372 /* cast from pointer to int before we apply
2373 shift operation, which pointers don't support */
2374 gen_cast(&int_type);
2376 gv_dup();
2377 vpushi(31);
2378 gen_op(TOK_SAR);
2380 /* patch second register */
2381 vtop[-1].r2 = vtop->r;
2382 vpop();
2384 #else
2385 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2386 (dbt & VT_BTYPE) == VT_PTR ||
2387 (dbt & VT_BTYPE) == VT_FUNC) {
2388 if ((sbt & VT_BTYPE) != VT_LLONG &&
2389 (sbt & VT_BTYPE) != VT_PTR &&
2390 (sbt & VT_BTYPE) != VT_FUNC) {
2391 /* need to convert from 32bit to 64bit */
2392 gv(RC_INT);
2393 if (sbt != (VT_INT | VT_UNSIGNED)) {
2394 #if defined(TCC_TARGET_ARM64)
2395 gen_cvt_sxtw();
2396 #elif defined(TCC_TARGET_X86_64)
2397 int r = gv(RC_INT);
2398 /* x86_64 specific: movslq */
2399 o(0x6348);
2400 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2401 #else
2402 #error
2403 #endif
2406 #endif
2407 } else if (dbt == VT_BOOL) {
2408 /* scalar to bool */
2409 vpushi(0);
2410 gen_op(TOK_NE);
2411 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2412 (dbt & VT_BTYPE) == VT_SHORT) {
2413 if (sbt == VT_PTR) {
2414 vtop->type.t = VT_INT;
2415 tcc_warning("nonportable conversion from pointer to char/short");
2417 force_charshort_cast(dbt);
2418 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2419 } else if ((dbt & VT_BTYPE) == VT_INT) {
2420 /* scalar to int */
2421 if ((sbt & VT_BTYPE) == VT_LLONG) {
2422 /* from long long: just take low order word */
2423 lexpand();
2424 vpop();
2426 /* if lvalue and single word type, nothing to do because
2427 the lvalue already contains the real type size (see
2428 VT_LVAL_xxx constants) */
2429 #endif
2432 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2433 /* if we are casting between pointer types,
2434 we must update the VT_LVAL_xxx size */
2435 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2436 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2438 vtop->type = *type;
2441 /* return the type size as known at compile time. Store the alignment in '*a' */
2442 ST_FUNC int type_size(CType *type, int *a)
2444 Sym *s;
2445 int bt;
2447 bt = type->t & VT_BTYPE;
2448 if (bt == VT_STRUCT) {
2449 /* struct/union */
2450 s = type->ref;
2451 *a = s->r;
2452 return s->c;
2453 } else if (bt == VT_PTR) {
2454 if (type->t & VT_ARRAY) {
2455 int ts;
2457 s = type->ref;
2458 ts = type_size(&s->type, a);
2460 if (ts < 0 && s->c < 0)
2461 ts = -ts;
2463 return ts * s->c;
2464 } else {
2465 *a = PTR_SIZE;
2466 return PTR_SIZE;
2468 } else if (bt == VT_LDOUBLE) {
2469 *a = LDOUBLE_ALIGN;
2470 return LDOUBLE_SIZE;
2471 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2472 #ifdef TCC_TARGET_I386
2473 #ifdef TCC_TARGET_PE
2474 *a = 8;
2475 #else
2476 *a = 4;
2477 #endif
2478 #elif defined(TCC_TARGET_ARM)
2479 #ifdef TCC_ARM_EABI
2480 *a = 8;
2481 #else
2482 *a = 4;
2483 #endif
2484 #else
2485 *a = 8;
2486 #endif
2487 return 8;
2488 } else if (bt == VT_INT || bt == VT_FLOAT) {
2489 *a = 4;
2490 return 4;
2491 } else if (bt == VT_SHORT) {
2492 *a = 2;
2493 return 2;
2494 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2495 *a = 8;
2496 return 16;
2497 } else if (bt == VT_ENUM) {
2498 *a = 4;
2499 /* Enums might be incomplete, so don't just return '4' here. */
2500 return type->ref->c;
2501 } else {
2502 /* char, void, function, _Bool */
2503 *a = 1;
2504 return 1;
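/* Illustrative note (added, not in the original sources): the 8-byte
   types get 4-byte alignment on i386 SysV and the old ARM ABI but
   8-byte alignment elsewhere, which is visible in struct layout. For a
   hypothetical

       struct { char c; double d; } s;

   sizeof(s) is typically 12 where doubles are 4-aligned (i386 Linux)
   and 16 where they are 8-aligned (PE, ARM EABI, 64-bit targets). */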
2508 /* push the type size as known at run time on top of the value stack. Store
2509 the alignment in '*a' */
2510 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2512 if (type->t & VT_VLA) {
2513 type_size(&type->ref->type, a);
2514 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2515 } else {
2516 vpushi(type_size(type, a));
2520 static void vla_sp_restore(void) {
2521 if (vlas_in_scope) {
2522 gen_vla_sp_restore(vla_sp_loc);
2526 static void vla_sp_restore_root(void) {
2527 if (vlas_in_scope) {
2528 gen_vla_sp_restore(vla_sp_root_loc);
2532 /* return the pointed type of t */
2533 static inline CType *pointed_type(CType *type)
2535 return &type->ref->type;
2538 /* modify 'type' in place so that it becomes a pointer to the original type. */
2539 ST_FUNC void mk_pointer(CType *type)
2541 Sym *s;
2542 s = sym_push(SYM_FIELD, type, 0, -1);
2543 type->t = VT_PTR | (type->t & ~VT_TYPE);
2544 type->ref = s;
2547 /* compare function types. OLD functions match any new functions */
2548 static int is_compatible_func(CType *type1, CType *type2)
2550 Sym *s1, *s2;
2552 s1 = type1->ref;
2553 s2 = type2->ref;
2554 if (!is_compatible_types(&s1->type, &s2->type))
2555 return 0;
2556 /* check func_call */
2557 if (s1->a.func_call != s2->a.func_call)
2558 return 0;
2559 /* XXX: not complete */
2560 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2561 return 1;
2562 if (s1->c != s2->c)
2563 return 0;
2564 while (s1 != NULL) {
2565 if (s2 == NULL)
2566 return 0;
2567 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2568 return 0;
2569 s1 = s1->next;
2570 s2 = s2->next;
2572 if (s2)
2573 return 0;
2574 return 1;
2577 /* return true if type1 and type2 are the same. If unqualified is
2578 true, qualifiers on the types are ignored.
2580 - enums are not checked, matching gcc's __builtin_types_compatible_p()
2582 static int compare_types(CType *type1, CType *type2, int unqualified)
2584 int bt1, t1, t2;
2586 t1 = type1->t & VT_TYPE;
2587 t2 = type2->t & VT_TYPE;
2588 if (unqualified) {
2589 /* strip qualifiers before comparing */
2590 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2591 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2593 /* Default vs. explicit signedness only matters for char */
2594 if ((t1 & VT_BTYPE) != VT_BYTE) {
2595 t1 &= ~VT_DEFSIGN;
2596 t2 &= ~VT_DEFSIGN;
2598 /* An enum is compatible with (unsigned) int. Ideally we would
2599 store the enum's signedness in type->ref.a.<some_bit> and
2600 only accept unsigned enums with unsigned int and vice versa.
2601 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2602 from pointer target types, so we can't add it here either. */
2603 if ((t1 & VT_BTYPE) == VT_ENUM) {
2604 t1 = VT_INT;
2605 if (type1->ref->a.unsigned_enum)
2606 t1 |= VT_UNSIGNED;
2608 if ((t2 & VT_BTYPE) == VT_ENUM) {
2609 t2 = VT_INT;
2610 if (type2->ref->a.unsigned_enum)
2611 t2 |= VT_UNSIGNED;
2613 /* XXX: bitfields ? */
2614 if (t1 != t2)
2615 return 0;
2616 /* test more complicated cases */
2617 bt1 = t1 & VT_BTYPE;
2618 if (bt1 == VT_PTR) {
2619 type1 = pointed_type(type1);
2620 type2 = pointed_type(type2);
2621 return is_compatible_types(type1, type2);
2622 } else if (bt1 == VT_STRUCT) {
2623 return (type1->ref == type2->ref);
2624 } else if (bt1 == VT_FUNC) {
2625 return is_compatible_func(type1, type2);
2626 } else {
2627 return 1;
2631 /* return true if type1 and type2 are exactly the same (including
2632 qualifiers).
2634 static int is_compatible_types(CType *type1, CType *type2)
2636 return compare_types(type1,type2,0);
2639 /* return true if type1 and type2 are the same (ignoring qualifiers).
2641 static int is_compatible_parameter_types(CType *type1, CType *type2)
2643 return compare_types(type1,type2,1);
2646 /* print a type. If 'varstr' is not NULL, then the variable is also
2647 printed in the type */
2648 /* XXX: union */
2649 /* XXX: add array and function pointers */
2650 static void type_to_str(char *buf, int buf_size,
2651 CType *type, const char *varstr)
2653 int bt, v, t;
2654 Sym *s, *sa;
2655 char buf1[256];
2656 const char *tstr;
2658 t = type->t & VT_TYPE;
2659 bt = t & VT_BTYPE;
2660 buf[0] = '\0';
2661 if (t & VT_CONSTANT)
2662 pstrcat(buf, buf_size, "const ");
2663 if (t & VT_VOLATILE)
2664 pstrcat(buf, buf_size, "volatile ");
2665 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2666 pstrcat(buf, buf_size, "unsigned ");
2667 else if (t & VT_DEFSIGN)
2668 pstrcat(buf, buf_size, "signed ");
2669 switch(bt) {
2670 case VT_VOID:
2671 tstr = "void";
2672 goto add_tstr;
2673 case VT_BOOL:
2674 tstr = "_Bool";
2675 goto add_tstr;
2676 case VT_BYTE:
2677 tstr = "char";
2678 goto add_tstr;
2679 case VT_SHORT:
2680 tstr = "short";
2681 goto add_tstr;
2682 case VT_INT:
2683 tstr = "int";
2684 goto add_tstr;
2685 case VT_LONG:
2686 tstr = "long";
2687 goto add_tstr;
2688 case VT_LLONG:
2689 tstr = "long long";
2690 goto add_tstr;
2691 case VT_FLOAT:
2692 tstr = "float";
2693 goto add_tstr;
2694 case VT_DOUBLE:
2695 tstr = "double";
2696 goto add_tstr;
2697 case VT_LDOUBLE:
2698 tstr = "long double";
2699 add_tstr:
2700 pstrcat(buf, buf_size, tstr);
2701 break;
2702 case VT_ENUM:
2703 case VT_STRUCT:
2704 if (bt == VT_STRUCT)
2705 tstr = "struct ";
2706 else
2707 tstr = "enum ";
2708 pstrcat(buf, buf_size, tstr);
2709 v = type->ref->v & ~SYM_STRUCT;
2710 if (v >= SYM_FIRST_ANOM)
2711 pstrcat(buf, buf_size, "<anonymous>");
2712 else
2713 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2714 break;
2715 case VT_FUNC:
2716 s = type->ref;
2717 type_to_str(buf, buf_size, &s->type, varstr);
2718 pstrcat(buf, buf_size, "(");
2719 sa = s->next;
2720 while (sa != NULL) {
2721 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2722 pstrcat(buf, buf_size, buf1);
2723 sa = sa->next;
2724 if (sa)
2725 pstrcat(buf, buf_size, ", ");
2727 pstrcat(buf, buf_size, ")");
2728 goto no_var;
2729 case VT_PTR:
2730 s = type->ref;
2731 if (t & VT_ARRAY) {
2732 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2733 type_to_str(buf, buf_size, &s->type, buf1);
2734 goto no_var;
2736 pstrcpy(buf1, sizeof(buf1), "*");
2737 if (t & VT_CONSTANT)
2738 pstrcat(buf1, buf_size, "const ");
2739 if (t & VT_VOLATILE)
2740 pstrcat(buf1, buf_size, "volatile ");
2741 if (varstr)
2742 pstrcat(buf1, sizeof(buf1), varstr);
2743 type_to_str(buf, buf_size, &s->type, buf1);
2744 goto no_var;
2746 if (varstr) {
2747 pstrcat(buf, buf_size, " ");
2748 pstrcat(buf, buf_size, varstr);
2750 no_var: ;
2753 /* verify type compatibility to store vtop in 'dt' type, and generate
2754 casts if needed. */
2755 static void gen_assign_cast(CType *dt)
2757 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2758 char buf1[256], buf2[256];
2759 int dbt, sbt;
2761 st = &vtop->type; /* source type */
2762 dbt = dt->t & VT_BTYPE;
2763 sbt = st->t & VT_BTYPE;
2764 if (sbt == VT_VOID || dbt == VT_VOID) {
2765 if (sbt == VT_VOID && dbt == VT_VOID)
2766 ; /*
2767 It is OK if both are void.
2768 A test program:
2769 void func1() {}
2770 void func2() {
2771 return func1();
2773 gcc accepts this program
2775 else
2776 tcc_error("cannot cast from/to void");
2778 if (dt->t & VT_CONSTANT)
2779 tcc_warning("assignment of read-only location");
2780 switch(dbt) {
2781 case VT_PTR:
2782 /* special cases for pointers */
2783 /* '0' can also be a pointer */
2784 if (is_null_pointer(vtop))
2785 goto type_ok;
2786 /* accept implicit pointer to integer cast with warning */
2787 if (is_integer_btype(sbt)) {
2788 tcc_warning("assignment makes pointer from integer without a cast");
2789 goto type_ok;
2791 type1 = pointed_type(dt);
2792 /* a function is implicitly converted to a function pointer */
2793 if (sbt == VT_FUNC) {
2794 if ((type1->t & VT_BTYPE) != VT_VOID &&
2795 !is_compatible_types(pointed_type(dt), st))
2796 tcc_warning("assignment from incompatible pointer type");
2797 goto type_ok;
2799 if (sbt != VT_PTR)
2800 goto error;
2801 type2 = pointed_type(st);
2802 if ((type1->t & VT_BTYPE) == VT_VOID ||
2803 (type2->t & VT_BTYPE) == VT_VOID) {
2804 /* void * can match anything */
2805 } else {
2806 /* exact type match, except for qualifiers */
2807 tmp_type1 = *type1;
2808 tmp_type2 = *type2;
2809 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2810 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2811 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2812 /* Like GCC, don't warn by default for mere changes
2813 in pointer target signedness. Do warn for different
2814 base types, though, in particular for unsigned enums
2815 and signed int targets. */
2816 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2817 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2818 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2820 else
2821 tcc_warning("assignment from incompatible pointer type");
2824 /* check const and volatile */
2825 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2826 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2827 tcc_warning("assignment discards qualifiers from pointer target type");
2828 break;
2829 case VT_BYTE:
2830 case VT_SHORT:
2831 case VT_INT:
2832 case VT_LLONG:
2833 if (sbt == VT_PTR || sbt == VT_FUNC) {
2834 tcc_warning("assignment makes integer from pointer without a cast");
2835 } else if (sbt == VT_STRUCT) {
2836 goto case_VT_STRUCT;
2838 /* XXX: more tests */
2839 break;
2840 case VT_STRUCT:
2841 case_VT_STRUCT:
2842 tmp_type1 = *dt;
2843 tmp_type2 = *st;
2844 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2846 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2847 error:
2848 type_to_str(buf1, sizeof(buf1), st, NULL);
2849 type_to_str(buf2, sizeof(buf2), dt, NULL);
2850 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2852 break;
2854 type_ok:
2855 gen_cast(dt);
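/* Illustrative note (added, not in the original sources; 'struct A' and
   the variables are hypothetical user code): what the checks above
   accept or diagnose for an assignment:

       int *p = 0;           ok, null pointer constant
       int *q = 42;          warning: pointer from integer without a cast
       float *f = (int *)q;  warning: incompatible pointer type
       struct A { int x; } a; a = 1;   error: cannot cast 'int' to 'struct A'

   The last case reaches the 'error:' label and prints both type names
   via type_to_str(). */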
2858 /* store vtop in lvalue pushed on stack */
2859 ST_FUNC void vstore(void)
2861 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2863 ft = vtop[-1].type.t;
2864 sbt = vtop->type.t & VT_BTYPE;
2865 dbt = ft & VT_BTYPE;
2866 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2867 (sbt == VT_INT && dbt == VT_SHORT))
2868 && !(vtop->type.t & VT_BITFIELD)) {
2869 /* optimize char/short casts */
2870 delayed_cast = VT_MUSTCAST;
2871 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2872 ((1 << VT_STRUCT_SHIFT) - 1));
2873 /* XXX: factorize */
2874 if (ft & VT_CONSTANT)
2875 tcc_warning("assignment of read-only location");
2876 } else {
2877 delayed_cast = 0;
2878 if (!(ft & VT_BITFIELD))
2879 gen_assign_cast(&vtop[-1].type);
2882 if (sbt == VT_STRUCT) {
2883 /* if structure, only generate pointer */
2884 /* structure assignment : generate memcpy */
2885 /* XXX: optimize if small size */
2886 if (!nocode_wanted) {
2887 size = type_size(&vtop->type, &align);
2889 /* destination */
2890 vswap();
2891 vtop->type.t = VT_PTR;
2892 gaddrof();
2894 /* address of memcpy() */
2895 #ifdef TCC_ARM_EABI
2896 if(!(align & 7))
2897 vpush_global_sym(&func_old_type, TOK_memcpy8);
2898 else if(!(align & 3))
2899 vpush_global_sym(&func_old_type, TOK_memcpy4);
2900 else
2901 #endif
2902 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2903 vpush_global_sym(&func_old_type, TOK_memmove);
2905 vswap();
2906 /* source */
2907 vpushv(vtop - 2);
2908 vtop->type.t = VT_PTR;
2909 gaddrof();
2910 /* type size */
2911 vpushi(size);
2912 gfunc_call(3);
2913 } else {
2914 vswap();
2915 vpop();
2917 /* leave source on stack */
2918 } else if (ft & VT_BITFIELD) {
2919 /* bitfield store handling */
2921 /* save lvalue as expression result (example: s.b = s.a = n;) */
2922 vdup(), vtop[-1] = vtop[-2];
2924 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2925 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2926 /* remove bit field info to avoid loops */
2927 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2929 if((ft & VT_BTYPE) == VT_BOOL) {
2930 gen_cast(&vtop[-1].type);
2931 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2934 /* duplicate destination */
2935 vdup();
2936 vtop[-1] = vtop[-2];
2938 /* mask and shift source */
2939 if((ft & VT_BTYPE) != VT_BOOL) {
2940 if((ft & VT_BTYPE) == VT_LLONG) {
2941 vpushll((1ULL << bit_size) - 1ULL);
2942 } else {
2943 vpushi((1 << bit_size) - 1);
2945 gen_op('&');
2947 vpushi(bit_pos);
2948 gen_op(TOK_SHL);
2949 /* load destination, mask and or with source */
2950 vswap();
2951 if((ft & VT_BTYPE) == VT_LLONG) {
2952 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2953 } else {
2954 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2956 gen_op('&');
2957 gen_op('|');
2958 /* store result */
2959 vstore();
2960 /* ... and discard */
2961 vpop();
2963 } else {
2964 if (!nocode_wanted) {
2965 #ifdef CONFIG_TCC_BCHECK
2966 /* bound check case */
2967 if (vtop[-1].r & VT_MUSTBOUND) {
2968 vswap();
2969 gbound();
2970 vswap();
2972 #endif
2973 rc = RC_INT;
2974 if (is_float(ft)) {
2975 rc = RC_FLOAT;
2976 #ifdef TCC_TARGET_X86_64
2977 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2978 rc = RC_ST0;
2979 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2980 rc = RC_FRET;
2982 #endif
2984 r = gv(rc); /* generate value */
2985 /* if lvalue was saved on stack, must read it */
2986 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2987 SValue sv;
2988 t = get_reg(RC_INT);
2989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2990 sv.type.t = VT_PTR;
2991 #else
2992 sv.type.t = VT_INT;
2993 #endif
2994 sv.r = VT_LOCAL | VT_LVAL;
2995 sv.c.i = vtop[-1].c.i;
2996 load(t, &sv);
2997 vtop[-1].r = t | VT_LVAL;
2999 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3000 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3001 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3002 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3003 #else
3004 if ((ft & VT_BTYPE) == VT_LLONG) {
3005 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3006 #endif
3007 vtop[-1].type.t = load_type;
3008 store(r, vtop - 1);
3009 vswap();
3010 /* convert to int to increment easily */
3011 vtop->type.t = addr_type;
3012 gaddrof();
3013 vpushi(load_size);
3014 gen_op('+');
3015 vtop->r |= VT_LVAL;
3016 vswap();
3017 vtop[-1].type.t = load_type;
3018 /* XXX: it works because r2 is spilled last ! */
3019 store(vtop->r2, vtop - 1);
3020 } else {
3021 store(r, vtop - 1);
3024 vswap();
3025 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3026 vtop->r |= delayed_cast;
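/* Illustrative note (added, not in the original sources): for a
   bit-field of width w at bit offset p the store above is an ordinary
   read-modify-write of the underlying word, conceptually

       dst = (dst & ~(((1 << w) - 1) << p))
           | ((src & ((1 << w) - 1)) << p);

   with 1ULL instead of 1 when the base type is long long, and with the
   lvalue duplicated beforehand so the assignment can itself be used as
   an expression (s.b = s.a = n). */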
3030 /* 'post' selects post- vs. pre-increment/decrement. c is the token ++ or -- */
3031 ST_FUNC void inc(int post, int c)
3033 test_lvalue();
3034 vdup(); /* save lvalue */
3035 if (post) {
3036 if (!nocode_wanted)
3037 gv_dup(); /* duplicate value */
3038 else
3039 vdup(); /* duplicate value */
3040 vrotb(3);
3041 vrotb(3);
3043 /* add constant */
3044 vpushi(c - TOK_MID);
3045 gen_op('+');
3046 vstore(); /* store value */
3047 if (post)
3048 vpop(); /* if post op, return saved value */
3051 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3053 /* read the string */
3054 if (tok != TOK_STR)
3055 expect(msg);
3056 cstr_new(astr);
3057 while (tok == TOK_STR) {
3058 /* XXX: add \0 handling too ? */
3059 cstr_cat(astr, tokc.str.data, -1);
3060 next();
3062 cstr_ccat(astr, '\0');
3065 /* Parse GNUC __attribute__ extension. Currently, the following
3066 extensions are recognized:
3067 - aligned(n) : set data/function alignment.
3068 - packed : force data alignment to 1
3069 - section(x) : generate data/code in this section.
3070 - unused : currently ignored, but may be used someday.
3071 - regparm(n) : pass function parameters in registers (i386 only)
3073 static void parse_attribute(AttributeDef *ad)
3075 int t, n;
3076 CString astr;
3078 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3079 next();
3080 skip('(');
3081 skip('(');
3082 while (tok != ')') {
3083 if (tok < TOK_IDENT)
3084 expect("attribute name");
3085 t = tok;
3086 next();
3087 switch(t) {
3088 case TOK_SECTION1:
3089 case TOK_SECTION2:
3090 skip('(');
3091 parse_mult_str(&astr, "section name");
3092 ad->section = find_section(tcc_state, (char *)astr.data);
3093 skip(')');
3094 cstr_free(&astr);
3095 break;
3096 case TOK_ALIAS1:
3097 case TOK_ALIAS2:
3098 skip('(');
3099 parse_mult_str(&astr, "alias(\"target\")");
3100 ad->alias_target = /* save string as token, for later */
3101 tok_alloc((char*)astr.data, astr.size-1)->tok;
3102 skip(')');
3103 cstr_free(&astr);
3104 break;
3105 case TOK_VISIBILITY1:
3106 case TOK_VISIBILITY2:
3107 skip('(');
3108 parse_mult_str(&astr,
3109 "visibility(\"default|hidden|internal|protected\")");
3110 if (!strcmp (astr.data, "default"))
3111 ad->a.visibility = STV_DEFAULT;
3112 else if (!strcmp (astr.data, "hidden"))
3113 ad->a.visibility = STV_HIDDEN;
3114 else if (!strcmp (astr.data, "internal"))
3115 ad->a.visibility = STV_INTERNAL;
3116 else if (!strcmp (astr.data, "protected"))
3117 ad->a.visibility = STV_PROTECTED;
3118 else
3119 expect("visibility(\"default|hidden|internal|protected\")");
3120 skip(')');
3121 cstr_free(&astr);
3122 break;
3123 case TOK_ALIGNED1:
3124 case TOK_ALIGNED2:
3125 if (tok == '(') {
3126 next();
3127 n = expr_const();
3128 if (n <= 0 || (n & (n - 1)) != 0)
3129 tcc_error("alignment must be a positive power of two");
3130 skip(')');
3131 } else {
3132 n = MAX_ALIGN;
3134 ad->a.aligned = n;
3135 break;
3136 case TOK_PACKED1:
3137 case TOK_PACKED2:
3138 ad->a.packed = 1;
3139 break;
3140 case TOK_WEAK1:
3141 case TOK_WEAK2:
3142 ad->a.weak = 1;
3143 break;
3144 case TOK_UNUSED1:
3145 case TOK_UNUSED2:
3146 /* currently, no need to handle it because tcc does not
3147 track unused objects */
3148 break;
3149 case TOK_NORETURN1:
3150 case TOK_NORETURN2:
3151 /* currently ignored: tcc does not make use of the
3152 noreturn information */
3153 break;
3154 case TOK_CDECL1:
3155 case TOK_CDECL2:
3156 case TOK_CDECL3:
3157 ad->a.func_call = FUNC_CDECL;
3158 break;
3159 case TOK_STDCALL1:
3160 case TOK_STDCALL2:
3161 case TOK_STDCALL3:
3162 ad->a.func_call = FUNC_STDCALL;
3163 break;
3164 #ifdef TCC_TARGET_I386
3165 case TOK_REGPARM1:
3166 case TOK_REGPARM2:
3167 skip('(');
3168 n = expr_const();
3169 if (n > 3)
3170 n = 3;
3171 else if (n < 0)
3172 n = 0;
3173 if (n > 0)
3174 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3175 skip(')');
3176 break;
3177 case TOK_FASTCALL1:
3178 case TOK_FASTCALL2:
3179 case TOK_FASTCALL3:
3180 ad->a.func_call = FUNC_FASTCALLW;
3181 break;
3182 #endif
3183 case TOK_MODE:
3184 skip('(');
3185 switch(tok) {
3186 case TOK_MODE_DI:
3187 ad->a.mode = VT_LLONG + 1;
3188 break;
3189 case TOK_MODE_QI:
3190 ad->a.mode = VT_BYTE + 1;
3191 break;
3192 case TOK_MODE_HI:
3193 ad->a.mode = VT_SHORT + 1;
3194 break;
3195 case TOK_MODE_SI:
3196 case TOK_MODE_word:
3197 ad->a.mode = VT_INT + 1;
3198 break;
3199 default:
3200 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3201 break;
3203 next();
3204 skip(')');
3205 break;
3206 case TOK_DLLEXPORT:
3207 ad->a.func_export = 1;
3208 break;
3209 case TOK_DLLIMPORT:
3210 ad->a.func_import = 1;
3211 break;
3212 default:
3213 if (tcc_state->warn_unsupported)
3214 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3215 /* skip parameters */
3216 if (tok == '(') {
3217 int parenthesis = 0;
3218 do {
3219 if (tok == '(')
3220 parenthesis++;
3221 else if (tok == ')')
3222 parenthesis--;
3223 next();
3224 } while (parenthesis && tok != -1);
3226 break;
3228 if (tok != ',')
3229 break;
3230 next();
3232 skip(')');
3233 skip(')');
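/* Illustrative note (added, not in the original sources; names and the
   ".mydata" section are hypothetical user code): declarations
   exercising the attributes recognized above:

       static int table[64] __attribute__((aligned(16), section(".mydata")));
       struct pkt { char tag; int val; } __attribute__((packed));
       void die(const char *msg) __attribute__((noreturn));
       int sum3(int a, int b, int c) __attribute__((regparm(3)));  (i386 only)

   Unknown attributes are skipped, including any parenthesized
   arguments, with a warning when warn_unsupported is set. */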
3237 static Sym * find_field (CType *type, int v)
3239 Sym *s = type->ref;
3240 v |= SYM_FIELD;
3241 while ((s = s->next) != NULL) {
3242 if ((s->v & SYM_FIELD) &&
3243 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3244 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3245 Sym *ret = find_field (&s->type, v);
3246 if (ret)
3247 return ret;
3249 if (s->v == v)
3250 break;
3252 return s;
3255 static void struct_add_offset (Sym *s, int offset)
3257 while ((s = s->next) != NULL) {
3258 if ((s->v & SYM_FIELD) &&
3259 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3260 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3261 struct_add_offset(s->type.ref, offset);
3262 } else
3263 s->c += offset;
3267 static void struct_layout(CType *type, AttributeDef *ad)
3269 int align, maxalign, offset, c, bit_pos, bt, prevbt;
3270 Sym *f;
3271 if (ad->a.aligned)
3272 maxalign = ad->a.aligned;
3273 else
3274 maxalign = 1;
3275 offset = 0;
3276 c = 0;
3277 bit_pos = 0;
3278 prevbt = VT_STRUCT; /* make it never match */
3279 for (f = type->ref->next; f; f = f->next) {
3280 int extra_bytes = 0;
3281 int typealign, bit_size;
3282 int size = type_size(&f->type, &typealign);
3283 int pcc = !tcc_state->ms_bitfields;
3284 if (f->type.t & VT_BITFIELD) {
3285 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3286 /* without ms-bitfields, allocate the
3287 * minimum number of bytes necessary,
3288 * adding single bytes as needed */
3289 if (!tcc_state->ms_bitfields) {
3290 if (bit_pos == 0)
3291 /* minimum bytes for new bitfield */
3292 size = (bit_size + 7) / 8;
3293 else {
3294 /* enough spare bits already allocated? */
3295 int add_size = (bit_pos - 1) % 8 + 1 + bit_size;
3296 if (add_size > 8) /* doesn't fit */
3297 extra_bytes = (add_size - 1) / 8;
3300 } else
3301 bit_size = -1;
3302 if (bit_size == 0 && pcc) {
3303 /* Zero-width bit-fields in PCC mode aren't affected
3304 by any packing (attribute or pragma). */
3305 align = typealign;
3306 } else if (f->r > 1) {
3307 align = f->r;
3308 } else if (ad->a.packed || f->r == 1) {
3309 align = 1;
3310 typealign = 1;
3311 } else {
3312 align = typealign;
3314 /*if (extra_bytes) c += extra_bytes;
3315 else*/ if (bit_size < 0) {
3316 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3317 if (type->ref->type.t == TOK_STRUCT) {
3318 c = (c + addbytes + align - 1) & -align;
3319 offset = c;
3320 if (size > 0)
3321 c += size;
3322 } else {
3323 offset = 0;
3324 if (addbytes > c)
3325 c = addbytes;
3326 if (size > c)
3327 c = size;
3329 if (align > maxalign)
3330 maxalign = align;
3331 bit_pos = 0;
3332 prevbt = VT_STRUCT;
3333 } else {
3334 /* A bit-field. Layout is more complicated. There are two
3335 options TCC implements: PCC compatible and MS compatible
3336 (PCC compatible is what GCC uses for almost all targets). */
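/* Illustrative note (added, not in the original sources): a case where
   the two layouts differ, assuming 32-bit int and the usual ABIs:

       struct bf { char a:4; int b:4; };

   PCC/GCC layout packs b right after a inside one 4-byte unit, giving
   sizeof(struct bf) == 4, while the MS layout starts a new int-sized
   unit whenever the base type changes, typically giving sizeof == 8. */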
3337 if (!bit_pos) {
3338 if (type->ref->type.t == TOK_STRUCT) {
3339 /* Don't align c here. That's only to be done
3340 in certain cases. */
3341 offset = c;
3342 } else {
3343 offset = 0;
3346 if (pcc) {
3347 /* In PCC layout a non-packed bit-field is placed adjacent
3348 to the preceding bit-fields, except if it would overflow
3349 its container (depending on base type) or it's a zero-width
3350 bit-field. Packed non-zero-width bit-fields always are
3351 placed adjacent. */
3352 if ((typealign != 1 &&
3353 bit_pos + bit_size > size * 8) ||
3354 bit_size == 0) {
3355 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3356 offset = c;
3357 bit_pos = 0;
3359 /* In PCC layout named bit-fields influence the alignment
3360 of the containing struct using the base types alignment,
3361 except for packed fields or zero-width fields. */
3362 if (bit_size > 0) {
3363 if (align > maxalign)
3364 maxalign = align;
3365 if (typealign > maxalign)
3366 maxalign = typealign;
3368 } else {
3369 bt = f->type.t & VT_BTYPE;
3370 if (
3372 bit_pos + bit_size > size * 8) ||
3373 (bit_size == 0 && prevbt == bt) ||
3374 (bit_size > 0 && bt != prevbt))) {
3375 c = (c + typealign - 1) & -typealign;
3376 offset = c;
3377 bit_pos = 0;
3378 /* In MS bitfield mode a bit-field run always uses
3379 at least as many bits as the underlying type. */
3380 c += size;
3382 if (bit_size > 0 || prevbt == bt) {
3383 if (align > maxalign)
3384 maxalign = align;
3385 if (typealign > maxalign)
3386 maxalign = typealign;
3388 prevbt = bt;
3390 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3391 | (bit_pos << VT_STRUCT_SHIFT);
3392 bit_pos += bit_size;
3393 if (pcc && bit_pos >= size * 8) {
3394 c += size;
3395 bit_pos -= size * 8;
3398 #if 0
3399 printf("set field %s offset=%d c=%d",
3400 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3401 if (f->type.t & VT_BITFIELD) {
3402 printf(" pos=%d size=%d",
3403 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3404 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3406 printf("\n");
3407 #endif
3409 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3410 Sym *ass;
3411 /* An anonymous struct/union. Adjust member offsets
3412 to reflect the real offset of our containing struct.
3413 Also set the offset of this anon member inside
3414 the outer struct to zero. This way it
3415 works both when accessing the field offset directly
3416 (from the base object) and when recursing into
3417 members during initializer handling. */
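/* Illustrative note (added, not in the original sources; 'outer' is
   hypothetical user code): for an anonymous member such as

       struct outer { int c; struct { int a; short b; }; };

   the inner fields get their absolute offsets here (a at 4, b at 8)
   while the anonymous member itself is recorded at offset 0, so looking
   up outer.a directly or by recursing through the anonymous member both
   yield offset 4. */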
3418 int v2 = f->type.ref->v;
3419 if (!(v2 & SYM_FIELD) &&
3420 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3421 Sym **pps;
3422 /* This happens only with MS extensions. The
3423 anon member has a named struct type, so it
3424 potentially is shared with other references.
3425 We need to unshare members so we can modify
3426 them. */
3427 ass = f->type.ref;
3428 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3429 &f->type.ref->type, 0,
3430 f->type.ref->c);
3431 pps = &f->type.ref->next;
3432 while ((ass = ass->next) != NULL) {
3433 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3434 pps = &((*pps)->next);
3436 *pps = NULL;
3438 struct_add_offset(f->type.ref, offset);
3439 f->c = 0;
3440 } else {
3441 f->c = offset;
3444 f->r = 0;
3446 /* store size and alignment */
3447 type->ref->c = (c + ((bit_pos + 7) >> 3) + maxalign - 1) & -maxalign;
3448 type->ref->r = maxalign;
3451 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3452 static void struct_decl(CType *type, AttributeDef *ad, int u)
3454 int extra_bytes;
3455 int a, v, size, align, flexible, alignoverride;
3456 long c;
3457 int bit_size, bit_pos, bsize, bt, prevbt;
3458 Sym *s, *ss, **ps;
3459 AttributeDef ad1;
3460 CType type1, btype;
3462 a = tok; /* save decl type */
3463 next();
3464 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3465 parse_attribute(ad);
3466 if (tok != '{') {
3467 v = tok;
3468 next();
3469 /* struct already defined ? return it */
3470 if (v < TOK_IDENT)
3471 expect("struct/union/enum name");
3472 s = struct_find(v);
3473 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3474 if (s->type.t != a)
3475 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3476 goto do_decl;
3478 } else {
3479 v = anon_sym++;
3481 /* Record the original enum/struct/union token. */
3482 type1.t = a;
3483 type1.ref = NULL;
3484 /* we put an undefined size for struct/union */
3485 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3486 s->r = 0; /* default alignment is zero as gcc */
3487 /* put struct/union/enum name in type */
3488 do_decl:
3489 type->t = u;
3490 type->ref = s;
3492 if (tok == '{') {
3493 next();
3494 if (s->c != -1)
3495 tcc_error("struct/union/enum already defined");
3496 /* cannot be empty */
3497 c = 0;
3498 /* empty enums are not allowed */
3499 if (a == TOK_ENUM) {
3500 int seen_neg = 0;
3501 int seen_wide = 0;
3502 for(;;) {
3503 CType *t = &int_type;
3504 v = tok;
3505 if (v < TOK_UIDENT)
3506 expect("identifier");
3507 ss = sym_find(v);
3508 if (ss && !local_stack)
3509 tcc_error("redefinition of enumerator '%s'",
3510 get_tok_str(v, NULL));
3511 next();
3512 if (tok == '=') {
3513 next();
3514 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3515 c = expr_const64();
3516 #else
3517 /* We really want to support long long enums
3518 on i386 as well, but the Sym structure only
3519 holds a 'long' for associated constants,
3520 and enlarging it would bump its size (no
3521 available padding). So punt for now. */
3522 c = expr_const();
3523 #endif
3525 if (c < 0)
3526 seen_neg = 1;
3527 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3528 seen_wide = 1, t = &size_type;
3529 /* enum symbols have static storage */
3530 ss = sym_push(v, t, VT_CONST, c);
3531 ss->type.t |= VT_STATIC;
3532 if (tok != ',')
3533 break;
3534 next();
3535 c++;
3536 /* NOTE: we accept a trailing comma */
3537 if (tok == '}')
3538 break;
3540 if (!seen_neg)
3541 s->a.unsigned_enum = 1;
3542 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3543 skip('}');
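/* Illustrative note (added, not in the original sources): what the enum
   loop above produces for a hypothetical

       enum e { A = 3, B, C = -1, };    (trailing comma accepted)

   A == 3, B == 4 (previous value plus one), C == -1. Because a negative
   value was seen the enum stays signed (unsigned_enum remains 0), and
   s->c records sizeof(int), or the size of size_type when a value does
   not fit in 32 bits on 64-bit targets. */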
3544 } else {
3545 ps = &s->next;
3546 prevbt = VT_INT;
3547 bit_pos = 0;
3548 flexible = 0;
3549 while (tok != '}') {
3550 if (!parse_btype(&btype, &ad1)) {
3551 skip(';');
3552 continue;
3554 while (1) {
3555 extra_bytes = 0;
3556 if (flexible)
3557 tcc_error("flexible array member '%s' not at the end of struct",
3558 get_tok_str(v, NULL));
3559 bit_size = -1;
3560 v = 0;
3561 type1 = btype;
3562 if (tok != ':') {
3563 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3564 if (v == 0) {
3565 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3566 expect("identifier");
3567 else {
3568 int v = btype.ref->v;
3569 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3570 if (tcc_state->ms_extensions == 0)
3571 expect("identifier");
3575 if (type_size(&type1, &align) < 0) {
3576 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3577 flexible = 1;
3578 else
3579 tcc_error("field '%s' has incomplete type",
3580 get_tok_str(v, NULL));
3582 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3583 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3584 tcc_error("invalid type for '%s'",
3585 get_tok_str(v, NULL));
3587 if (tok == ':') {
3588 next();
3589 bit_size = expr_const();
3590 /* XXX: handle v = 0 case for messages */
3591 if (bit_size < 0)
3592 tcc_error("negative width in bit-field '%s'",
3593 get_tok_str(v, NULL));
3594 if (v && bit_size == 0)
3595 tcc_error("zero width for bit-field '%s'",
3596 get_tok_str(v, NULL));
3597 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3598 parse_attribute(&ad1);
3600 size = type_size(&type1, &align);
3601 /* Only remember non-default alignment. */
3602 alignoverride = 0;
3603 if (ad1.a.aligned) {
3604 if (align < ad1.a.aligned)
3605 alignoverride = ad1.a.aligned;
3606 } else if (ad1.a.packed || ad->a.packed) {
3607 alignoverride = 1;
3608 } else if (*tcc_state->pack_stack_ptr) {
3609 if (align > *tcc_state->pack_stack_ptr)
3610 alignoverride = *tcc_state->pack_stack_ptr;
3612 if (bit_size >= 0) {
3613 bt = type1.t & VT_BTYPE;
3614 if (bt != VT_INT &&
3615 bt != VT_BYTE &&
3616 bt != VT_SHORT &&
3617 bt != VT_BOOL &&
3618 bt != VT_ENUM &&
3619 bt != VT_LLONG)
3620 tcc_error("bitfields must have scalar type");
3621 bsize = size * 8;
3622 if (bit_size > bsize) {
3623 tcc_error("width of '%s' exceeds its type",
3624 get_tok_str(v, NULL));
3625 } else if (bit_size == bsize) {
3626 /* no need for bit fields */
3627 bit_pos = 0;
3628 } else {
3629 /* if the type changes, this is a union, or the field would
3630 * overrun the alignment slot, start at a newly
3631 * aligned slot */
3632 if ((bit_pos + bit_size) > bsize ||
3633 bt != prevbt || a == TOK_UNION)
3634 bit_pos = 0;
3635 /* XXX: handle LSB first */
3636 type1.t |= VT_BITFIELD |
3637 (bit_pos << VT_STRUCT_SHIFT) |
3638 (bit_size << (VT_STRUCT_SHIFT + 6));
3639 bit_pos += bit_size;
3641 prevbt = bt;
3642 } else {
3643 bit_pos = 0;
3645 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3646 /* Remember we've seen a real field to check
3647 for placement of flexible array member. */
3648 c = 1;
3650 if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
3651 /* See struct_layout for special casing
3652 anonymous member of struct type. */
3653 v = anon_sym++;
3655 if (v == 0 && bit_size >= 0) {
3656 /* Need to remember anon bit-fields as well.
3657 They influence layout. */
3658 v = anon_sym++;
3660 if (v) {
3661 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, extra_bytes);
3662 *ps = ss;
3663 ps = &ss->next;
3665 if (tok == ';' || tok == TOK_EOF)
3666 break;
3667 skip(',');
3669 skip(';');
3671 skip('}');
3672 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3673 parse_attribute(ad);
3674 struct_layout(type, ad);
3679 /* return 1 if basic type is a type size (short, long, long long) */
3680 ST_FUNC int is_btype_size(int bt)
3682 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3685 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3686 are added to the element type, copied because it could be a typedef. */
3687 static void parse_btype_qualify(CType *type, int qualifiers)
3689 while (type->t & VT_ARRAY) {
3690 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3691 type = &type->ref->type;
3693 type->t |= qualifiers;
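/* Illustrative note (added, not in the original sources; 'vec4' is
   hypothetical user code): the copy above matters for typedefs of array
   types, e.g.

       typedef int vec4[4];
       const vec4 a;
       vec4 b;

   'const' must land on the element type of 'a' (making it int const [4])
   without also turning the shared typedef, and hence 'b', const. */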
3696 /* return 0 if no type declaration. Otherwise, return the basic type
3697 and skip it.
3699 static int parse_btype(CType *type, AttributeDef *ad)
3701 int t, u, bt_size, complete, type_found, typespec_found;
3702 Sym *s;
3703 CType type1;
3705 memset(ad, 0, sizeof(AttributeDef));
3706 complete = 0;
3707 type_found = 0;
3708 typespec_found = 0;
3709 t = 0;
3710 while(1) {
3711 switch(tok) {
3712 case TOK_EXTENSION:
3713 /* currently, we really ignore extension */
3714 next();
3715 continue;
3717 /* basic types */
3718 case TOK_CHAR:
3719 u = VT_BYTE;
3720 basic_type:
3721 next();
3722 basic_type1:
3723 if (complete)
3724 tcc_error("too many basic types");
3725 t |= u;
3726 bt_size = is_btype_size (u & VT_BTYPE);
3727 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3728 complete = 1;
3729 typespec_found = 1;
3730 break;
3731 case TOK_VOID:
3732 u = VT_VOID;
3733 goto basic_type;
3734 case TOK_SHORT:
3735 u = VT_SHORT;
3736 goto basic_type;
3737 case TOK_INT:
3738 u = VT_INT;
3739 goto basic_type;
3740 case TOK_LONG:
3741 next();
3742 if ((t & VT_BTYPE) == VT_DOUBLE) {
3743 #ifndef TCC_TARGET_PE
3744 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3745 #endif
3746 } else if ((t & VT_BTYPE) == VT_LONG) {
3747 t = (t & ~VT_BTYPE) | VT_LLONG;
3748 } else {
3749 u = VT_LONG;
3750 goto basic_type1;
3752 break;
3753 #ifdef TCC_TARGET_ARM64
3754 case TOK_UINT128:
3755 /* GCC's __uint128_t appears in some Linux header files. Make it a
3756 synonym for long double to get the size and alignment right. */
3757 u = VT_LDOUBLE;
3758 goto basic_type;
3759 #endif
3760 case TOK_BOOL:
3761 u = VT_BOOL;
3762 goto basic_type;
3763 case TOK_FLOAT:
3764 u = VT_FLOAT;
3765 goto basic_type;
3766 case TOK_DOUBLE:
3767 next();
3768 if ((t & VT_BTYPE) == VT_LONG) {
3769 #ifdef TCC_TARGET_PE
3770 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3771 #else
3772 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3773 #endif
3774 } else {
3775 u = VT_DOUBLE;
3776 goto basic_type1;
3778 break;
3779 case TOK_ENUM:
3780 struct_decl(&type1, ad, VT_ENUM);
3781 basic_type2:
3782 u = type1.t;
3783 type->ref = type1.ref;
3784 goto basic_type1;
3785 case TOK_STRUCT:
3786 case TOK_UNION:
3787 struct_decl(&type1, ad, VT_STRUCT);
3788 goto basic_type2;
3790 /* type modifiers */
3791 case TOK_CONST1:
3792 case TOK_CONST2:
3793 case TOK_CONST3:
3794 type->t = t;
3795 parse_btype_qualify(type, VT_CONSTANT);
3796 t = type->t;
3797 next();
3798 break;
3799 case TOK_VOLATILE1:
3800 case TOK_VOLATILE2:
3801 case TOK_VOLATILE3:
3802 type->t = t;
3803 parse_btype_qualify(type, VT_VOLATILE);
3804 t = type->t;
3805 next();
3806 break;
3807 case TOK_SIGNED1:
3808 case TOK_SIGNED2:
3809 case TOK_SIGNED3:
3810 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3811 tcc_error("signed and unsigned modifier");
3812 typespec_found = 1;
3813 t |= VT_DEFSIGN;
3814 next();
3815 break;
3816 case TOK_REGISTER:
3817 case TOK_AUTO:
3818 case TOK_RESTRICT1:
3819 case TOK_RESTRICT2:
3820 case TOK_RESTRICT3:
3821 next();
3822 break;
3823 case TOK_UNSIGNED:
3824 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3825 tcc_error("signed and unsigned modifier");
3826 t |= VT_DEFSIGN | VT_UNSIGNED;
3827 next();
3828 typespec_found = 1;
3829 break;
3831 /* storage */
3832 case TOK_EXTERN:
3833 t |= VT_EXTERN;
3834 next();
3835 break;
3836 case TOK_STATIC:
3837 t |= VT_STATIC;
3838 next();
3839 break;
3840 case TOK_TYPEDEF:
3841 t |= VT_TYPEDEF;
3842 next();
3843 break;
3844 case TOK_INLINE1:
3845 case TOK_INLINE2:
3846 case TOK_INLINE3:
3847 t |= VT_INLINE;
3848 next();
3849 break;
3851 /* GNUC attribute */
3852 case TOK_ATTRIBUTE1:
3853 case TOK_ATTRIBUTE2:
3854 parse_attribute(ad);
3855 if (ad->a.mode) {
3856 u = ad->a.mode -1;
3857 t = (t & ~VT_BTYPE) | u;
3859 break;
3860 /* GNUC typeof */
3861 case TOK_TYPEOF1:
3862 case TOK_TYPEOF2:
3863 case TOK_TYPEOF3:
3864 next();
3865 parse_expr_type(&type1);
3866 /* remove all storage modifiers except typedef */
3867 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3868 goto basic_type2;
3869 default:
3870 if (typespec_found)
3871 goto the_end;
3872 s = sym_find(tok);
3873 if (!s || !(s->type.t & VT_TYPEDEF))
3874 goto the_end;
3876 type->t = ((s->type.t & ~VT_TYPEDEF) |
3877 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3878 type->ref = s->type.ref;
3879 if (t & (VT_CONSTANT | VT_VOLATILE))
3880 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3881 t = type->t;
3883 if (s->r) {
3884 /* get attributes from typedef */
3885 if (0 == ad->a.aligned)
3886 ad->a.aligned = s->a.aligned;
3887 if (0 == ad->a.func_call)
3888 ad->a.func_call = s->a.func_call;
3889 ad->a.packed |= s->a.packed;
3891 next();
3892 typespec_found = 1;
3893 break;
3895 type_found = 1;
3897 the_end:
3898 if (tcc_state->char_is_unsigned) {
3899 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3900 t |= VT_UNSIGNED;
3903 /* long is never used as a type */
3904 if ((t & VT_BTYPE) == VT_LONG)
3905 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3906 defined TCC_TARGET_PE
3907 t = (t & ~VT_BTYPE) | VT_INT;
3908 #else
3909 t = (t & ~VT_BTYPE) | VT_LLONG;
3910 #endif
3911 type->t = t;
3912 return type_found;
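/* Illustrative note (added, not in the original sources): how multiple
   specifiers combine in the loop above:

       long long x;       'long' then 'long'  -> VT_LLONG
       long double d;     -> VT_LDOUBLE (plain double on PE targets)
       unsigned char c;   -> VT_BYTE | VT_DEFSIGN | VT_UNSIGNED
       char ch;           unsigned by default if char_is_unsigned is set

   and a bare 'long' is rewritten at the end to int or long long
   depending on the target, since VT_LONG is never used as a type. */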
3915 /* convert a function parameter type (array to pointer and function to
3916 function pointer) */
3917 static inline void convert_parameter_type(CType *pt)
3919 /* remove const and volatile qualifiers (XXX: const could be used
3920 to indicate a const function parameter) */
3921 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3922 /* array must be transformed to pointer according to ANSI C */
3923 pt->t &= ~VT_ARRAY;
3924 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3925 mk_pointer(pt);
3929 ST_FUNC void parse_asm_str(CString *astr)
3931 skip('(');
3932 parse_mult_str(astr, "string constant");
3935 /* Parse an asm label and return the token */
3936 static int asm_label_instr(void)
3938 int v;
3939 CString astr;
3941 next();
3942 parse_asm_str(&astr);
3943 skip(')');
3944 #ifdef ASM_DEBUG
3945 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3946 #endif
3947 v = tok_alloc(astr.data, astr.size - 1)->tok;
3948 cstr_free(&astr);
3949 return v;
3952 static void post_type(CType *type, AttributeDef *ad, int storage)
3954 int n, l, t1, arg_size, align;
3955 Sym **plast, *s, *first;
3956 AttributeDef ad1;
3957 CType pt;
3959 if (tok == '(') {
3960 /* function declaration */
3961 next();
3962 l = 0;
3963 first = NULL;
3964 plast = &first;
3965 arg_size = 0;
3966 if (tok != ')') {
3967 for(;;) {
3968 /* read param name and compute offset */
3969 if (l != FUNC_OLD) {
3970 if (!parse_btype(&pt, &ad1)) {
3971 if (l) {
3972 tcc_error("invalid type");
3973 } else {
3974 l = FUNC_OLD;
3975 goto old_proto;
3978 l = FUNC_NEW;
3979 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3980 break;
3981 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3982 if ((pt.t & VT_BTYPE) == VT_VOID)
3983 tcc_error("parameter declared as void");
3984 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3985 } else {
3986 old_proto:
3987 n = tok;
3988 if (n < TOK_UIDENT)
3989 expect("identifier");
3990 pt.t = VT_INT;
3991 next();
3993 convert_parameter_type(&pt);
3994 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3995 *plast = s;
3996 plast = &s->next;
3997 if (tok == ')')
3998 break;
3999 skip(',');
4000 if (l == FUNC_NEW && tok == TOK_DOTS) {
4001 l = FUNC_ELLIPSIS;
4002 next();
4003 break;
4007 /* if no parameters, then old type prototype */
4008 if (l == 0)
4009 l = FUNC_OLD;
4010 skip(')');
4011 /* NOTE: const is ignored in returned type as it has a special
4012 meaning in gcc / C++ */
4013 type->t &= ~VT_CONSTANT;
4014 /* some ancient pre-K&R C allows a function to return an array
4015 and the array brackets to be put after the arguments, such
4016 that "int c()[]" means something like "int[] c()" */
4017 if (tok == '[') {
4018 next();
4019 skip(']'); /* only handle simple "[]" */
4020 type->t |= VT_PTR;
4022 /* we push an anonymous symbol which will contain the function prototype */
4023 ad->a.func_args = arg_size;
4024 s = sym_push(SYM_FIELD, type, 0, l);
4025 s->a = ad->a;
4026 s->next = first;
4027 type->t = VT_FUNC;
4028 type->ref = s;
4029 } else if (tok == '[') {
4030 int saved_nocode_wanted = nocode_wanted;
4031 /* array definition */
4032 next();
4033 if (tok == TOK_RESTRICT1)
4034 next();
4035 n = -1;
4036 t1 = 0;
4037 if (tok != ']') {
4038 if (!local_stack || (storage & VT_STATIC))
4039 vpushi(expr_const());
4040 else {
4041 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4042 length must always be evaluated, even under nocode_wanted,
4043 so that its size slot is initialized (e.g. under sizeof
4044 or typeof). */
4045 nocode_wanted = 0;
4046 gexpr();
4048 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4049 n = vtop->c.i;
4050 if (n < 0)
4051 tcc_error("invalid array size");
4052 } else {
4053 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4054 tcc_error("size of variable length array should be an integer");
4055 t1 = VT_VLA;
4058 skip(']');
4059 /* parse next post type */
4060 post_type(type, ad, storage);
4061 if (type->t == VT_FUNC)
4062 tcc_error("declaration of an array of functions");
4063 t1 |= type->t & VT_VLA;
4065 if (t1 & VT_VLA) {
4066 loc -= type_size(&int_type, &align);
4067 loc &= -align;
4068 n = loc;
4070 vla_runtime_type_size(type, &align);
4071 gen_op('*');
4072 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4073 vswap();
4074 vstore();
4076 if (n != -1)
4077 vpop();
4078 nocode_wanted = saved_nocode_wanted;
4080 /* we push an anonymous symbol which will contain the array
4081 element type */
4082 s = sym_push(SYM_FIELD, type, 0, n);
4083 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4084 type->ref = s;
4088 /* Parse a type declaration (except basic type), and return the type
4089 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4090 expected. 'type' should contain the basic type. 'ad' is the
4091 attribute definition of the basic type. It can be modified by
4092 type_decl().
4094 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4096 Sym *s;
4097 CType type1, *type2;
4098 int qualifiers, storage;
4100 while (tok == '*') {
4101 qualifiers = 0;
4102 redo:
4103 next();
4104 switch(tok) {
4105 case TOK_CONST1:
4106 case TOK_CONST2:
4107 case TOK_CONST3:
4108 qualifiers |= VT_CONSTANT;
4109 goto redo;
4110 case TOK_VOLATILE1:
4111 case TOK_VOLATILE2:
4112 case TOK_VOLATILE3:
4113 qualifiers |= VT_VOLATILE;
4114 goto redo;
4115 case TOK_RESTRICT1:
4116 case TOK_RESTRICT2:
4117 case TOK_RESTRICT3:
4118 goto redo;
4119 /* XXX: clarify attribute handling */
4120 case TOK_ATTRIBUTE1:
4121 case TOK_ATTRIBUTE2:
4122 parse_attribute(ad);
4123 break;
4125 mk_pointer(type);
4126 type->t |= qualifiers;
4129 /* recursive type */
4130 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4131 type1.t = 0; /* XXX: same as int */
4132 if (tok == '(') {
4133 next();
4134 /* XXX: this is not correct to modify 'ad' at this point, but
4135 the syntax is not clear */
4136 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4137 parse_attribute(ad);
4138 type_decl(&type1, ad, v, td);
4139 skip(')');
4140 } else {
4141 /* type identifier */
4142 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4143 *v = tok;
4144 next();
4145 } else {
4146 if (!(td & TYPE_ABSTRACT))
4147 expect("identifier");
4148 *v = 0;
4151 storage = type->t & VT_STORAGE;
4152 type->t &= ~VT_STORAGE;
4153 post_type(type, ad, storage);
4154 type->t |= storage;
4155 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4156 parse_attribute(ad);
4158 if (!type1.t)
4159 return;
4160 /* append type at the end of type1 */
4161 type2 = &type1;
4162 for(;;) {
4163 s = type2->ref;
4164 type2 = &s->type;
4165 if (!type2->t) {
4166 *type2 = *type;
4167 break;
4170 *type = type1;
4173 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4174 ST_FUNC int lvalue_type(int t)
4176 int bt, r;
4177 r = VT_LVAL;
4178 bt = t & VT_BTYPE;
4179 if (bt == VT_BYTE || bt == VT_BOOL)
4180 r |= VT_LVAL_BYTE;
4181 else if (bt == VT_SHORT)
4182 r |= VT_LVAL_SHORT;
4183 else
4184 return r;
4185 if (t & VT_UNSIGNED)
4186 r |= VT_LVAL_UNSIGNED;
4187 return r;
4190 /* indirection with full error checking and bound check */
4191 ST_FUNC void indir(void)
4193 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4194 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4195 return;
4196 expect("pointer");
4198 if ((vtop->r & VT_LVAL) && !nocode_wanted)
4199 gv(RC_INT);
4200 vtop->type = *pointed_type(&vtop->type);
4201 /* Arrays and functions are never lvalues */
4202 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4203 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4204 vtop->r |= lvalue_type(vtop->type.t);
4205 /* if bound checking, the referenced pointer must be checked */
4206 #ifdef CONFIG_TCC_BCHECK
4207 if (tcc_state->do_bounds_check)
4208 vtop->r |= VT_MUSTBOUND;
4209 #endif
4213 /* pass a parameter to a function and do type checking and casting */
4214 static void gfunc_param_typed(Sym *func, Sym *arg)
4216 int func_type;
4217 CType type;
4219 func_type = func->c;
4220 if (func_type == FUNC_OLD ||
4221 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4222 /* default casting : only need to convert float to double */
4223 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4224 type.t = VT_DOUBLE;
4225 gen_cast(&type);
4226 } else if (vtop->type.t & VT_BITFIELD) {
4227 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4228 type.ref = vtop->type.ref;
4229 gen_cast(&type);
4231 } else if (arg == NULL) {
4232 tcc_error("too many arguments to function");
4233 } else {
4234 type = arg->type;
4235 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4236 gen_assign_cast(&type);
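/* Illustrative note (added, not in the original sources): for an
   unprototyped or variadic callee the only adjustments made above are
   float -> double and casting bit-field values to their base type, e.g.

       printf("%f", 1.5f);    1.5f is passed as a double

   whereas an argument matching a prototyped parameter goes through the
   full gen_assign_cast() conversion to the declared parameter type. */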
4240 /* parse an expression of the form '(type)' or '(expr)' and return its
4241 type */
4242 static void parse_expr_type(CType *type)
4244 int n;
4245 AttributeDef ad;
4247 skip('(');
4248 if (parse_btype(type, &ad)) {
4249 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4250 } else {
4251 expr_type(type);
4253 skip(')');
4256 static void parse_type(CType *type)
4258 AttributeDef ad;
4259 int n;
4261 if (!parse_btype(type, &ad)) {
4262 expect("type");
4264 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4267 static void vpush_tokc(int t)
4269 CType type;
4270 type.t = t;
4271 type.ref = 0;
4272 vsetc(&type, VT_CONST, &tokc);
4275 ST_FUNC void unary(void)
4277 int n, t, align, size, r, sizeof_caller;
4278 CType type;
4279 Sym *s;
4280 AttributeDef ad;
4282 sizeof_caller = in_sizeof;
4283 in_sizeof = 0;
4284 /* XXX: GCC 2.95.3 does not generate a table although it should be
4285 better here */
4286 tok_next:
4287 switch(tok) {
4288 case TOK_EXTENSION:
4289 next();
4290 goto tok_next;
4291 case TOK_CINT:
4292 case TOK_CCHAR:
4293 case TOK_LCHAR:
4294 vpushi(tokc.i);
4295 next();
4296 break;
4297 case TOK_CUINT:
4298 vpush_tokc(VT_INT | VT_UNSIGNED);
4299 next();
4300 break;
4301 case TOK_CLLONG:
4302 vpush_tokc(VT_LLONG);
4303 next();
4304 break;
4305 case TOK_CULLONG:
4306 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4307 next();
4308 break;
4309 case TOK_CFLOAT:
4310 vpush_tokc(VT_FLOAT);
4311 next();
4312 break;
4313 case TOK_CDOUBLE:
4314 vpush_tokc(VT_DOUBLE);
4315 next();
4316 break;
4317 case TOK_CLDOUBLE:
4318 vpush_tokc(VT_LDOUBLE);
4319 next();
4320 break;
4321 case TOK___FUNCTION__:
4322 if (!gnu_ext)
4323 goto tok_identifier;
4324 /* fall thru */
4325 case TOK___FUNC__:
4327 void *ptr;
4328 int len;
4329 /* special function name identifier */
4330 len = strlen(funcname) + 1;
4331 /* generate char[len] type */
4332 type.t = VT_BYTE;
4333 mk_pointer(&type);
4334 type.t |= VT_ARRAY;
4335 type.ref->c = len;
4336 vpush_ref(&type, data_section, data_section->data_offset, len);
4337 ptr = section_ptr_add(data_section, len);
4338 memcpy(ptr, funcname, len);
4339 next();
4341 break;
4342 case TOK_LSTR:
4343 #ifdef TCC_TARGET_PE
4344 t = VT_SHORT | VT_UNSIGNED;
4345 #else
4346 t = VT_INT;
4347 #endif
4348 goto str_init;
4349 case TOK_STR:
4350 /* string parsing */
4351 t = VT_BYTE;
4352 str_init:
4353 if (tcc_state->warn_write_strings)
4354 t |= VT_CONSTANT;
4355 type.t = t;
4356 mk_pointer(&type);
4357 type.t |= VT_ARRAY;
4358 memset(&ad, 0, sizeof(AttributeDef));
4359 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4360 break;
4361 case '(':
4362 next();
4363 /* cast ? */
4364 if (parse_btype(&type, &ad)) {
4365 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4366 skip(')');
4367 /* check ISOC99 compound literal */
4368 if (tok == '{') {
4369 /* data is allocated locally by default */
4370 if (global_expr)
4371 r = VT_CONST;
4372 else
4373 r = VT_LOCAL;
4374 /* all except arrays are lvalues */
4375 if (!(type.t & VT_ARRAY))
4376 r |= lvalue_type(type.t);
4377 memset(&ad, 0, sizeof(AttributeDef));
4378 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4379 } else {
4380 if (sizeof_caller) {
4381 vpush(&type);
4382 return;
4384 unary();
4385 gen_cast(&type);
4387 } else if (tok == '{') {
4388 if (const_wanted)
4389 tcc_error("expected constant");
4390 /* save all registers */
4391 if (!nocode_wanted)
4392 save_regs(0);
4393 /* statement expression : we do not accept break/continue
4394 inside as GCC does */
4395 block(NULL, NULL, 1);
4396 skip(')');
4397 } else {
4398 gexpr();
4399 skip(')');
4401 break;
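/* The '(' case above covers several different constructs; roughly
   ('struct point' and 'x' are illustrative names):
       (double)n                  // plain cast
       (struct point){1, 2}       // ISO C99 compound literal
       ({ int t = x; t * t; })    // GNU statement expression
   When global_expr is set (constant initializer parsing) a compound
   literal goes to static storage, otherwise to the stack. */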
4402 case '*':
4403 next();
4404 unary();
4405 indir();
4406 break;
4407 case '&':
4408 next();
4409 unary();
4410 /* function names must be treated as function pointers,
4411 except for unary '&' and sizeof. Since we consider that
4412 functions are not lvalues, we only have to handle it
4413 there and in function calls. */
4414 /* arrays can also be used although they are not lvalues */
4415 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4416 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4417 test_lvalue();
4418 mk_pointer(&vtop->type);
4419 gaddrof();
4420 break;
4421 case '!':
4422 next();
4423 unary();
4424 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4425 CType boolean;
4426 boolean.t = VT_BOOL;
4427 gen_cast(&boolean);
4428 vtop->c.i = !vtop->c.i;
4429 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4430 vtop->c.i ^= 1;
4431 else {
4432 save_regs(1);
4433 vseti(VT_JMP, gvtst(1, 0));
4435 break;
4436 case '~':
4437 next();
4438 unary();
4439 vpushi(-1);
4440 gen_op('^');
4441 break;
4442 case '+':
4443 next();
4444 unary();
4445 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4446 tcc_error("pointer not accepted for unary plus");
4447 /* In order to force cast, we add zero, except for floating point
4448 where we really need a no-op (otherwise -0.0 would be transformed
4449 into +0.0). */
4450 if (!is_float(vtop->type.t)) {
4451 vpushi(0);
4452 gen_op('+');
4454 break;
4455 case TOK_SIZEOF:
4456 case TOK_ALIGNOF1:
4457 case TOK_ALIGNOF2:
4458 t = tok;
4459 next();
4460 in_sizeof++;
4461 unary_type(&type); // the nested unary() call resets in_sizeof to 0
4462 size = type_size(&type, &align);
4463 if (t == TOK_SIZEOF) {
4464 if (!(type.t & VT_VLA)) {
4465 if (size < 0)
4466 tcc_error("sizeof applied to an incomplete type");
4467 vpushs(size);
4468 } else {
4469 vla_runtime_type_size(&type, &align);
4471 } else {
4472 vpushs(align);
4474 vtop->type.t |= VT_UNSIGNED;
4475 break;
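/* Example: `sizeof (int[n])` with a VLA bound `n` is computed at run time
   via vla_runtime_type_size(); for complete non-VLA types the size is a
   compile-time constant, and the result is pushed as an unsigned
   (size_t-like) value. */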
4477 case TOK_builtin_expect:
4479 /* __builtin_expect is a no-op for now */
4480 int saved_nocode_wanted;
4481 next();
4482 skip('(');
4483 expr_eq();
4484 skip(',');
4485 saved_nocode_wanted = nocode_wanted;
4486 nocode_wanted = 1;
4487 expr_lor_const();
4488 vpop();
4489 nocode_wanted = saved_nocode_wanted;
4490 skip(')');
4492 break;
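/* Example: `if (__builtin_expect(p == NULL, 0))` behaves exactly like
   `if (p == NULL)` here; the hint expression is parsed with code
   generation disabled and then discarded. */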
4493 case TOK_builtin_types_compatible_p:
4495 CType type1, type2;
4496 next();
4497 skip('(');
4498 parse_type(&type1);
4499 skip(',');
4500 parse_type(&type2);
4501 skip(')');
4502 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4503 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4504 vpushi(is_compatible_types(&type1, &type2));
4506 break;
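/* Example: `__builtin_types_compatible_p(int, const int)` yields 1,
   since top-level const/volatile qualifiers are stripped before the
   comparison; `__builtin_types_compatible_p(int, double)` yields 0. */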
4507 case TOK_builtin_choose_expr:
4509 int saved_nocode_wanted;
4510 int64_t c;
4511 next();
4512 skip('(');
4513 c = expr_const64();
4514 skip(',');
4515 if (!c) {
4516 saved_nocode_wanted = nocode_wanted;
4517 nocode_wanted = 1;
4519 expr_eq();
4520 if (!c) {
4521 vpop();
4522 nocode_wanted = saved_nocode_wanted;
4524 skip(',');
4525 if (c) {
4526 saved_nocode_wanted = nocode_wanted;
4527 nocode_wanted = 1;
4529 expr_eq();
4530 if (c) {
4531 vpop();
4532 nocode_wanted = saved_nocode_wanted;
4534 skip(')');
4536 break;
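/* Example: `__builtin_choose_expr(1, f(), g())` parses both call
   expressions, but only the one selected by the constant condition
   produces code and a value; the other is parsed with nocode_wanted
   set and then popped. */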
4537 case TOK_builtin_constant_p:
4539 int saved_nocode_wanted, res;
4540 next();
4541 skip('(');
4542 saved_nocode_wanted = nocode_wanted;
4543 nocode_wanted = 1;
4544 gexpr();
4545 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4546 vpop();
4547 nocode_wanted = saved_nocode_wanted;
4548 skip(')');
4549 vpushi(res);
4551 break;
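/* Example: `__builtin_constant_p(3 * 4)` yields 1 while
   `__builtin_constant_p(v)` for an ordinary variable yields 0; the
   argument is evaluated with nocode_wanted so it never produces code. */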
4552 case TOK_builtin_frame_address:
4553 case TOK_builtin_return_address:
4555 int tok1 = tok;
4556 int level;
4557 CType type;
4558 next();
4559 skip('(');
4560 if (tok != TOK_CINT) {
4561 tcc_error("%s only takes positive integers",
4562 tok1 == TOK_builtin_return_address ?
4563 "__builtin_return_address" :
4564 "__builtin_frame_address");
4566 level = (uint32_t)tokc.i;
4567 next();
4568 skip(')');
4569 type.t = VT_VOID;
4570 mk_pointer(&type);
4571 vset(&type, VT_LOCAL, 0); /* local frame */
4572 while (level--) {
4573 mk_pointer(&vtop->type);
4574 indir(); /* -> parent frame */
4576 if (tok1 == TOK_builtin_return_address) {
4577 // assume return address is just above frame pointer on stack
4578 vpushi(PTR_SIZE);
4579 gen_op('+');
4580 mk_pointer(&vtop->type);
4581 indir();
4584 break;
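/* Example: `__builtin_return_address(0)` loads the word just above the
   current frame pointer, and `__builtin_frame_address(1)` follows one
   saved frame pointer link; the level must be a literal integer
   constant here. */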
4585 #ifdef TCC_TARGET_X86_64
4586 #ifdef TCC_TARGET_PE
4587 case TOK_builtin_va_start:
4589 next();
4590 skip('(');
4591 expr_eq();
4592 skip(',');
4593 expr_eq();
4594 skip(')');
4595 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4596 tcc_error("__builtin_va_start expects a local variable");
4597 vtop->r &= ~(VT_LVAL | VT_REF);
4598 vtop->type = char_pointer_type;
4599 vtop->c.i += 8;
4600 vstore();
4602 break;
4603 #else
4604 case TOK_builtin_va_arg_types:
4606 CType type;
4607 next();
4608 skip('(');
4609 parse_type(&type);
4610 skip(')');
4611 vpushi(classify_x86_64_va_arg(&type));
4613 break;
4614 #endif
4615 #endif
4617 #ifdef TCC_TARGET_ARM64
4618 case TOK___va_start: {
4619 if (nocode_wanted)
4620 tcc_error("statement in global scope");
4621 next();
4622 skip('(');
4623 expr_eq();
4624 skip(',');
4625 expr_eq();
4626 skip(')');
4627 //xx check types
4628 gen_va_start();
4629 vpushi(0);
4630 vtop->type.t = VT_VOID;
4631 break;
4633 case TOK___va_arg: {
4634 CType type;
4635 if (nocode_wanted)
4636 tcc_error("statement in global scope");
4637 next();
4638 skip('(');
4639 expr_eq();
4640 skip(',');
4641 parse_type(&type);
4642 skip(')');
4643 //xx check types
4644 gen_va_arg(&type);
4645 vtop->type = type;
4646 break;
4648 case TOK___arm64_clear_cache: {
4649 next();
4650 skip('(');
4651 expr_eq();
4652 skip(',');
4653 expr_eq();
4654 skip(')');
4655 gen_clear_cache();
4656 vpushi(0);
4657 vtop->type.t = VT_VOID;
4658 break;
4660 #endif
4661 /* pre operations */
4662 case TOK_INC:
4663 case TOK_DEC:
4664 t = tok;
4665 next();
4666 unary();
4667 inc(0, t);
4668 break;
4669 case '-':
4670 next();
4671 unary();
4672 t = vtop->type.t & VT_BTYPE;
4673 if (is_float(t)) {
4674 /* In IEEE negate(x) isn't subtract(0,x), but rather
4675 subtract(-0, x). */
4676 vpush(&vtop->type);
4677 if (t == VT_FLOAT)
4678 vtop->c.f = -0.0f;
4679 else if (t == VT_DOUBLE)
4680 vtop->c.d = -0.0;
4681 else
4682 vtop->c.ld = -0.0;
4683 } else
4684 vpushi(0);
4685 vswap();
4686 gen_op('-');
4687 break;
4688 case TOK_LAND:
4689 if (!gnu_ext)
4690 goto tok_identifier;
4691 next();
4692 /* allow taking the address of a label (GNU extension) */
4693 if (tok < TOK_UIDENT)
4694 expect("label identifier");
4695 s = label_find(tok);
4696 if (!s) {
4697 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4698 } else {
4699 if (s->r == LABEL_DECLARED)
4700 s->r = LABEL_FORWARD;
4702 if (!s->type.t) {
4703 s->type.t = VT_VOID;
4704 mk_pointer(&s->type);
4705 s->type.t |= VT_STATIC;
4707 vpushsym(&s->type, s);
4708 next();
4709 break;
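/* Example (GNU computed goto):
       void *p = &&out;
       goto *p;
   out: ;
   The label address is pushed as a static void * value. */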
4711 // special qnan, snan and infinity values
4712 case TOK___NAN__:
4713 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4714 next();
4715 break;
4716 case TOK___SNAN__:
4717 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4718 next();
4719 break;
4720 case TOK___INF__:
4721 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4722 next();
4723 break;
4725 default:
4726 tok_identifier:
4727 t = tok;
4728 next();
4729 if (t < TOK_UIDENT)
4730 expect("identifier");
4731 s = sym_find(t);
4732 if (!s) {
4733 const char *name = get_tok_str(t, NULL);
4734 if (tok != '(')
4735 tcc_error("'%s' undeclared", name);
4736 /* for simple function calls, we tolerate undeclared
4737 external references to an int() function */
4738 if (tcc_state->warn_implicit_function_declaration
4739 #ifdef TCC_TARGET_PE
4740 /* people must be warned about using undeclared WINAPI functions
4741 (which usually start with an uppercase letter) */
4742 || (name[0] >= 'A' && name[0] <= 'Z')
4743 #endif
4745 tcc_warning("implicit declaration of function '%s'", name);
4746 s = external_global_sym(t, &func_old_type, 0);
4748 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4749 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4750 /* if referencing an inline function, then we generate a
4751 symbol to it if not already done. It will have the
4752 effect to generate code for it at the end of the
4753 compilation unit. Inline functions are always
4754 generated in the text section. */
4755 if (!s->c && !nocode_wanted)
4756 put_extern_sym(s, text_section, 0, 0);
4757 r = VT_SYM | VT_CONST;
4758 } else {
4759 r = s->r;
4760 /* A symbol that has a register is a local register variable,
4761 which starts out as VT_LOCAL value. */
4762 if ((r & VT_VALMASK) < VT_CONST)
4763 r = (r & ~VT_VALMASK) | VT_LOCAL;
4765 vset(&s->type, r, s->c);
4766 /* Point to s as backpointer (even without r&VT_SYM).
4767 Will be used by at least the x86 inline asm parser for
4768 regvars. */
4769 vtop->sym = s;
4770 if (vtop->r & VT_SYM) {
4771 vtop->c.i = 0;
4773 break;
4776 /* post operations */
4777 while (1) {
4778 if (tok == TOK_INC || tok == TOK_DEC) {
4779 inc(1, tok);
4780 next();
4781 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4782 int qualifiers;
4783 /* field */
4784 if (tok == TOK_ARROW)
4785 indir();
4786 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4787 test_lvalue();
4788 gaddrof();
4789 /* expect a pointer to a structure */
4790 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4791 expect("struct or union");
4792 if (tok == TOK_CDOUBLE)
4793 expect("field name");
4794 next();
4795 if (tok == TOK_CINT || tok == TOK_CUINT)
4796 expect("field name");
4797 s = find_field(&vtop->type, tok);
4798 if (!s)
4799 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4800 /* add field offset to pointer */
4801 vtop->type = char_pointer_type; /* change type to 'char *' */
4802 vpushi(s->c);
4803 gen_op('+');
4804 /* change type to field type, and set to lvalue */
4805 vtop->type = s->type;
4806 vtop->type.t |= qualifiers;
4807 /* an array is never an lvalue */
4808 if (!(vtop->type.t & VT_ARRAY)) {
4809 vtop->r |= lvalue_type(vtop->type.t);
4810 #ifdef CONFIG_TCC_BCHECK
4811 /* if bound checking, the referenced pointer must be checked */
4812 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4813 vtop->r |= VT_MUSTBOUND;
4814 #endif
4816 next();
4817 } else if (tok == '[') {
4818 next();
4819 gexpr();
4820 gen_op('+');
4821 indir();
4822 skip(']');
4823 } else if (tok == '(') {
4824 SValue ret;
4825 Sym *sa;
4826 int nb_args, ret_nregs, ret_align, regsize, variadic;
4828 /* function call */
4829 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4830 /* pointer test (no array accepted) */
4831 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4832 vtop->type = *pointed_type(&vtop->type);
4833 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4834 goto error_func;
4835 } else {
4836 error_func:
4837 expect("function pointer");
4839 } else {
4840 vtop->r &= ~VT_LVAL; /* no lvalue */
4842 /* get return type */
4843 s = vtop->type.ref;
4844 next();
4845 sa = s->next; /* first parameter */
4846 nb_args = 0;
4847 ret.r2 = VT_CONST;
4848 /* compute first implicit argument if a structure is returned */
4849 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4850 variadic = (s->c == FUNC_ELLIPSIS);
4851 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4852 &ret_align, &regsize);
4853 if (!ret_nregs) {
4854 /* get some space for the returned structure */
4855 size = type_size(&s->type, &align);
4856 #ifdef TCC_TARGET_ARM64
4857 /* On arm64, a small struct is returned in registers.
4858 It is much easier to write it to memory if we know
4859 that we are allowed to write some extra bytes, so
4860 round the allocated space up to a power of 2: */
4861 if (size < 16)
4862 while (size & (size - 1))
4863 size = (size | (size - 1)) + 1;
4864 #endif
4865 loc = (loc - size) & -align;
4866 ret.type = s->type;
4867 ret.r = VT_LOCAL | VT_LVAL;
4868 /* pass it as 'int' to avoid structure arg passing
4869 problems */
4870 vseti(VT_LOCAL, loc);
4871 ret.c = vtop->c;
4872 nb_args++;
4874 } else {
4875 ret_nregs = 1;
4876 ret.type = s->type;
4879 if (ret_nregs) {
4880 /* return in register */
4881 if (is_float(ret.type.t)) {
4882 ret.r = reg_fret(ret.type.t);
4883 #ifdef TCC_TARGET_X86_64
4884 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4885 ret.r2 = REG_QRET;
4886 #endif
4887 } else {
4888 #ifndef TCC_TARGET_ARM64
4889 #ifdef TCC_TARGET_X86_64
4890 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4891 #else
4892 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4893 #endif
4894 ret.r2 = REG_LRET;
4895 #endif
4896 ret.r = REG_IRET;
4898 ret.c.i = 0;
4900 if (tok != ')') {
4901 for(;;) {
4902 expr_eq();
4903 gfunc_param_typed(s, sa);
4904 nb_args++;
4905 if (sa)
4906 sa = sa->next;
4907 if (tok == ')')
4908 break;
4909 skip(',');
4912 if (sa)
4913 tcc_error("too few arguments to function");
4914 skip(')');
4915 if (!nocode_wanted) {
4916 gfunc_call(nb_args);
4917 } else {
4918 vtop -= (nb_args + 1);
4921 /* return value */
4922 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4923 vsetc(&ret.type, r, &ret.c);
4924 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4927 /* handle packed struct return */
4928 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4929 int addr, offset;
4931 size = type_size(&s->type, &align);
4932 /* We often write whole registers, so make sure there's enough
4933 space. Assume the register size is a power of 2. */
4934 if (regsize > align)
4935 align = regsize;
4936 loc = (loc - size) & -align;
4937 addr = loc;
4938 offset = 0;
4939 for (;;) {
4940 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4941 vswap();
4942 vstore();
4943 vtop--;
4944 if (--ret_nregs == 0)
4945 break;
4946 offset += regsize;
4948 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4950 } else {
4951 break;
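/* Note on the call sequence above: when the callee returns a structure
   that does not fit in registers (gfunc_sret() == 0), space is reserved
   on the caller's stack and its address is passed as a hidden first
   argument; otherwise the struct comes back in one or more registers and
   is spilled afterwards to a suitably aligned stack slot. */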
4956 ST_FUNC void expr_prod(void)
4958 int t;
4960 unary();
4961 while (tok == '*' || tok == '/' || tok == '%') {
4962 t = tok;
4963 next();
4964 unary();
4965 gen_op(t);
4969 ST_FUNC void expr_sum(void)
4971 int t;
4973 expr_prod();
4974 while (tok == '+' || tok == '-') {
4975 t = tok;
4976 next();
4977 expr_prod();
4978 gen_op(t);
4982 static void expr_shift(void)
4984 int t;
4986 expr_sum();
4987 while (tok == TOK_SHL || tok == TOK_SAR) {
4988 t = tok;
4989 next();
4990 expr_sum();
4991 gen_op(t);
4995 static void expr_cmp(void)
4997 int t;
4999 expr_shift();
5000 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5001 tok == TOK_ULT || tok == TOK_UGE) {
5002 t = tok;
5003 next();
5004 expr_shift();
5005 gen_op(t);
5009 static void expr_cmpeq(void)
5011 int t;
5013 expr_cmp();
5014 while (tok == TOK_EQ || tok == TOK_NE) {
5015 t = tok;
5016 next();
5017 expr_cmp();
5018 gen_op(t);
5022 static void expr_and(void)
5024 expr_cmpeq();
5025 while (tok == '&') {
5026 next();
5027 expr_cmpeq();
5028 gen_op('&');
5032 static void expr_xor(void)
5034 expr_and();
5035 while (tok == '^') {
5036 next();
5037 expr_and();
5038 gen_op('^');
5042 static void expr_or(void)
5044 expr_xor();
5045 while (tok == '|') {
5046 next();
5047 expr_xor();
5048 gen_op('|');
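/* The expr_prod .. expr_or chain above implements the binary operator
   precedence levels by plain recursive descent; e.g. in `a | b + c * d`
   expr_prod consumes `c * d`, expr_sum then applies `+`, and expr_or
   finally applies `|`. */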
5052 /* XXX: fix this mess */
5053 static void expr_land_const(void)
5055 expr_or();
5056 while (tok == TOK_LAND) {
5057 next();
5058 expr_or();
5059 gen_op(TOK_LAND);
5062 static void expr_lor_const(void)
5064 expr_land_const();
5065 while (tok == TOK_LOR) {
5066 next();
5067 expr_land_const();
5068 gen_op(TOK_LOR);
5072 static void expr_land(void)
5074 expr_or();
5075 if (tok == TOK_LAND) {
5076 int t = 0;
5077 for(;;) {
5078 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5079 CType ctb;
5080 ctb.t = VT_BOOL;
5081 gen_cast(&ctb);
5082 if (vtop->c.i) {
5083 vpop();
5084 } else {
5085 int saved_nocode_wanted = nocode_wanted;
5086 nocode_wanted = 1;
5087 while (tok == TOK_LAND) {
5088 next();
5089 expr_or();
5090 vpop();
5092 if (t)
5093 gsym(t);
5094 nocode_wanted = saved_nocode_wanted;
5095 gen_cast(&int_type);
5096 break;
5098 } else {
5099 if (!t)
5100 save_regs(1);
5101 t = gvtst(1, t);
5103 if (tok != TOK_LAND) {
5104 if (t)
5105 vseti(VT_JMPI, t);
5106 else
5107 vpushi(1);
5108 break;
5110 next();
5111 expr_or();
5116 static void expr_lor(void)
5118 expr_land();
5119 if (tok == TOK_LOR) {
5120 int t = 0;
5121 for(;;) {
5122 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5123 CType ctb;
5124 ctb.t = VT_BOOL;
5125 gen_cast(&ctb);
5126 if (!vtop->c.i) {
5127 vpop();
5128 } else {
5129 int saved_nocode_wanted = nocode_wanted;
5130 nocode_wanted = 1;
5131 while (tok == TOK_LOR) {
5132 next();
5133 expr_land();
5134 vpop();
5136 if (t)
5137 gsym(t);
5138 nocode_wanted = saved_nocode_wanted;
5139 gen_cast(&int_type);
5140 break;
5142 } else {
5143 if (!t)
5144 save_regs(1);
5145 t = gvtst(0, t);
5147 if (tok != TOK_LOR) {
5148 if (t)
5149 vseti(VT_JMP, t);
5150 else
5151 vpushi(0);
5152 break;
5154 next();
5155 expr_land();
5160 /* Assuming vtop is a value used in a conditional context
5161 (i.e. compared with zero) return 0 if it's false, 1 if
5162 true and -1 if it can't be statically determined. */
5163 static int condition_3way(void)
5165 int c = -1;
5166 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5167 (!(vtop->r & VT_SYM) ||
5168 !(vtop->sym->type.t & VT_WEAK))) {
5169 CType boolean;
5170 boolean.t = VT_BOOL;
5171 vdup();
5172 gen_cast(&boolean);
5173 c = vtop->c.i;
5174 vpop();
5176 return c;
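/* Example: in `x = 1 ? a : b;` or `if (0) ...` the controlling expression
   is a constant, so condition_3way() folds it to 1 or 0 and the callers
   (expr_cond, the if statement) can avoid generating the dead branch. */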
5179 static void expr_cond(void)
5181 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv;
5182 int c;
5183 SValue sv;
5184 CType type, type1, type2;
5186 expr_lor();
5187 if (tok == '?') {
5188 next();
5189 c = condition_3way();
5190 if (c >= 0) {
5191 int saved_nocode_wanted = nocode_wanted;
5192 if (c) {
5193 if (tok != ':' || !gnu_ext) {
5194 vpop();
5195 gexpr();
5197 skip(':');
5198 nocode_wanted = 1;
5199 expr_cond();
5200 vpop();
5201 nocode_wanted = saved_nocode_wanted;
5202 } else {
5203 vpop();
5204 if (tok != ':' || !gnu_ext) {
5205 nocode_wanted = 1;
5206 gexpr();
5207 vpop();
5208 nocode_wanted = saved_nocode_wanted;
5210 skip(':');
5211 expr_cond();
5214 else {
5215 /* XXX This doesn't handle nocode_wanted correctly at all.
5216 It unconditionally calls gv/gvtst and friends. That's
5217 the case for many of the expr_ routines. Currently
5218 that should generate only useless code, but depending
5219 on other operand handling this might also generate
5220 pointer derefs for lvalue conversions whose result
5221 is useless, but can nevertheless lead to a segfault.
5223 At some point we need to overhaul the whole nocode_wanted
5224 handling. */
5225 if (vtop != vstack) {
5226 /* needed to avoid having different registers saved in
5227 each branch */
5228 if (is_float(vtop->type.t)) {
5229 rc = RC_FLOAT;
5230 #ifdef TCC_TARGET_X86_64
5231 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5232 rc = RC_ST0;
5234 #endif
5236 else
5237 rc = RC_INT;
5238 gv(rc);
5239 save_regs(1);
5241 if (tok == ':' && gnu_ext) {
5242 gv_dup();
5243 tt = gvtst(1, 0);
5244 } else {
5245 tt = gvtst(1, 0);
5246 gexpr();
5248 type1 = vtop->type;
5249 sv = *vtop; /* save value to handle it later */
5250 vtop--; /* no vpop so that FP stack is not flushed */
5251 skip(':');
5252 u = gjmp(0);
5253 gsym(tt);
5254 expr_cond();
5255 type2 = vtop->type;
5257 t1 = type1.t;
5258 bt1 = t1 & VT_BTYPE;
5259 t2 = type2.t;
5260 bt2 = t2 & VT_BTYPE;
5261 /* cast operands to correct type according to ISOC rules */
5262 if (is_float(bt1) || is_float(bt2)) {
5263 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5264 type.t = VT_LDOUBLE;
5265 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5266 type.t = VT_DOUBLE;
5267 } else {
5268 type.t = VT_FLOAT;
5270 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5271 /* cast to biggest op */
5272 type.t = VT_LLONG;
5273 /* convert to unsigned if it does not fit in a long long */
5274 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5275 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5276 type.t |= VT_UNSIGNED;
5277 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5278 /* If one is a null ptr constant the result type
5279 is the other. */
5280 if (is_null_pointer (vtop))
5281 type = type1;
5282 else if (is_null_pointer (&sv))
5283 type = type2;
5284 /* XXX: test pointer compatibility, C99 has more elaborate
5285 rules here. */
5286 else
5287 type = type1;
5288 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5289 /* XXX: test function pointer compatibility */
5290 type = bt1 == VT_FUNC ? type1 : type2;
5291 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5292 /* XXX: test structure compatibility */
5293 type = bt1 == VT_STRUCT ? type1 : type2;
5294 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5295 /* NOTE: as an extension, we accept void on only one side */
5296 type.t = VT_VOID;
5297 } else {
5298 /* integer operations */
5299 type.t = VT_INT;
5300 /* convert to unsigned if it does not fit in an integer */
5301 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5302 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5303 type.t |= VT_UNSIGNED;
5305 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5306 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5307 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5309 /* now we convert second operand */
5310 gen_cast(&type);
5311 if (islv) {
5312 mk_pointer(&vtop->type);
5313 gaddrof();
5315 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5316 gaddrof();
5317 rc = RC_INT;
5318 if (is_float(type.t)) {
5319 rc = RC_FLOAT;
5320 #ifdef TCC_TARGET_X86_64
5321 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5322 rc = RC_ST0;
5324 #endif
5325 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5326 /* for long longs, we use fixed registers to avoid having
5327 to handle a complicated move */
5328 rc = RC_IRET;
5331 r2 = gv(rc);
5332 /* this is horrible, but we must also convert first
5333 operand */
5334 tt = gjmp(0);
5335 gsym(u);
5336 /* put again first value and cast it */
5337 *vtop = sv;
5338 gen_cast(&type);
5339 if (islv) {
5340 mk_pointer(&vtop->type);
5341 gaddrof();
5343 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5344 gaddrof();
5345 r1 = gv(rc);
5346 move_reg(r2, r1, type.t);
5347 vtop->r = r2;
5348 gsym(tt);
5349 if (islv)
5350 indir();
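/* Examples of the result-type rules applied above: `c ? 1 : 2.0` has type
   double, `c ? 1 : 2U` has type unsigned int, and `c ? p : 0` with a
   pointer `p` keeps the type of `p`; struct operands that are both
   lvalues are rewritten as `*(c ? &a : &b)` so the result stays an
   lvalue. */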
5355 static void expr_eq(void)
5357 int t;
5359 expr_cond();
5360 if (tok == '=' ||
5361 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5362 tok == TOK_A_XOR || tok == TOK_A_OR ||
5363 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5364 test_lvalue();
5365 t = tok;
5366 next();
5367 if (t == '=') {
5368 expr_eq();
5369 } else {
5370 vdup();
5371 expr_eq();
5372 gen_op(t & 0x7f);
5374 vstore();
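/* Compound assignments reuse the binary operator code: for `a += b` the
   lvalue is duplicated (vdup), `b` is evaluated, gen_op() combines them
   (the underlying operator is recovered by masking the assignment token),
   and vstore() writes the result back into `a`. */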
5378 ST_FUNC void gexpr(void)
5380 while (1) {
5381 expr_eq();
5382 if (tok != ',')
5383 break;
5384 vpop();
5385 next();
5389 /* parse an expression and return its type without any side effect. */
5390 static void expr_type(CType *type)
5392 int saved_nocode_wanted;
5394 saved_nocode_wanted = nocode_wanted;
5395 nocode_wanted = 1;
5396 gexpr();
5397 *type = vtop->type;
5398 vpop();
5399 nocode_wanted = saved_nocode_wanted;
5402 /* parse a unary expression and return its type without any side
5403 effect. */
5404 static void unary_type(CType *type)
5406 int a;
5408 a = nocode_wanted;
5409 nocode_wanted = 1;
5410 unary();
5411 *type = vtop->type;
5412 vpop();
5413 nocode_wanted = a;
5416 /* parse a constant expression and return value in vtop. */
5417 static void expr_const1(void)
5419 int a;
5420 a = const_wanted;
5421 const_wanted = 1;
5422 expr_cond();
5423 const_wanted = a;
5426 /* parse an integer constant and return its value. */
5427 static inline int64_t expr_const64(void)
5429 int64_t c;
5430 expr_const1();
5431 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5432 expect("constant expression");
5433 c = vtop->c.i;
5434 vpop();
5435 return c;
5438 /* parse an integer constant and return its value.
5439 Complain if it doesn't fit 32bit (signed or unsigned). */
5440 ST_FUNC int expr_const(void)
5442 int c;
5443 int64_t wc = expr_const64();
5444 c = wc;
5445 if (c != wc && (unsigned)c != wc)
5446 tcc_error("constant exceeds 32 bit");
5447 return c;
5450 /* return the label token if current token is a label, otherwise
5451 return zero */
5452 static int is_label(void)
5454 int last_tok;
5456 /* fast test first */
5457 if (tok < TOK_UIDENT)
5458 return 0;
5459 /* no need to save tokc because tok is an identifier */
5460 last_tok = tok;
5461 next();
5462 if (tok == ':') {
5463 next();
5464 return last_tok;
5465 } else {
5466 unget_tok(last_tok);
5467 return 0;
5471 static void label_or_decl(int l)
5473 int last_tok;
5475 /* fast test first */
5476 if (tok >= TOK_UIDENT)
5478 /* no need to save tokc because tok is an identifier */
5479 last_tok = tok;
5480 next();
5481 if (tok == ':') {
5482 unget_tok(last_tok);
5483 return;
5485 unget_tok(last_tok);
5487 decl(l);
5490 static int case_cmp(const void *pa, const void *pb)
5492 int64_t a = (*(struct case_t**) pa)->v1;
5493 int64_t b = (*(struct case_t**) pb)->v1;
5494 return a < b ? -1 : a > b;
5497 static void gcase(struct case_t **base, int len, int *bsym)
5499 struct case_t *p;
5500 int e;
5501 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5502 gv(RC_INT);
5503 while (len > 4) {
5504 /* binary search */
5505 p = base[len/2];
5506 vdup();
5507 if (ll)
5508 vpushll(p->v2);
5509 else
5510 vpushi(p->v2);
5511 gen_op(TOK_LE);
5512 e = gtst(1, 0);
5513 vdup();
5514 if (ll)
5515 vpushll(p->v1);
5516 else
5517 vpushi(p->v1);
5518 gen_op(TOK_GE);
5519 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5520 /* x < v1 */
5521 gcase(base, len/2, bsym);
5522 if (cur_switch->def_sym)
5523 gjmp_addr(cur_switch->def_sym);
5524 else
5525 *bsym = gjmp(*bsym);
5526 /* x > v2 */
5527 gsym(e);
5528 e = len/2 + 1;
5529 base += e; len -= e;
5531 /* linear scan */
5532 while (len--) {
5533 p = *base++;
5534 vdup();
5535 if (ll)
5536 vpushll(p->v2);
5537 else
5538 vpushi(p->v2);
5539 if (p->v1 == p->v2) {
5540 gen_op(TOK_EQ);
5541 gtst_addr(0, p->sym);
5542 } else {
5543 gen_op(TOK_LE);
5544 e = gtst(1, 0);
5545 vdup();
5546 if (ll)
5547 vpushll(p->v1);
5548 else
5549 vpushi(p->v1);
5550 gen_op(TOK_GE);
5551 gtst_addr(0, p->sym);
5552 gsym(e);
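/* Sketch of the dispatch generated above for the sorted case table: with
   more than four entries a binary search compares the switch value
   against the middle range [v1,v2] and recurses on each half, falling
   back to a linear scan of at most four cases; GNU ranges such as
   `case 1 ... 5:` are stored as a single [v1,v2] entry. */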
5557 static void block(int *bsym, int *csym, int is_expr)
5559 int a, b, c, d, cond;
5560 Sym *s;
5562 /* generate line number info */
5563 if (tcc_state->do_debug &&
5564 (last_line_num != file->line_num || last_ind != ind)) {
5565 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5566 last_ind = ind;
5567 last_line_num = file->line_num;
5570 if (is_expr) {
5571 /* default return value is (void) */
5572 vpushi(0);
5573 vtop->type.t = VT_VOID;
5576 if (tok == TOK_IF) {
5577 /* if test */
5578 int saved_nocode_wanted = nocode_wanted;
5579 next();
5580 skip('(');
5581 gexpr();
5582 skip(')');
5583 cond = condition_3way();
5584 if (cond == 0)
5585 nocode_wanted |= 2;
5586 a = gvtst(1, 0);
5587 block(bsym, csym, 0);
5588 if (cond != 1)
5589 nocode_wanted = saved_nocode_wanted;
5590 c = tok;
5591 if (c == TOK_ELSE) {
5592 next();
5593 if (cond == 1)
5594 nocode_wanted |= 2;
5595 d = gjmp(0);
5596 gsym(a);
5597 block(bsym, csym, 0);
5598 gsym(d); /* patch else jmp */
5599 if (cond != 0)
5600 nocode_wanted = saved_nocode_wanted;
5601 } else
5602 gsym(a);
5603 } else if (tok == TOK_WHILE) {
5604 int saved_nocode_wanted;
5605 nocode_wanted &= ~2;
5606 next();
5607 d = ind;
5608 vla_sp_restore();
5609 skip('(');
5610 gexpr();
5611 skip(')');
5612 a = gvtst(1, 0);
5613 b = 0;
5614 ++local_scope;
5615 saved_nocode_wanted = nocode_wanted;
5616 block(&a, &b, 0);
5617 nocode_wanted = saved_nocode_wanted;
5618 --local_scope;
5619 if(!nocode_wanted)
5620 gjmp_addr(d);
5621 gsym(a);
5622 gsym_addr(b, d);
5623 } else if (tok == '{') {
5624 Sym *llabel;
5625 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5627 next();
5628 /* record local declaration stack position */
5629 s = local_stack;
5630 llabel = local_label_stack;
5631 ++local_scope;
5633 /* handle local labels declarations */
5634 if (tok == TOK_LABEL) {
5635 next();
5636 for(;;) {
5637 if (tok < TOK_UIDENT)
5638 expect("label identifier");
5639 label_push(&local_label_stack, tok, LABEL_DECLARED);
5640 next();
5641 if (tok == ',') {
5642 next();
5643 } else {
5644 skip(';');
5645 break;
5649 while (tok != '}') {
5650 label_or_decl(VT_LOCAL);
5651 if (tok != '}') {
5652 if (is_expr)
5653 vpop();
5654 block(bsym, csym, is_expr);
5657 /* pop locally defined labels */
5658 label_pop(&local_label_stack, llabel);
5659 /* pop locally defined symbols */
5660 --local_scope;
5661 /* In the is_expr case (a statement expression is finished here),
5662 vtop might refer to symbols on the local_stack. Either via the
5663 type or via vtop->sym. We can't pop those nor any that in turn
5664 might be referred to. To make it easier we don't roll back
5665 any symbols in that case; some upper level call to block() will
5666 do that. We do have to remove such symbols from the lookup
5667 tables, though. sym_pop will do that. */
5668 sym_pop(&local_stack, s, is_expr);
5670 /* Pop VLA frames and restore stack pointer if required */
5671 if (vlas_in_scope > saved_vlas_in_scope) {
5672 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5673 vla_sp_restore();
5675 vlas_in_scope = saved_vlas_in_scope;
5677 next();
5678 } else if (tok == TOK_RETURN) {
5679 next();
5680 if (tok != ';') {
5681 gexpr();
5682 gen_assign_cast(&func_vt);
5683 #ifdef TCC_TARGET_ARM64
5684 // Perhaps it would be better to use this for all backends:
5685 greturn();
5686 #else
5687 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5688 CType type, ret_type;
5689 int ret_align, ret_nregs, regsize;
5690 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5691 &ret_align, &regsize);
5692 if (0 == ret_nregs) {
5693 /* if returning structure, must copy it to implicit
5694 first pointer arg location */
5695 type = func_vt;
5696 mk_pointer(&type);
5697 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5698 indir();
5699 vswap();
5700 /* copy structure value to pointer */
5701 vstore();
5702 } else {
5703 /* returning structure packed into registers */
5704 int r, size, addr, align;
5705 size = type_size(&func_vt,&align);
5706 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5707 (vtop->c.i & (ret_align-1)))
5708 && (align & (ret_align-1))) {
5709 loc = (loc - size) & -ret_align;
5710 addr = loc;
5711 type = func_vt;
5712 vset(&type, VT_LOCAL | VT_LVAL, addr);
5713 vswap();
5714 vstore();
5715 vpop();
5716 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5718 vtop->type = ret_type;
5719 if (is_float(ret_type.t))
5720 r = rc_fret(ret_type.t);
5721 else
5722 r = RC_IRET;
5724 if (ret_nregs == 1)
5725 gv(r);
5726 else {
5727 for (;;) {
5728 vdup();
5729 gv(r);
5730 vpop();
5731 if (--ret_nregs == 0)
5732 break;
5733 /* We assume that when a structure is returned in multiple
5734 registers, their classes are consecutive values of the
5735 sequence s(n) = 2^n */
5736 r <<= 1;
5737 vtop->c.i += regsize;
5741 } else if (is_float(func_vt.t)) {
5742 gv(rc_fret(func_vt.t));
5743 } else {
5744 gv(RC_IRET);
5746 #endif
5747 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5749 skip(';');
5750 /* jump unless last stmt in top-level block */
5751 if (tok != '}' || local_scope != 1)
5752 rsym = gjmp(rsym);
5753 nocode_wanted |= 2;
5754 } else if (tok == TOK_BREAK) {
5755 /* compute jump */
5756 if (!bsym)
5757 tcc_error("cannot break");
5758 *bsym = gjmp(*bsym);
5759 next();
5760 skip(';');
5761 nocode_wanted |= 2;
5762 } else if (tok == TOK_CONTINUE) {
5763 /* compute jump */
5764 if (!csym)
5765 tcc_error("cannot continue");
5766 vla_sp_restore_root();
5767 *csym = gjmp(*csym);
5768 next();
5769 skip(';');
5770 } else if (tok == TOK_FOR) {
5771 int e;
5772 int saved_nocode_wanted;
5773 nocode_wanted &= ~2;
5774 next();
5775 skip('(');
5776 s = local_stack;
5777 ++local_scope;
5778 if (tok != ';') {
5779 /* c99 for-loop init decl? */
5780 if (!decl0(VT_LOCAL, 1)) {
5781 /* no, regular for-loop init expr */
5782 gexpr();
5783 vpop();
5786 skip(';');
5787 d = ind;
5788 c = ind;
5789 vla_sp_restore();
5790 a = 0;
5791 b = 0;
5792 if (tok != ';') {
5793 gexpr();
5794 a = gvtst(1, 0);
5796 skip(';');
5797 if (tok != ')') {
5798 e = gjmp(0);
5799 c = ind;
5800 vla_sp_restore();
5801 gexpr();
5802 vpop();
5803 gjmp_addr(d);
5804 gsym(e);
5806 skip(')');
5807 saved_nocode_wanted = nocode_wanted;
5808 block(&a, &b, 0);
5809 nocode_wanted = saved_nocode_wanted;
5810 if(!nocode_wanted)
5811 gjmp_addr(c);
5812 gsym(a);
5813 gsym_addr(b, c);
5814 --local_scope;
5815 sym_pop(&local_stack, s, 0);
5817 } else
5818 if (tok == TOK_DO) {
5819 int saved_nocode_wanted;
5820 nocode_wanted &= ~2;
5821 next();
5822 a = 0;
5823 b = 0;
5824 d = ind;
5825 vla_sp_restore();
5826 saved_nocode_wanted = nocode_wanted;
5827 block(&a, &b, 0);
5828 nocode_wanted = saved_nocode_wanted;
5829 skip(TOK_WHILE);
5830 skip('(');
5831 gsym(b);
5832 gexpr();
5833 c = gvtst(0, 0);
5834 if (!nocode_wanted)
5835 gsym_addr(c, d);
5836 skip(')');
5837 gsym(a);
5838 skip(';');
5839 } else
5840 if (tok == TOK_SWITCH) {
5841 struct switch_t *saved, sw;
5842 int saved_nocode_wanted = nocode_wanted;
5843 SValue switchval;
5844 next();
5845 skip('(');
5846 gexpr();
5847 skip(')');
5848 switchval = *vtop--;
5849 a = 0;
5850 b = gjmp(0); /* jump to first case */
5851 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5852 saved = cur_switch;
5853 cur_switch = &sw;
5854 block(&a, csym, 0);
5855 nocode_wanted = saved_nocode_wanted;
5856 a = gjmp(a); /* add implicit break */
5857 /* case lookup */
5858 gsym(b);
5859 if (!nocode_wanted) {
5860 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5861 for (b = 1; b < sw.n; b++)
5862 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5863 tcc_error("duplicate case value");
5864 /* Our switch table sorting is signed, so the compared
5865 value needs to be as well when it's 64bit. */
5866 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5867 switchval.type.t &= ~VT_UNSIGNED;
5868 vpushv(&switchval);
5869 gcase(sw.p, sw.n, &a);
5870 vpop();
5871 if (sw.def_sym)
5872 gjmp_addr(sw.def_sym);
5874 dynarray_reset(&sw.p, &sw.n);
5875 cur_switch = saved;
5876 /* break label */
5877 gsym(a);
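/* Example: after sorting, overlapping cases such as `case 2:` inside a
   `case 1 ... 3:` range are detected by the adjacent-entry check above
   and reported as duplicate case values. */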
5878 } else
5879 if (tok == TOK_CASE) {
5880 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5881 if (!cur_switch)
5882 expect("switch");
5883 nocode_wanted &= ~2;
5884 next();
5885 cr->v1 = cr->v2 = expr_const64();
5886 if (gnu_ext && tok == TOK_DOTS) {
5887 next();
5888 cr->v2 = expr_const64();
5889 if (cr->v2 < cr->v1)
5890 tcc_warning("empty case range");
5892 cr->sym = ind;
5893 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5894 skip(':');
5895 is_expr = 0;
5896 goto block_after_label;
5897 } else
5898 if (tok == TOK_DEFAULT) {
5899 next();
5900 skip(':');
5901 if (!cur_switch)
5902 expect("switch");
5903 if (cur_switch->def_sym)
5904 tcc_error("too many 'default'");
5905 cur_switch->def_sym = ind;
5906 is_expr = 0;
5907 goto block_after_label;
5908 } else
5909 if (tok == TOK_GOTO) {
5910 next();
5911 if (tok == '*' && gnu_ext) {
5912 /* computed goto */
5913 next();
5914 gexpr();
5915 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5916 expect("pointer");
5917 if (!nocode_wanted)
5918 ggoto();
5919 else
5920 vtop--;
5921 } else if (tok >= TOK_UIDENT) {
5922 s = label_find(tok);
5923 /* put forward definition if needed */
5924 if (!s) {
5925 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5926 } else {
5927 if (s->r == LABEL_DECLARED)
5928 s->r = LABEL_FORWARD;
5930 vla_sp_restore_root();
5931 if (nocode_wanted)
5933 else if (s->r & LABEL_FORWARD)
5934 s->jnext = gjmp(s->jnext);
5935 else
5936 gjmp_addr(s->jnext);
5937 next();
5938 } else {
5939 expect("label identifier");
5941 skip(';');
5942 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5943 asm_instr();
5944 } else {
5945 b = is_label();
5946 if (b) {
5947 /* label case */
5948 s = label_find(b);
5949 if (s) {
5950 if (s->r == LABEL_DEFINED)
5951 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5952 gsym(s->jnext);
5953 s->r = LABEL_DEFINED;
5954 } else {
5955 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5957 s->jnext = ind;
5958 vla_sp_restore();
5959 /* we accept this, but it is a mistake */
5960 block_after_label:
5961 nocode_wanted &= ~2;
5962 if (tok == '}') {
5963 tcc_warning("deprecated use of label at end of compound statement");
5964 } else {
5965 if (is_expr)
5966 vpop();
5967 block(bsym, csym, is_expr);
5969 } else {
5970 /* expression case */
5971 if (tok != ';') {
5972 if (is_expr) {
5973 vpop();
5974 gexpr();
5975 } else {
5976 gexpr();
5977 vpop();
5980 skip(';');
5985 #define EXPR_CONST 1
5986 #define EXPR_ANY 2
5988 static void parse_init_elem(int expr_type)
5990 int saved_global_expr;
5991 switch(expr_type) {
5992 case EXPR_CONST:
5993 /* compound literals must be allocated globally in this case */
5994 saved_global_expr = global_expr;
5995 global_expr = 1;
5996 expr_const1();
5997 global_expr = saved_global_expr;
5998 /* NOTE: symbols are accepted */
5999 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
6000 tcc_error("initializer element is not constant");
6001 break;
6002 case EXPR_ANY:
6003 expr_eq();
6004 break;
6008 /* 'type' is the array or struct type, 'c' is the array or struct
6009 address. 'cur_field' points to the current field; for arrays its
6010 'c' member contains the current start index and its 'r' member the
6011 end index (in case of range initializers).
6012 'size_only' is true if only size info is needed (only used
6013 for arrays) */
6014 static void decl_designator(CType *type, Section *sec, unsigned long c,
6015 Sym **cur_field, int size_only)
6017 Sym *s, *f;
6018 int notfirst, index, index_last, align, l, nb_elems, elem_size;
6019 CType type1;
6021 notfirst = 0;
6022 elem_size = 0;
6023 nb_elems = 1;
6024 if (gnu_ext && (l = is_label()) != 0)
6025 goto struct_field;
6026 while (tok == '[' || tok == '.') {
6027 if (tok == '[') {
6028 if (!(type->t & VT_ARRAY))
6029 expect("array type");
6030 s = type->ref;
6031 next();
6032 index = expr_const();
6033 if (index < 0 || (s->c >= 0 && index >= s->c))
6034 tcc_error("invalid index");
6035 if (tok == TOK_DOTS && gnu_ext) {
6036 next();
6037 index_last = expr_const();
6038 if (index_last < 0 ||
6039 (s->c >= 0 && index_last >= s->c) ||
6040 index_last < index)
6041 tcc_error("invalid index");
6042 } else {
6043 index_last = index;
6045 skip(']');
6046 if (!notfirst) {
6047 (*cur_field)->c = index;
6048 (*cur_field)->r = index_last;
6050 type = pointed_type(type);
6051 elem_size = type_size(type, &align);
6052 c += index * elem_size;
6053 /* NOTE: we only support ranges for last designator */
6054 nb_elems = index_last - index + 1;
6055 if (nb_elems != 1) {
6056 notfirst = 1;
6057 break;
6059 } else {
6060 next();
6061 l = tok;
6062 next();
6063 struct_field:
6064 if ((type->t & VT_BTYPE) != VT_STRUCT)
6065 expect("struct/union type");
6066 f = find_field(type, l);
6067 if (!f)
6068 expect("field");
6069 if (!notfirst)
6070 *cur_field = f;
6071 /* XXX: fix this mess by using explicit storage field */
6072 type1 = f->type;
6073 type1.t |= (type->t & ~VT_TYPE);
6074 type = &type1;
6075 c += f->c;
6077 notfirst = 1;
6079 if (notfirst) {
6080 if (tok == '=') {
6081 next();
6082 } else {
6083 if (!gnu_ext)
6084 expect("=");
6086 } else {
6087 if (type->t & VT_ARRAY) {
6088 index = (*cur_field)->c;
6089 if (type->ref->c >= 0 && index >= type->ref->c)
6090 tcc_error("index too large");
6091 type = pointed_type(type);
6092 c += index * type_size(type, &align);
6093 } else {
6094 f = *cur_field;
6095 if (!f)
6096 tcc_error("too many field init");
6097 /* XXX: fix this mess by using explicit storage field */
6098 type1 = f->type;
6099 type1.t |= (type->t & ~VT_TYPE);
6100 type = &type1;
6101 c += f->c;
6104 decl_initializer(type, sec, c, 0, size_only);
6106 /* XXX: make it more general */
6107 if (!size_only && nb_elems > 1) {
6108 unsigned long c_end;
6109 uint8_t *src, *dst;
6110 int i;
6112 if (!sec) {
6113 vset(type, VT_LOCAL|VT_LVAL, c);
6114 for (i = 1; i < nb_elems; i++) {
6115 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6116 vswap();
6117 vstore();
6119 vpop();
6120 } else {
6121 c_end = c + nb_elems * elem_size;
6122 if (c_end > sec->data_allocated)
6123 section_realloc(sec, c_end);
6124 src = sec->data + c;
6125 dst = src;
6126 for(i = 1; i < nb_elems; i++) {
6127 dst += elem_size;
6128 memcpy(dst, src, elem_size);
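/* Example of the designators handled above ('struct point' and '.y' are
   just illustrative names):
       int a[10] = { [2] = 5, [4 ... 6] = 1 };   // GNU range designator
       struct point p = { .y = 3 };              // member designator
   For a range the element is initialized once and then replicated over
   the remaining slots (memcpy for static storage, vstore for locals). */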
6134 /* store a value or an expression directly in global data or in local array */
6135 static void init_putv(CType *type, Section *sec, unsigned long c)
6137 int bt, bit_pos, bit_size;
6138 void *ptr;
6139 unsigned long long bit_mask;
6140 CType dtype;
6142 dtype = *type;
6143 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6145 if (sec) {
6146 int size, align;
6147 /* XXX: not portable */
6148 /* XXX: generate error if incorrect relocation */
6149 gen_assign_cast(&dtype);
6150 bt = type->t & VT_BTYPE;
6151 size = type_size(type, &align);
6152 if (c + size > sec->data_allocated) {
6153 section_realloc(sec, c + size);
6155 ptr = sec->data + c;
6156 /* XXX: make code faster ? */
6157 if (!(type->t & VT_BITFIELD)) {
6158 bit_pos = 0;
6159 bit_size = PTR_SIZE * 8;
6160 bit_mask = -1LL;
6161 } else {
6162 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6163 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6164 bit_mask = (1LL << bit_size) - 1;
6166 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6167 vtop->sym->v >= SYM_FIRST_ANOM &&
6168 /* XXX This rejects compound literals like
6169 '(void *){ptr}'. The problem is that '&sym' is
6170 represented the same way, which would be ruled out
6171 by the SYM_FIRST_ANOM check above, but also '"string"'
6172 in 'char *p = "string"' is represented the same
6173 with the type being VT_PTR and the symbol being an
6174 anonymous one. That is, there's no difference in vtop
6175 between '(void *){x}' and '&(void *){x}'. Ignore
6176 pointer typed entities here. Hopefully no real code
6177 will ever use compound literals with scalar types. */
6178 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6179 /* These come from compound literals, memcpy stuff over. */
6180 Section *ssec;
6181 ElfW(Sym) *esym;
6182 ElfW_Rel *rel;
6183 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6184 ssec = tcc_state->sections[esym->st_shndx];
6185 memmove (ptr, ssec->data + esym->st_value, size);
6186 if (ssec->reloc) {
6187 /* We need to copy over all memory contents, and that
6188 includes relocations. Use the fact that relocs are
6189 created in order, so look from the end of the relocs
6190 until we hit one before the copied region. */
6191 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6192 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6193 while (num_relocs--) {
6194 rel--;
6195 if (rel->r_offset >= esym->st_value + size)
6196 continue;
6197 if (rel->r_offset < esym->st_value)
6198 break;
6199 /* Note: if the same fields are initialized multiple
6200 times (possible with designators) then we possibly
6201 add multiple relocations for the same offset here.
6202 That would lead to wrong code, the last reloc needs
6203 to win. We clean this up later after the whole
6204 initializer is parsed. */
6205 put_elf_reloca(symtab_section, sec,
6206 c + rel->r_offset - esym->st_value,
6207 ELFW(R_TYPE)(rel->r_info),
6208 ELFW(R_SYM)(rel->r_info),
6209 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6210 rel->r_addend
6211 #else
6213 #endif
6217 } else {
6218 if ((vtop->r & VT_SYM) &&
6219 (bt == VT_BYTE ||
6220 bt == VT_SHORT ||
6221 bt == VT_DOUBLE ||
6222 bt == VT_LDOUBLE ||
6223 #if PTR_SIZE == 8
6224 (bt == VT_LLONG && bit_size != 64) ||
6225 bt == VT_INT
6226 #else
6227 bt == VT_LLONG ||
6228 (bt == VT_INT && bit_size != 32)
6229 #endif
6231 tcc_error("initializer element is not computable at load time");
6232 switch(bt) {
6233 /* XXX: when cross-compiling we assume that each type has the
6234 same representation on host and target, which is likely to
6235 be wrong in the case of long double */
6236 case VT_BOOL:
6237 vtop->c.i = (vtop->c.i != 0);
6238 case VT_BYTE:
6239 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6240 break;
6241 case VT_SHORT:
6242 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6243 break;
6244 case VT_DOUBLE:
6245 *(double *)ptr = vtop->c.d;
6246 break;
6247 case VT_LDOUBLE:
6248 if (sizeof(long double) == LDOUBLE_SIZE)
6249 *(long double *)ptr = vtop->c.ld;
6250 else if (sizeof(double) == LDOUBLE_SIZE)
6251 *(double *)ptr = vtop->c.ld;
6252 else
6253 tcc_error("can't cross compile long double constants");
6254 break;
6255 #if PTR_SIZE != 8
6256 case VT_LLONG:
6257 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6258 break;
6259 #else
6260 case VT_LLONG:
6261 #endif
6262 case VT_PTR:
6264 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6265 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6266 if (vtop->r & VT_SYM)
6267 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6268 else
6269 *(addr_t *)ptr |= val;
6270 #else
6271 if (vtop->r & VT_SYM)
6272 greloc(sec, vtop->sym, c, R_DATA_PTR);
6273 *(addr_t *)ptr |= val;
6274 #endif
6275 break;
6277 default:
6279 int val = (vtop->c.i & bit_mask) << bit_pos;
6280 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6281 if (vtop->r & VT_SYM)
6282 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6283 else
6284 *(int *)ptr |= val;
6285 #else
6286 if (vtop->r & VT_SYM)
6287 greloc(sec, vtop->sym, c, R_DATA_PTR);
6288 *(int *)ptr |= val;
6289 #endif
6290 break;
6294 vtop--;
6295 } else {
6296 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6297 vswap();
6298 vstore();
6299 vpop();
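/* Example of the static bitfield packing above: with the default layout
   on a little-endian target,
       struct { int a:3, b:5; } s = { 2, 9 };
   stores (2 & 0x7) << 0 | (9 & 0x1f) << 3 into the word, i.e. each value
   is masked with bit_mask and OR'ed in at bit_pos. */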
6303 /* put zeros for variable based init */
6304 static void init_putz(Section *sec, unsigned long c, int size)
6306 if (sec) {
6307 /* nothing to do because globals are already set to zero */
6308 } else {
6309 vpush_global_sym(&func_old_type, TOK_memset);
6310 vseti(VT_LOCAL, c);
6311 #ifdef TCC_TARGET_ARM
6312 vpushs(size);
6313 vpushi(0);
6314 #else
6315 vpushi(0);
6316 vpushs(size);
6317 #endif
6318 gfunc_call(3);
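/* Example: for a local `int a[100] = { 1 };` the 99 trailing elements are
   cleared at run time by the memset() call emitted above, whereas a
   static/global object needs no code because its storage is already
   zero-initialized. */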
6322 /* 't' contains the type and storage info. 'c' is the offset of the
6323 object in section 'sec'. If 'sec' is NULL, it means stack based
6324 allocation. 'first' is true if array '{' must be read (multi
6325 dimension implicit array init handling). 'size_only' is true if
6326 size only evaluation is wanted (only for arrays). */
6327 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6328 int first, int size_only)
6330 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6331 int size1, align1;
6332 int have_elem;
6333 Sym *s, *f;
6334 Sym indexsym;
6335 CType *t1;
6337 /* If we are currently at a '}' or ',' we have read an initializer
6338 element in one of our callers, and not yet consumed it. */
6339 have_elem = tok == '}' || tok == ',';
6340 if (!have_elem && tok != '{' &&
6341 /* In case of strings we have special handling for arrays, so
6342 don't consume them as initializer value (which would commit them
6343 to some anonymous symbol). */
6344 tok != TOK_LSTR && tok != TOK_STR &&
6345 !size_only) {
6346 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6347 have_elem = 1;
6350 if (have_elem &&
6351 !(type->t & VT_ARRAY) &&
6352 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6353 The source type might have VT_CONSTANT set, which is
6354 of course assignable to non-const elements. */
6355 is_compatible_parameter_types(type, &vtop->type)) {
6356 init_putv(type, sec, c);
6357 } else if (type->t & VT_ARRAY) {
6358 s = type->ref;
6359 n = s->c;
6360 array_length = 0;
6361 t1 = pointed_type(type);
6362 size1 = type_size(t1, &align1);
6364 no_oblock = 1;
6365 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6366 tok == '{') {
6367 if (tok != '{')
6368 tcc_error("character array initializer must be a literal,"
6369 " optionally enclosed in braces");
6370 skip('{');
6371 no_oblock = 0;
6374 /* only parse strings here if correct type (otherwise: handle
6375 them as ((w)char *) expressions */
6376 if ((tok == TOK_LSTR &&
6377 #ifdef TCC_TARGET_PE
6378 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6379 #else
6380 (t1->t & VT_BTYPE) == VT_INT
6381 #endif
6382 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6383 while (tok == TOK_STR || tok == TOK_LSTR) {
6384 int cstr_len, ch;
6386 /* compute maximum number of chars wanted */
6387 if (tok == TOK_STR)
6388 cstr_len = tokc.str.size;
6389 else
6390 cstr_len = tokc.str.size / sizeof(nwchar_t);
6391 cstr_len--;
6392 nb = cstr_len;
6393 if (n >= 0 && nb > (n - array_length))
6394 nb = n - array_length;
6395 if (!size_only) {
6396 if (cstr_len > nb)
6397 tcc_warning("initializer-string for array is too long");
6398 /* in order to go faster for the common case (a char
6399 string in a global variable), we handle it
6400 specially */
6401 if (sec && tok == TOK_STR && size1 == 1) {
6402 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6403 } else {
6404 for(i=0;i<nb;i++) {
6405 if (tok == TOK_STR)
6406 ch = ((unsigned char *)tokc.str.data)[i];
6407 else
6408 ch = ((nwchar_t *)tokc.str.data)[i];
6409 vpushi(ch);
6410 init_putv(t1, sec, c + (array_length + i) * size1);
6414 array_length += nb;
6415 next();
6417 /* only add trailing zero if enough storage (no
6418 warning in this case since it is standard) */
6419 if (n < 0 || array_length < n) {
6420 if (!size_only) {
6421 vpushi(0);
6422 init_putv(t1, sec, c + (array_length * size1));
6424 array_length++;
6426 } else {
6427 indexsym.c = 0;
6428 indexsym.r = 0;
6429 f = &indexsym;
6431 do_init_list:
6432 while (tok != '}' || have_elem) {
6433 decl_designator(type, sec, c, &f, size_only);
6434 have_elem = 0;
6435 index = f->c;
6436 /* must put zero in holes (note that doing it that way
6437 ensures that it even works with designators) */
6438 if (!size_only && array_length < index) {
6439 init_putz(sec, c + array_length * size1,
6440 (index - array_length) * size1);
6442 if (type->t & VT_ARRAY) {
6443 index = indexsym.c = ++indexsym.r;
6444 } else {
6445 index = index + type_size(&f->type, &align1);
6446 if (s->type.t == TOK_UNION)
6447 f = NULL;
6448 else
6449 f = f->next;
6451 if (index > array_length)
6452 array_length = index;
6454 if (type->t & VT_ARRAY) {
6455 /* special test for multi dimensional arrays (may not
6456 be strictly correct if designators are used at the
6457 same time) */
6458 if (no_oblock && index >= n)
6459 break;
6460 } else {
6461 if (no_oblock && f == NULL)
6462 break;
6464 if (tok == '}')
6465 break;
6466 skip(',');
6469 /* put zeros at the end */
6470 if (!size_only && array_length < n) {
6471 init_putz(sec, c + array_length * size1,
6472 (n - array_length) * size1);
6474 if (!no_oblock)
6475 skip('}');
6476 /* patch type size if needed, which happens only for array types */
6477 if (n < 0)
6478 s->c = array_length;
6479 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6480 size1 = 1;
6481 no_oblock = 1;
6482 if (first || tok == '{') {
6483 skip('{');
6484 no_oblock = 0;
6486 s = type->ref;
6487 f = s->next;
6488 array_length = 0;
6489 n = s->c;
6490 goto do_init_list;
6491 } else if (tok == '{') {
6492 next();
6493 decl_initializer(type, sec, c, first, size_only);
6494 skip('}');
6495 } else if (size_only) {
6496 /* If we supported only ISO C we wouldn't have to accept calling
6497 this on anything other than an array with size_only==1 (and even then
6498 only on the outermost level, so no recursion would be needed),
6499 because initializing a flex array member isn't supported.
6500 But GNU C supports it, so we need to recurse even into
6501 subfields of structs and arrays when size_only is set. */
6502 /* just skip expression */
6503 parlevel = parlevel1 = 0;
6504 while ((parlevel > 0 || parlevel1 > 0 ||
6505 (tok != '}' && tok != ',')) && tok != -1) {
6506 if (tok == '(')
6507 parlevel++;
6508 else if (tok == ')') {
6509 if (parlevel == 0 && parlevel1 == 0)
6510 break;
6511 parlevel--;
6513 else if (tok == '{')
6514 parlevel1++;
6515 else if (tok == '}') {
6516 if (parlevel == 0 && parlevel1 == 0)
6517 break;
6518 parlevel1--;
6520 next();
6522 } else {
6523 if (!have_elem) {
6524 /* This should happen only when we haven't parsed
6525 the init element above for fear of committing a
6526 string constant to memory too early. */
6527 if (tok != TOK_STR && tok != TOK_LSTR)
6528 expect("string constant");
6529 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6531 init_putv(type, sec, c);
6535 /* parse an initializer for type 't' if 'has_init' is non zero, and
6536 allocate space in local or global data space ('r' is either
6537 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6538 variable 'v' of scope 'scope' is declared before initializers
6539 are parsed. If 'v' is zero, then a reference to the new object
6540 is put in the value stack. If 'has_init' is 2, a special parsing
6541 is done to handle string constants. */
6542 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6543 int has_init, int v, int scope)
6545 int size, align, addr, data_offset;
6546 int level;
6547 ParseState saved_parse_state = {0};
6548 TokenString *init_str = NULL;
6549 Section *sec;
6550 Sym *flexible_array;
6552 flexible_array = NULL;
6553 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6554 Sym *field = type->ref->next;
6555 if (field) {
6556 while (field->next)
6557 field = field->next;
6558 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6559 flexible_array = field;
6563 size = type_size(type, &align);
6564 /* If unknown size, we must evaluate it before
6565 evaluating initializers because
6566 initializers can generate global data too
6567 (e.g. string pointers or ISOC99 compound
6568 literals). It also simplifies local
6569 initializer handling */
6570 if (size < 0 || (flexible_array && has_init)) {
6571 if (!has_init)
6572 tcc_error("unknown type size");
6573 /* get all init string */
6574 init_str = tok_str_alloc();
6575 if (has_init == 2) {
6576 /* only get strings */
6577 while (tok == TOK_STR || tok == TOK_LSTR) {
6578 tok_str_add_tok(init_str);
6579 next();
6581 } else {
6582 level = 0;
6583 while (level > 0 || (tok != ',' && tok != ';')) {
6584 if (tok < 0)
6585 tcc_error("unexpected end of file in initializer");
6586 tok_str_add_tok(init_str);
6587 if (tok == '{')
6588 level++;
6589 else if (tok == '}') {
6590 level--;
6591 if (level <= 0) {
6592 next();
6593 break;
6596 next();
6599 tok_str_add(init_str, -1);
6600 tok_str_add(init_str, 0);
6602 /* compute size */
6603 save_parse_state(&saved_parse_state);
6605 begin_macro(init_str, 1);
6606 next();
6607 decl_initializer(type, NULL, 0, 1, 1);
6608 /* prepare second initializer parsing */
6609 macro_ptr = init_str->str;
6610 next();
6612 /* if still unknown size, error */
6613 size = type_size(type, &align);
6614 if (size < 0)
6615 tcc_error("unknown type size");
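/* Example: for `int a[] = { 1, 2, 3 };` the initializer tokens are
   recorded and parsed once in size-only mode just to learn that the
   array has 3 elements, then replayed (via macro_ptr) for the real
   initialization once storage has been allocated; the same two-pass
   trick sizes structs whose flexible array member is initialized. */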
6617 /* If there's a flexible array member and it was used in the initializer,
6618 adjust the size. */
6619 if (flexible_array &&
6620 flexible_array->type.ref->c > 0)
6621 size += flexible_array->type.ref->c
6622 * pointed_size(&flexible_array->type);
6623 /* take into account specified alignment if bigger */
6624 if (ad->a.aligned) {
6625 if (ad->a.aligned > align)
6626 align = ad->a.aligned;
6627 } else if (ad->a.packed) {
6628 align = 1;
6630 if ((r & VT_VALMASK) == VT_LOCAL) {
6631 sec = NULL;
6632 #ifdef CONFIG_TCC_BCHECK
6633 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6634 loc--;
6636 #endif
6637 loc = (loc - size) & -align;
6638 addr = loc;
6639 #ifdef CONFIG_TCC_BCHECK
6640 /* handles bounds */
6641 /* XXX: currently, since we do only one pass, we cannot track
6642 '&' operators, so we add only arrays */
6643 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6644 addr_t *bounds_ptr;
6645 /* add padding between regions */
6646 loc--;
6647 /* then add local bound info */
6648 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6649 bounds_ptr[0] = addr;
6650 bounds_ptr[1] = size;
6652 #endif
6653 if (v) {
6654 /* local variable */
6655 #ifdef CONFIG_TCC_ASM
6656 if (ad->asm_label) {
6657 int reg = asm_parse_regvar(ad->asm_label);
6658 if (reg >= 0)
6659 r = (r & ~VT_VALMASK) | reg;
6661 #endif
6662 sym_push(v, type, r, addr);
6663 } else {
6664 /* push local reference */
6665 vset(type, r, addr);
6667 } else {
6668 Sym *sym;
6670 sym = NULL;
6671 if (v && scope == VT_CONST) {
6672 /* see if the symbol was already defined */
6673 sym = sym_find(v);
6674 if (sym) {
6675 if (!is_compatible_types(&sym->type, type))
6676 tcc_error("incompatible types for redefinition of '%s'",
6677 get_tok_str(v, NULL));
6678 if (sym->type.t & VT_EXTERN) {
6679 /* if the variable is extern, it was not allocated */
6680 sym->type.t &= ~VT_EXTERN;
6681 /* set array size if it was omitted in extern
6682 declaration */
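/* Illustrative example (assumed input, not code from this file):

       extern int tab[];      (size still unknown, ref->c < 0)
       int tab[4];            (this definition supplies the size)
*/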
6683 if ((sym->type.t & VT_ARRAY) &&
6684 sym->type.ref->c < 0 &&
6685 type->ref->c >= 0)
6686 sym->type.ref->c = type->ref->c;
6687 } else {
6688 /* we accept several tentative definitions of the same
6689 global variable. This is tricky, because we
6690 must play with the SHN_COMMON type of the symbol */
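/* Illustrative example (assumed input, not code from this file):
   repeated tentative definitions of the same object are accepted,

       int counter;
       int counter;

   and resolved to a single SHN_COMMON symbol. */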
6691 /* XXX: should check if the variable was already
6692 initialized. It is incorrect to initialize it
6693 twice */
6694 /* no init data, we won't add more to the symbol */
6695 if (!has_init)
6696 goto no_alloc;
6701 /* allocate symbol in corresponding section */
6702 sec = ad->section;
6703 if (!sec) {
6704 if (has_init)
6705 sec = data_section;
6706 else if (tcc_state->nocommon)
6707 sec = bss_section;
6709 if (sec) {
6710 data_offset = sec->data_offset;
6711 data_offset = (data_offset + align - 1) & -align;
6712 addr = data_offset;
6713 /* very important to advance data_offset now, because the
6714 initializer itself can allocate new global data (e.g. strings) */
6715 data_offset += size;
6716 #ifdef CONFIG_TCC_BCHECK
6717 /* add padding if bound check */
6718 if (tcc_state->do_bounds_check)
6719 data_offset++;
6720 #endif
6721 sec->data_offset = data_offset;
6722 /* allocate section space to put the data */
6723 if (sec->sh_type != SHT_NOBITS &&
6724 data_offset > sec->data_allocated)
6725 section_realloc(sec, data_offset);
6726 /* align section if needed */
6727 if (align > sec->sh_addralign)
6728 sec->sh_addralign = align;
6729 } else {
6730 addr = 0; /* avoid warning */
6733 if (v) {
6734 if (scope != VT_CONST || !sym) {
6735 sym = sym_push(v, type, r | VT_SYM, 0);
6736 sym->asm_label = ad->asm_label;
6738 /* update symbol definition */
6739 if (sec) {
6740 put_extern_sym(sym, sec, addr, size);
6741 } else {
6742 ElfW(Sym) *esym;
6743 /* put a common area */
6744 put_extern_sym(sym, NULL, align, size);
6745 /* XXX: find a nicer way */
6746 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6747 esym->st_shndx = SHN_COMMON;
6749 } else {
6750 /* push global reference */
6751 sym = get_sym_ref(type, sec, addr, size);
6752 vpushsym(type, sym);
6754 /* patch symbol weakness */
6755 if (type->t & VT_WEAK)
6756 weaken_symbol(sym);
6757 apply_visibility(sym, type);
6758 #ifdef CONFIG_TCC_BCHECK
6759 /* handle bounds now because the symbol must be defined
6760 before the relocation can be emitted */
6761 if (tcc_state->do_bounds_check) {
6762 addr_t *bounds_ptr;
6764 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6765 /* then add global bound info */
6766 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6767 bounds_ptr[0] = 0; /* relocated */
6768 bounds_ptr[1] = size;
6770 #endif
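/* Illustrative example (assumed input, not code from this file): the VLA
   branch below covers objects whose size is only known at run time,

       void f(int n) {
           int a[n];          (stack space reserved through gen_vla_alloc())
       }
*/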
6772 if (type->t & VT_VLA) {
6773 int a;
6775 /* save current stack pointer */
6776 if (vlas_in_scope == 0) {
6777 if (vla_sp_root_loc == -1)
6778 vla_sp_root_loc = (loc -= PTR_SIZE);
6779 gen_vla_sp_save(vla_sp_root_loc);
6782 vla_runtime_type_size(type, &a);
6783 gen_vla_alloc(type, a);
6784 gen_vla_sp_save(addr);
6785 vla_sp_loc = addr;
6786 vlas_in_scope++;
6787 } else if (has_init) {
6788 size_t oldreloc_offset = 0;
6789 if (sec && sec->reloc)
6790 oldreloc_offset = sec->reloc->data_offset;
6791 decl_initializer(type, sec, addr, 1, 0);
6792 if (sec && sec->reloc)
6793 squeeze_multi_relocs(sec, oldreloc_offset);
6794 /* patch flexible array member size back to -1, */
6795 /* for possible subsequent similar declarations */
6796 if (flexible_array)
6797 flexible_array->type.ref->c = -1;
6799 no_alloc: ;
6800 /* restore parse state if needed */
6801 if (init_str) {
6802 end_macro();
6803 restore_parse_state(&saved_parse_state);
6807 static void put_func_debug(Sym *sym)
6809 char buf[512];
6811 /* stabs info */
6812 /* XXX: we put here a dummy type */
6813 snprintf(buf, sizeof(buf), "%s:%c1",
6814 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
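/* For example (assumed, derived from the format string above): a static
   "void helper(void)" yields the stab string "helper:f1", while an
   exported "int main(void)" yields "main:F1". */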
6815 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6816 cur_text_section, sym->c);
6817 /* //gr gdb wants a line at the function */
6818 put_stabn(N_SLINE, 0, file->line_num, 0);
6819 last_ind = 0;
6820 last_line_num = 0;
6823 /* parse an old style function declaration list */
6824 /* XXX: check for multiple declarations of the same parameter */
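/* Illustrative example (assumed input, not code from this file) of the
   old-style definition whose declaration list is parsed here:

       int add(a, b)
           int a;
           int b;
       {
           return a + b;
       }
*/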
6825 static void func_decl_list(Sym *func_sym)
6827 AttributeDef ad;
6828 int v;
6829 Sym *s;
6830 CType btype, type;
6832 /* parse each declaration */
6833 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6834 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6835 if (!parse_btype(&btype, &ad))
6836 expect("declaration list");
6837 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6838 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6839 tok == ';') {
6840 /* a bare enum/struct declaration defines no variables */
6841 } else {
6842 for(;;) {
6843 type = btype;
6844 type_decl(&type, &ad, &v, TYPE_DIRECT);
6845 /* find parameter in function parameter list */
6846 s = func_sym->next;
6847 while (s != NULL) {
6848 if ((s->v & ~SYM_FIELD) == v)
6849 goto found;
6850 s = s->next;
6852 tcc_error("declaration for parameter '%s' but no such parameter",
6853 get_tok_str(v, NULL));
6854 found:
6855 /* check that no storage specifier except 'register' was given */
6856 if (type.t & VT_STORAGE)
6857 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6858 convert_parameter_type(&type);
6859 /* we can add the type (NOTE: it could be local to the function) */
6860 s->type = type;
6861 /* accept other parameters */
6862 if (tok == ',')
6863 next();
6864 else
6865 break;
6868 skip(';');
6872 /* parse a function defined by symbol 'sym' and generate its code in
6873 'cur_text_section' */
6874 static void gen_function(Sym *sym)
6876 int saved_nocode_wanted = nocode_wanted;
6878 nocode_wanted = 0;
6879 ind = cur_text_section->data_offset;
6880 /* NOTE: we patch the symbol size later */
6881 put_extern_sym(sym, cur_text_section, ind, 0);
6882 funcname = get_tok_str(sym->v, NULL);
6883 func_ind = ind;
6884 /* Initialize VLA state */
6885 vla_sp_loc = -1;
6886 vla_sp_root_loc = -1;
6887 /* put debug symbol */
6888 if (tcc_state->do_debug)
6889 put_func_debug(sym);
6891 /* push a dummy symbol to enable local sym storage */
6892 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6893 local_scope = 1; /* for function parameters */
6894 gfunc_prolog(&sym->type);
6895 local_scope = 0;
6897 rsym = 0;
6898 block(NULL, NULL, 0);
6899 gsym(rsym);
6900 gfunc_epilog();
6901 cur_text_section->data_offset = ind;
6902 label_pop(&global_label_stack, NULL);
6903 /* reset local stack */
6904 local_scope = 0;
6905 sym_pop(&local_stack, NULL, 0);
6906 /* end of function */
6907 /* patch symbol size */
6908 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6909 ind - func_ind;
6910 /* patch symbol weakness (this definition overrules any prototype) */
6911 if (sym->type.t & VT_WEAK)
6912 weaken_symbol(sym);
6913 apply_visibility(sym, &sym->type);
6914 if (tcc_state->do_debug) {
6915 put_stabn(N_FUN, 0, 0, ind - func_ind);
6917 /* It's better to crash than to generate wrong code */
6918 cur_text_section = NULL;
6919 funcname = ""; /* for safety */
6920 func_vt.t = VT_VOID; /* for safety */
6921 func_var = 0; /* for safety */
6922 ind = 0; /* for safety */
6923 nocode_wanted = saved_nocode_wanted;
6924 check_vstack();
6927 static void gen_inline_functions(TCCState *s)
6929 Sym *sym;
6930 int inline_generated, i, ln;
6931 struct InlineFunc *fn;
6933 ln = file->line_num;
6934 /* iterate while inline functions are referenced */
6935 for(;;) {
6936 inline_generated = 0;
6937 for (i = 0; i < s->nb_inline_fns; ++i) {
6938 fn = s->inline_fns[i];
6939 sym = fn->sym;
6940 if (sym && sym->c) {
6941 /* the function was used: generate its code and
6942 convert it to a normal function */
6943 fn->sym = NULL;
6944 if (file)
6945 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6946 sym->r = VT_SYM | VT_CONST;
6947 sym->type.t &= ~VT_INLINE;
6949 begin_macro(fn->func_str, 1);
6950 next();
6951 cur_text_section = text_section;
6952 gen_function(sym);
6953 end_macro();
6955 inline_generated = 1;
6958 if (!inline_generated)
6959 break;
6961 file->line_num = ln;
6964 ST_FUNC void free_inline_functions(TCCState *s)
6966 int i;
6967 /* free tokens of unused inline functions */
6968 for (i = 0; i < s->nb_inline_fns; ++i) {
6969 struct InlineFunc *fn = s->inline_fns[i];
6970 if (fn->sym)
6971 tok_str_free(fn->func_str);
6973 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6976 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6977 static int decl0(int l, int is_for_loop_init)
6979 int v, has_init, r;
6980 CType type, btype;
6981 Sym *sym;
6982 AttributeDef ad;
6984 while (1) {
6985 if (!parse_btype(&btype, &ad)) {
6986 if (is_for_loop_init)
6987 return 0;
6988 /* skip redundant ';' */
6989 /* XXX: find more elegant solution */
6990 if (tok == ';') {
6991 next();
6992 continue;
6994 if (l == VT_CONST &&
6995 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6996 /* global asm block */
6997 asm_global_instr();
6998 continue;
7000 /* special test for old K&R protos without explicit int
7001 type. Only accepted when defining global data */
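/* Illustrative example (assumed input, not code from this file): at file
   scope a declaration without a type keyword gets an implicit int,

       count;          (treated as "int count;")
       old_fn();       (treated as "int old_fn();")
*/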
7002 if (l == VT_LOCAL || tok < TOK_UIDENT)
7003 break;
7004 btype.t = VT_INT;
7006 if (((btype.t & VT_BTYPE) == VT_ENUM ||
7007 (btype.t & VT_BTYPE) == VT_STRUCT) &&
7008 tok == ';') {
7009 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7010 int v = btype.ref->v;
7011 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7012 tcc_warning("unnamed struct/union that defines no instances");
7014 next();
7015 continue;
7017 while (1) { /* iterate through each declaration */
7018 type = btype;
7019 /* If the base type itself was an array type of unspecified
7020 size (like in 'typedef int arr[]; arr x = {1};') then
7021 we will overwrite the unknown size by the real one for
7022 this decl. We need to unshare the ref symbol holding
7023 that size. */
7024 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7025 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7027 type_decl(&type, &ad, &v, TYPE_DIRECT);
7028 #if 0
7030 char buf[500];
7031 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
7032 printf("type = '%s'\n", buf);
7034 #endif
7035 if ((type.t & VT_BTYPE) == VT_FUNC) {
7036 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7037 tcc_error("function without file scope cannot be static");
7039 /* if old style function prototype, we accept a
7040 declaration list */
7041 sym = type.ref;
7042 if (sym->c == FUNC_OLD)
7043 func_decl_list(sym);
7046 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7047 ad.asm_label = asm_label_instr();
7048 /* parse one last attribute list, after asm label */
7049 parse_attribute(&ad);
7050 if (tok == '{')
7051 expect(";");
7054 if (ad.a.weak)
7055 type.t |= VT_WEAK;
7056 #ifdef TCC_TARGET_PE
7057 if (ad.a.func_import)
7058 type.t |= VT_IMPORT;
7059 if (ad.a.func_export)
7060 type.t |= VT_EXPORT;
7061 #endif
7062 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7064 if (tok == '{') {
7065 if (l == VT_LOCAL)
7066 tcc_error("cannot use local functions");
7067 if ((type.t & VT_BTYPE) != VT_FUNC)
7068 expect("function definition");
7070 /* reject abstract declarators in function definition */
7071 sym = type.ref;
7072 while ((sym = sym->next) != NULL)
7073 if (!(sym->v & ~SYM_FIELD))
7074 expect("identifier");
7076 /* XXX: cannot do better now: convert extern inline to static inline */
7077 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7078 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7080 sym = sym_find(v);
7081 if (sym) {
7082 Sym *ref;
7083 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7084 goto func_error1;
7086 ref = sym->type.ref;
7087 if (0 == ref->a.func_proto)
7088 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7090 /* use func_call from prototype if not defined */
7091 if (ref->a.func_call != FUNC_CDECL
7092 && type.ref->a.func_call == FUNC_CDECL)
7093 type.ref->a.func_call = ref->a.func_call;
7095 /* use export from prototype */
7096 if (ref->a.func_export)
7097 type.ref->a.func_export = 1;
7099 /* use static from prototype */
7100 if (sym->type.t & VT_STATIC)
7101 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7103 /* If the definition has no visibility use the
7104 one from the prototype. */
7105 if (! (type.t & VT_VIS_MASK))
7106 type.t |= sym->type.t & VT_VIS_MASK;
7108 if (!is_compatible_types(&sym->type, &type)) {
7109 func_error1:
7110 tcc_error("incompatible types for redefinition of '%s'",
7111 get_tok_str(v, NULL));
7113 type.ref->a.func_proto = 0;
7114 /* if symbol is already defined, then put complete type */
7115 sym->type = type;
7116 } else {
7117 /* put function symbol */
7118 sym = global_identifier_push(v, type.t, 0);
7119 sym->type.ref = type.ref;
7122 /* static inline functions are just recorded as a kind
7123 of macro. Their code will be emitted at the end of
7124 the compilation unit only if they are used */
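/* Illustrative example (assumed input, not code from this file): for

       static inline int sq(int x) { return x * x; }

   only the token string is saved here; gen_inline_functions() emits the
   code later, and only if sq() is actually referenced. */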
7125 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7126 (VT_INLINE | VT_STATIC)) {
7127 int block_level;
7128 struct InlineFunc *fn;
7129 const char *filename;
7131 filename = file ? file->filename : "";
7132 fn = tcc_malloc(sizeof *fn + strlen(filename));
7133 strcpy(fn->filename, filename);
7134 fn->sym = sym;
7135 fn->func_str = tok_str_alloc();
7137 block_level = 0;
7138 for(;;) {
7139 int t;
7140 if (tok == TOK_EOF)
7141 tcc_error("unexpected end of file");
7142 tok_str_add_tok(fn->func_str);
7143 t = tok;
7144 next();
7145 if (t == '{') {
7146 block_level++;
7147 } else if (t == '}') {
7148 block_level--;
7149 if (block_level == 0)
7150 break;
7153 tok_str_add(fn->func_str, -1);
7154 tok_str_add(fn->func_str, 0);
7155 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7157 } else {
7158 /* compute text section */
7159 cur_text_section = ad.section;
7160 if (!cur_text_section)
7161 cur_text_section = text_section;
7162 sym->r = VT_SYM | VT_CONST;
7163 gen_function(sym);
7165 break;
7166 } else {
7167 if (btype.t & VT_TYPEDEF) {
7168 /* save typedefed type */
7169 /* XXX: test storage specifiers ? */
7170 sym = sym_find(v);
7171 if (sym && sym->scope == local_scope) {
7172 if (!is_compatible_types(&sym->type, &type)
7173 || !(sym->type.t & VT_TYPEDEF))
7174 tcc_error("incompatible redefinition of '%s'",
7175 get_tok_str(v, NULL));
7176 sym->type = type;
7177 } else {
7178 sym = sym_push(v, &type, 0, 0);
7180 sym->a = ad.a;
7181 sym->type.t |= VT_TYPEDEF;
7182 } else {
7183 r = 0;
7184 if ((type.t & VT_BTYPE) == VT_FUNC) {
7185 /* external function declaration */
7186 /* specific case for func_call attribute */
7187 ad.a.func_proto = 1;
7188 type.ref->a = ad.a;
7189 } else if (!(type.t & VT_ARRAY)) {
7190 /* not lvalue if array */
7191 r |= lvalue_type(type.t);
7193 has_init = (tok == '=');
7194 if (has_init && (type.t & VT_VLA))
7195 tcc_error("variable length array cannot be initialized");
7196 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7197 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7198 !has_init && l == VT_CONST && type.ref->c < 0)) {
7199 /* external variable or function */
7200 /* NOTE: like GCC, uninitialized global static
7201 arrays of unspecified size are considered
7202 extern */
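/* Illustrative example (assumed input, not code from this file):

       static int tab[];       (no size, no initializer: kept extern-like)
       static int tab2[4];     (allocated normally)
*/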
7203 sym = external_sym(v, &type, r);
7204 sym->asm_label = ad.asm_label;
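/* Illustrative example (assumed input, not code from this file) of the
   alias attribute handled below; the target must already be defined:

       int real_impl(void) { return 1; }
       int wrapper(void) __attribute__((alias("real_impl")));
*/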
7206 if (ad.alias_target) {
7207 Section tsec;
7208 ElfW(Sym) *esym;
7209 Sym *alias_target;
7211 alias_target = sym_find(ad.alias_target);
7212 if (!alias_target || !alias_target->c)
7213 tcc_error("unsupported forward __alias__ attribute");
7214 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7215 tsec.sh_num = esym->st_shndx;
7216 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7218 } else {
7219 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7220 if (type.t & VT_STATIC)
7221 r |= VT_CONST;
7222 else
7223 r |= l;
7224 if (has_init)
7225 next();
7226 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7229 if (tok != ',') {
7230 if (is_for_loop_init)
7231 return 1;
7232 skip(';');
7233 break;
7235 next();
7237 ad.a.aligned = 0;
7240 return 0;
7243 ST_FUNC void decl(int l)
7245 decl0(l, 0);
7248 /* ------------------------------------------------------------------------- */