1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* location on the stack of the variable that saves the stack pointer whenever the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
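/* note: vstack is defined as __vstack + 1 (see tcc.h in this tree), so an
   empty value stack is represented by vtop == vstack - 1 == __vstack */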
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
72 static void gen_cast(CType *type);
73 static inline CType *pointed_type(CType *type);
74 static int is_compatible_types(CType *type1, CType *type2);
75 static int parse_btype(CType *type, AttributeDef *ad);
76 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
77 static void parse_expr_type(CType *type);
78 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
79 static void block(int *bsym, int *csym, int is_expr);
80 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
81 static int decl0(int l, int is_for_loop_init);
82 static void expr_eq(void);
83 static void expr_lor_const(void);
84 static void unary_type(CType *type);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static void expr_type(CType *type);
90 static inline int64_t expr_const64(void);
91 ST_FUNC void vpush64(int ty, unsigned long long v);
92 ST_FUNC void vpush(CType *type);
93 ST_FUNC int gvtst(int inv, int t);
94 ST_FUNC int is_btype_size(int bt);
95 static void gen_inline_functions(TCCState *s);
97 ST_INLN int is_float(int t)
99 int bt;
100 bt = t & VT_BTYPE;
101 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
104 /* we use our own 'finite' function to avoid potential problems with
105 non-standard math libs */
106 /* XXX: endianness dependent */
107 ST_FUNC int ieee_finite(double d)
109 int p[4];
110 memcpy(p, &d, sizeof(double));
111 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
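/* explanation: p[1] is the high word of the double on a little-endian host;
   OR-ing with 0x800fffff leaves only the 11 exponent bits undetermined, so
   the +1 wraps to 0 exactly when the exponent field is all ones (Inf/NaN).
   Bit 31 of the result is therefore 1 for finite values, 0 otherwise. */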
114 ST_FUNC void test_lvalue(void)
116 if (!(vtop->r & VT_LVAL))
117 expect("lvalue");
120 ST_FUNC void check_vstack(void)
122 if (pvtop != vtop)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
129 #if 0
130 void pv (const char *lbl, int a, int b)
132 int i;
133 for (i = a; i < a + b; ++i) {
134 SValue *p = &vtop[-i];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
139 #endif
141 /* ------------------------------------------------------------------------- */
142 ST_FUNC void tccgen_start(TCCState *s1)
144 cur_text_section = NULL;
145 funcname = "";
146 anon_sym = SYM_FIRST_ANOM;
147 section_sym = 0;
148 nocode_wanted = 1;
150 /* define some often used types */
151 int_type.t = VT_INT;
152 char_pointer_type.t = VT_BYTE;
153 mk_pointer(&char_pointer_type);
154 #if PTR_SIZE == 4
155 size_type.t = VT_INT;
156 #else
157 size_type.t = VT_LLONG;
158 #endif
159 func_old_type.t = VT_FUNC;
160 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
162 if (s1->do_debug) {
163 char buf[512];
165 /* file info: full path + filename */
166 section_sym = put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
168 text_section->sh_num, NULL);
169 getcwd(buf, sizeof(buf));
170 #ifdef _WIN32
171 normalize_slashes(buf);
172 #endif
173 pstrcat(buf, sizeof(buf), "/");
174 put_stabs_r(buf, N_SO, 0, 0,
175 text_section->data_offset, text_section, section_sym);
176 put_stabs_r(file->filename, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
179 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
180 symbols can be safely used */
181 put_elf_sym(symtab_section, 0, 0,
182 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
183 SHN_ABS, file->filename);
185 #ifdef TCC_TARGET_ARM
186 arm_init(s1);
187 #endif
190 ST_FUNC void tccgen_end(TCCState *s1)
192 gen_inline_functions(s1);
193 check_vstack();
194 /* end of translation unit info */
195 if (s1->do_debug) {
196 put_stabs_r(NULL, N_SO, 0, 0,
197 text_section->data_offset, text_section, section_sym);
201 /* ------------------------------------------------------------------------- */
202 /* update sym->c so that it points to an external symbol in section
203 'section' with value 'value' */
205 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
206 addr_t value, unsigned long size,
207 int can_add_underscore)
209 int sym_type, sym_bind, sh_num, info, other;
210 ElfW(Sym) *esym;
211 const char *name;
212 char buf1[256];
214 #ifdef CONFIG_TCC_BCHECK
215 char buf[32];
216 #endif
218 if (section == NULL)
219 sh_num = SHN_UNDEF;
220 else if (section == SECTION_ABS)
221 sh_num = SHN_ABS;
222 else
223 sh_num = section->sh_num;
225 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
226 sym_type = STT_FUNC;
227 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
228 sym_type = STT_NOTYPE;
229 } else {
230 sym_type = STT_OBJECT;
233 if (sym->type.t & VT_STATIC)
234 sym_bind = STB_LOCAL;
235 else {
236 if (sym->type.t & VT_WEAK)
237 sym_bind = STB_WEAK;
238 else
239 sym_bind = STB_GLOBAL;
242 if (!sym->c) {
243 name = get_tok_str(sym->v, NULL);
244 #ifdef CONFIG_TCC_BCHECK
245 if (tcc_state->do_bounds_check) {
246 /* XXX: avoid doing that for statics ? */
247 /* if bound checking is activated, we change some function
248 names by adding the "__bound" prefix */
249 switch(sym->v) {
250 #ifdef TCC_TARGET_PE
251 /* XXX: we rely only on malloc hooks */
252 case TOK_malloc:
253 case TOK_free:
254 case TOK_realloc:
255 case TOK_memalign:
256 case TOK_calloc:
257 #endif
258 case TOK_memcpy:
259 case TOK_memmove:
260 case TOK_memset:
261 case TOK_strlen:
262 case TOK_strcpy:
263 case TOK_alloca:
264 strcpy(buf, "__bound_");
265 strcat(buf, name);
266 name = buf;
267 break;
270 #endif
271 other = 0;
273 #ifdef TCC_TARGET_PE
274 if (sym->type.t & VT_EXPORT)
275 other |= ST_PE_EXPORT;
276 if (sym_type == STT_FUNC && sym->type.ref) {
277 Sym *ref = sym->type.ref;
278 if (ref->a.func_export)
279 other |= ST_PE_EXPORT;
280 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
281 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
282 name = buf1;
283 other |= ST_PE_STDCALL;
284 can_add_underscore = 0;
286 } else {
287 if (find_elf_sym(tcc_state->dynsymtab_section, name))
288 other |= ST_PE_IMPORT;
289 if (sym->type.t & VT_IMPORT)
290 other |= ST_PE_IMPORT;
292 #else
293 if (! (sym->type.t & VT_STATIC))
294 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
295 #endif
296 if (tcc_state->leading_underscore && can_add_underscore) {
297 buf1[0] = '_';
298 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
299 name = buf1;
301 if (sym->asm_label) {
302 name = get_tok_str(sym->asm_label, NULL);
304 info = ELFW(ST_INFO)(sym_bind, sym_type);
305 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
306 } else {
307 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
308 esym->st_value = value;
309 esym->st_size = size;
310 esym->st_shndx = sh_num;
314 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
315 addr_t value, unsigned long size)
317 put_extern_sym2(sym, section, value, size, 1);
320 /* add a new relocation entry to symbol 'sym' in section 's' */
321 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
322 addr_t addend)
324 int c = 0;
325 if (sym) {
326 if (0 == sym->c)
327 put_extern_sym(sym, NULL, 0, 0);
328 c = sym->c;
330 /* now we can add ELF relocation info */
331 put_elf_reloca(symtab_section, s, offset, type, c, addend);
334 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
336 greloca(s, sym, offset, type, 0);
339 /* ------------------------------------------------------------------------- */
340 /* symbol allocator */
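/* Syms are carved out of pools of SYM_POOL_NB entries; freed Syms are chained
   on sym_free_first and handed back by sym_malloc() before a new pool is
   allocated. */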
341 static Sym *__sym_malloc(void)
343 Sym *sym_pool, *sym, *last_sym;
344 int i;
346 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
347 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
349 last_sym = sym_free_first;
350 sym = sym_pool;
351 for(i = 0; i < SYM_POOL_NB; i++) {
352 sym->next = last_sym;
353 last_sym = sym;
354 sym++;
356 sym_free_first = last_sym;
357 return last_sym;
360 static inline Sym *sym_malloc(void)
362 Sym *sym;
363 #ifndef SYM_DEBUG
364 sym = sym_free_first;
365 if (!sym)
366 sym = __sym_malloc();
367 sym_free_first = sym->next;
368 return sym;
369 #else
370 sym = tcc_malloc(sizeof(Sym));
371 return sym;
372 #endif
375 ST_INLN void sym_free(Sym *sym)
377 #ifndef SYM_DEBUG
378 sym->next = sym_free_first;
379 sym_free_first = sym;
380 #else
381 tcc_free(sym);
382 #endif
385 /* push, without hashing */
386 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
388 Sym *s;
390 s = sym_malloc();
391 s->scope = 0;
392 s->v = v;
393 s->type.t = t;
394 s->type.ref = NULL;
395 #ifdef _WIN64
396 s->d = NULL;
397 #endif
398 s->c = c;
399 s->next = NULL;
400 /* add in stack */
401 s->prev = *ps;
402 *ps = s;
403 return s;
406 /* find a symbol and return its associated structure. 's' is the top
407 of the symbol stack */
408 ST_FUNC Sym *sym_find2(Sym *s, int v)
410 while (s) {
411 if (s->v == v)
412 return s;
413 else if (s->v == -1)
414 return NULL;
415 s = s->prev;
417 return NULL;
420 /* structure lookup */
421 ST_INLN Sym *struct_find(int v)
423 v -= TOK_IDENT;
424 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
425 return NULL;
426 return table_ident[v]->sym_struct;
429 /* find an identifier */
430 ST_INLN Sym *sym_find(int v)
432 v -= TOK_IDENT;
433 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
434 return NULL;
435 return table_ident[v]->sym_identifier;
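/* note: identifier tokens are numbered from TOK_IDENT upward, so table_ident
   is indexed with tok - TOK_IDENT; each TokenSym caches the currently visible
   struct tag (sym_struct) and ordinary identifier (sym_identifier) bindings. */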
438 /* push a given symbol on the symbol stack */
439 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
441 Sym *s, **ps;
442 TokenSym *ts;
444 if (local_stack)
445 ps = &local_stack;
446 else
447 ps = &global_stack;
448 s = sym_push2(ps, v, type->t, c);
449 s->type.ref = type->ref;
450 s->r = r;
451 /* don't record fields or anonymous symbols */
452 /* XXX: simplify */
453 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
454 /* record symbol in token array */
455 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
456 if (v & SYM_STRUCT)
457 ps = &ts->sym_struct;
458 else
459 ps = &ts->sym_identifier;
460 s->prev_tok = *ps;
461 *ps = s;
462 s->scope = local_scope;
463 if (s->prev_tok && s->prev_tok->scope == s->scope)
464 tcc_error("redeclaration of '%s'",
465 get_tok_str(v & ~SYM_STRUCT, NULL));
467 return s;
470 /* push a global identifier */
471 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
473 Sym *s, **ps;
474 s = sym_push2(&global_stack, v, t, c);
475 /* don't record anonymous symbol */
476 if (v < SYM_FIRST_ANOM) {
477 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
478 /* modify the top most local identifier, so that
479 sym_identifier will point to 's' when popped */
480 while (*ps != NULL)
481 ps = &(*ps)->prev_tok;
482 s->prev_tok = NULL;
483 *ps = s;
485 return s;
488 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
489 pop them yet from the list, but do remove them from the token array. */
490 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
492 Sym *s, *ss, **ps;
493 TokenSym *ts;
494 int v;
496 s = *ptop;
497 while(s != b) {
498 ss = s->prev;
499 v = s->v;
500 /* remove symbol in token array */
501 /* XXX: simplify */
502 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
503 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
504 if (v & SYM_STRUCT)
505 ps = &ts->sym_struct;
506 else
507 ps = &ts->sym_identifier;
508 *ps = s->prev_tok;
510 if (!keep)
511 sym_free(s);
512 s = ss;
514 if (!keep)
515 *ptop = b;
518 static void weaken_symbol(Sym *sym)
520 sym->type.t |= VT_WEAK;
521 if (sym->c > 0) {
522 int esym_type;
523 ElfW(Sym) *esym;
525 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
526 esym_type = ELFW(ST_TYPE)(esym->st_info);
527 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
531 static void apply_visibility(Sym *sym, CType *type)
533 int vis = sym->type.t & VT_VIS_MASK;
534 int vis2 = type->t & VT_VIS_MASK;
535 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
536 vis = vis2;
537 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
538 ;
539 else
540 vis = (vis < vis2) ? vis : vis2;
541 sym->type.t &= ~VT_VIS_MASK;
542 sym->type.t |= vis;
544 if (sym->c > 0) {
545 ElfW(Sym) *esym;
547 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
548 vis >>= VT_VIS_SHIFT;
549 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
553 /* ------------------------------------------------------------------------- */
555 ST_FUNC void swap(int *p, int *q)
557 int t;
558 t = *p;
559 *p = *q;
560 *q = t;
563 static void vsetc(CType *type, int r, CValue *vc)
565 int v;
567 if (vtop >= vstack + (VSTACK_SIZE - 1))
568 tcc_error("memory full (vstack)");
569 /* cannot leave values in cpu flags if other instructions are generated. Also
570 avoid leaving VT_JMP anywhere except on the top of the stack
571 because it would complicate the code generator. */
572 if (vtop >= vstack) {
573 v = vtop->r & VT_VALMASK;
574 if (v == VT_CMP || (v & ~1) == VT_JMP)
575 gv(RC_INT);
577 vtop++;
578 vtop->type = *type;
579 vtop->r = r;
580 vtop->r2 = VT_CONST;
581 vtop->c = *vc;
582 vtop->sym = NULL;
585 /* push constant of type "type" with useless value */
586 ST_FUNC void vpush(CType *type)
588 CValue cval;
589 vsetc(type, VT_CONST, &cval);
592 /* push integer constant */
593 ST_FUNC void vpushi(int v)
595 CValue cval;
596 cval.i = v;
597 vsetc(&int_type, VT_CONST, &cval);
600 /* push a pointer sized constant */
601 static void vpushs(addr_t v)
603 CValue cval;
604 cval.i = v;
605 vsetc(&size_type, VT_CONST, &cval);
608 /* push arbitrary 64bit constant */
609 ST_FUNC void vpush64(int ty, unsigned long long v)
611 CValue cval;
612 CType ctype;
613 ctype.t = ty;
614 ctype.ref = NULL;
615 cval.i = v;
616 vsetc(&ctype, VT_CONST, &cval);
619 /* push long long constant */
620 static inline void vpushll(long long v)
622 vpush64(VT_LLONG, v);
625 /* push a symbol value of TYPE */
626 static inline void vpushsym(CType *type, Sym *sym)
628 CValue cval;
629 cval.i = 0;
630 vsetc(type, VT_CONST | VT_SYM, &cval);
631 vtop->sym = sym;
634 /* Return a static symbol pointing to a section */
635 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
637 int v;
638 Sym *sym;
640 v = anon_sym++;
641 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
642 sym->type.ref = type->ref;
643 sym->r = VT_CONST | VT_SYM;
644 put_extern_sym(sym, sec, offset, size);
645 return sym;
648 /* push a reference to a section offset by adding a dummy symbol */
649 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
651 vpushsym(type, get_sym_ref(type, sec, offset, size));
654 /* define a new external reference to a symbol 'v' of type 'u' */
655 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
657 Sym *s;
659 s = sym_find(v);
660 if (!s) {
661 /* push forward reference */
662 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
663 s->type.ref = type->ref;
664 s->r = r | VT_CONST | VT_SYM;
666 return s;
669 /* define a new external reference to a symbol 'v' */
670 static Sym *external_sym(int v, CType *type, int r)
672 Sym *s;
674 s = sym_find(v);
675 if (!s) {
676 /* push forward reference */
677 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
678 s->type.t |= VT_EXTERN;
679 } else if (s->type.ref == func_old_type.ref) {
680 s->type.ref = type->ref;
681 s->r = r | VT_CONST | VT_SYM;
682 s->type.t |= VT_EXTERN;
683 } else if (!is_compatible_types(&s->type, type)) {
684 tcc_error("incompatible types for redefinition of '%s'",
685 get_tok_str(v, NULL));
687 /* Merge some storage attributes. */
688 if (type->t & VT_WEAK)
689 weaken_symbol(s);
691 if (type->t & VT_VIS_MASK)
692 apply_visibility(s, type);
694 return s;
697 /* push a reference to global symbol v */
698 ST_FUNC void vpush_global_sym(CType *type, int v)
700 vpushsym(type, external_global_sym(v, type, 0));
703 ST_FUNC void vset(CType *type, int r, long v)
705 CValue cval;
707 cval.i = v;
708 vsetc(type, r, &cval);
711 static void vseti(int r, int v)
713 CType type;
714 type.t = VT_INT;
715 type.ref = 0;
716 vset(&type, r, v);
719 ST_FUNC void vswap(void)
721 SValue tmp;
722 /* cannot leave values in cpu flags if other instructions are generated. Also
723 avoid leaving VT_JMP anywhere except on the top of the stack
724 because it would complicate the code generator. */
725 if (vtop >= vstack) {
726 int v = vtop->r & VT_VALMASK;
727 if (v == VT_CMP || (v & ~1) == VT_JMP)
728 gv(RC_INT);
730 tmp = vtop[0];
731 vtop[0] = vtop[-1];
732 vtop[-1] = tmp;
734 /* XXX: +2% overall speed possible with optimized memswap
736 * memswap(&vtop[0], &vtop[1], sizeof *vtop); */
740 ST_FUNC void vpushv(SValue *v)
742 if (vtop >= vstack + (VSTACK_SIZE - 1))
743 tcc_error("memory full (vstack)");
744 vtop++;
745 *vtop = *v;
748 static void vdup(void)
750 vpushv(vtop);
753 /* save registers up to (vtop - n) stack entry */
754 ST_FUNC void save_regs(int n)
756 SValue *p, *p1;
757 for(p = vstack, p1 = vtop - n; p <= p1; p++)
758 save_reg(p->r);
761 /* save r to the memory stack, and mark it as being free */
762 ST_FUNC void save_reg(int r)
764 save_reg_upstack(r, 0);
767 /* save r to the memory stack, and mark it as being free,
768 if seen up to (vtop - n) stack entry */
769 ST_FUNC void save_reg_upstack(int r, int n)
771 int l, saved, size, align;
772 SValue *p, *p1, sv;
773 CType *type;
775 if ((r &= VT_VALMASK) >= VT_CONST)
776 return;
778 /* modify all stack values */
779 saved = 0;
780 l = 0;
781 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
782 if ((p->r & VT_VALMASK) == r ||
783 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
784 /* must save value on stack if not already done */
785 if (!saved) {
786 /* NOTE: must reload 'r' because r might be equal to r2 */
787 r = p->r & VT_VALMASK;
788 /* store register in the stack */
789 type = &p->type;
790 if ((p->r & VT_LVAL) ||
791 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
792 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
793 type = &char_pointer_type;
794 #else
795 type = &int_type;
796 #endif
797 size = type_size(type, &align);
798 loc = (loc - size) & -align;
799 sv.type.t = type->t;
800 sv.r = VT_LOCAL | VT_LVAL;
801 sv.c.i = loc;
802 store(r, &sv);
803 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
804 /* x86 specific: need to pop fp register ST0 if saved */
805 if (r == TREG_ST0) {
806 o(0xd8dd); /* fstp %st(0) */
808 #endif
809 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
810 /* special long long case */
811 if ((type->t & VT_BTYPE) == VT_LLONG) {
812 sv.c.i += 4;
813 store(p->r2, &sv);
815 #endif
816 l = loc;
817 saved = 1;
819 /* mark that stack entry as being saved on the stack */
820 if (p->r & VT_LVAL) {
821 /* also clear the bounded flag because the
822 relocation address of the function was stored in
823 p->c.i */
824 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
825 } else {
826 p->r = lvalue_type(p->type.t) | VT_LOCAL;
828 p->r2 = VT_CONST;
829 p->c.i = l;
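/* after this point the entry refers to the spill slot: VT_LLOCAL means the
   slot holds the *address* of the lvalue, so a later use must first reload
   that pointer before dereferencing. */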
834 #ifdef TCC_TARGET_ARM
835 /* find a register of class 'rc2' with at most one reference on stack.
836 * If none, call get_reg(rc) */
837 ST_FUNC int get_reg_ex(int rc, int rc2)
839 int r;
840 SValue *p;
842 for(r=0;r<NB_REGS;r++) {
843 if (reg_classes[r] & rc2) {
844 int n;
845 n=0;
846 for(p = vstack; p <= vtop; p++) {
847 if ((p->r & VT_VALMASK) == r ||
848 (p->r2 & VT_VALMASK) == r)
849 n++;
851 if (n <= 1)
852 return r;
855 return get_reg(rc);
857 #endif
859 /* find a free register of class 'rc'. If none, save one register */
860 ST_FUNC int get_reg(int rc)
862 int r;
863 SValue *p;
865 /* find a free register */
866 for(r=0;r<NB_REGS;r++) {
867 if (reg_classes[r] & rc) {
868 for(p=vstack;p<=vtop;p++) {
869 if ((p->r & VT_VALMASK) == r ||
870 (p->r2 & VT_VALMASK) == r)
871 goto notfound;
873 return r;
875 notfound: ;
878 /* no register left : free the first one on the stack (VERY
879 IMPORTANT to start from the bottom to ensure that we don't
880 spill registers used in gen_opi()) */
881 for(p=vstack;p<=vtop;p++) {
882 /* look at second register (if long long) */
883 r = p->r2 & VT_VALMASK;
884 if (r < VT_CONST && (reg_classes[r] & rc))
885 goto save_found;
886 r = p->r & VT_VALMASK;
887 if (r < VT_CONST && (reg_classes[r] & rc)) {
888 save_found:
889 save_reg(r);
890 return r;
893 /* Should never come here */
894 return -1;
897 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
898 if needed */
899 static void move_reg(int r, int s, int t)
901 SValue sv;
903 if (r != s) {
904 save_reg(r);
905 sv.type.t = t;
906 sv.type.ref = NULL;
907 sv.r = s;
908 sv.c.i = 0;
909 load(r, &sv);
913 /* get address of vtop (vtop MUST BE an lvalue) */
914 ST_FUNC void gaddrof(void)
916 if (vtop->r & VT_REF && !nocode_wanted)
917 gv(RC_INT);
918 vtop->r &= ~VT_LVAL;
919 /* tricky: if saved lvalue, then we can go back to lvalue */
920 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
921 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
926 #ifdef CONFIG_TCC_BCHECK
927 /* generate lvalue bound code */
928 static void gbound(void)
930 int lval_type;
931 CType type1;
933 vtop->r &= ~VT_MUSTBOUND;
934 /* if lvalue, then use checking code before dereferencing */
935 if (vtop->r & VT_LVAL) {
936 /* if not VT_BOUNDED value, then make one */
937 if (!(vtop->r & VT_BOUNDED)) {
938 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
939 /* must save type because we must set it to int to get pointer */
940 type1 = vtop->type;
941 vtop->type.t = VT_PTR;
942 gaddrof();
943 vpushi(0);
944 gen_bounded_ptr_add();
945 vtop->r |= lval_type;
946 vtop->type = type1;
948 /* then check for dereferencing */
949 gen_bounded_ptr_deref();
952 #endif
954 /* store vtop in a register belonging to class 'rc'. lvalues are
955 converted to values. Cannot be used if the value cannot be converted
956 to a register value (such as structures). */
957 ST_FUNC int gv(int rc)
959 int r, bit_pos, bit_size, size, align, i;
960 int rc2;
962 /* NOTE: get_reg can modify vstack[] */
963 if (vtop->type.t & VT_BITFIELD) {
964 CType type;
965 int bits = 32;
966 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
967 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
968 /* remove bit field info to avoid loops */
969 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
970 /* cast to int to propagate signedness in following ops */
971 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
972 type.t = VT_LLONG;
973 bits = 64;
974 } else
975 type.t = VT_INT;
976 if((vtop->type.t & VT_UNSIGNED) ||
977 (vtop->type.t & VT_BTYPE) == VT_BOOL)
978 type.t |= VT_UNSIGNED;
979 gen_cast(&type);
980 /* generate shifts */
981 vpushi(bits - (bit_pos + bit_size));
982 gen_op(TOK_SHL);
983 vpushi(bits - bit_size);
984 /* NOTE: transformed to SHR if unsigned */
985 gen_op(TOK_SAR);
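/* the SHL/SAR pair first moves the bit field to the most significant end of
   the word, then shifts it back down, so the value comes out sign-extended
   (or zero-extended, since SAR is turned into SHR for unsigned types). */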
986 r = gv(rc);
987 } else {
988 if (is_float(vtop->type.t) &&
989 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
990 Sym *sym;
991 int *ptr;
992 unsigned long offset;
993 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
994 CValue check;
995 #endif
997 /* XXX: unify with initializers handling ? */
998 /* CPUs usually cannot use float constants, so we store them
999 generically in the data segment */
1000 size = type_size(&vtop->type, &align);
1001 offset = (data_section->data_offset + align - 1) & -align;
1002 data_section->data_offset = offset;
1003 /* XXX: not portable yet */
1004 #if defined(__i386__) || defined(__x86_64__)
1005 /* Zero pad x87 tenbyte long doubles */
1006 if (size == LDOUBLE_SIZE) {
1007 vtop->c.tab[2] &= 0xffff;
1008 #if LDOUBLE_SIZE == 16
1009 vtop->c.tab[3] = 0;
1010 #endif
1012 #endif
1013 ptr = section_ptr_add(data_section, size);
1014 size = size >> 2;
1015 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1016 check.d = 1;
1017 if(check.tab[0])
1018 for(i=0;i<size;i++)
1019 ptr[i] = vtop->c.tab[size-1-i];
1020 else
1021 #endif
1022 for(i=0;i<size;i++)
1023 ptr[i] = vtop->c.tab[i];
1024 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1025 vtop->r |= VT_LVAL | VT_SYM;
1026 vtop->sym = sym;
1027 vtop->c.i = 0;
1029 #ifdef CONFIG_TCC_BCHECK
1030 if (vtop->r & VT_MUSTBOUND)
1031 gbound();
1032 #endif
1034 r = vtop->r & VT_VALMASK;
1035 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1036 #ifndef TCC_TARGET_ARM64
1037 if (rc == RC_IRET)
1038 rc2 = RC_LRET;
1039 #ifdef TCC_TARGET_X86_64
1040 else if (rc == RC_FRET)
1041 rc2 = RC_QRET;
1042 #endif
1043 #endif
1045 /* need to reload if:
1046 - constant
1047 - lvalue (need to dereference pointer)
1048 - already a register, but not in the right class */
1049 if (r >= VT_CONST
1050 || (vtop->r & VT_LVAL)
1051 || !(reg_classes[r] & rc)
1052 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1053 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1054 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1055 #else
1056 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1057 #endif
1060 r = get_reg(rc);
1061 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1062 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1063 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1064 #else
1065 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1066 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1067 unsigned long long ll;
1068 #endif
1069 int r2, original_type;
1070 original_type = vtop->type.t;
1071 /* two register type load : expand to two words
1072 temporarily */
1073 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1074 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1075 /* load constant */
1076 ll = vtop->c.i;
1077 vtop->c.i = ll; /* first word */
1078 load(r, vtop);
1079 vtop->r = r; /* save register value */
1080 vpushi(ll >> 32); /* second word */
1081 } else
1082 #endif
1083 if (vtop->r & VT_LVAL) {
1084 /* We do not want to modify the long long
1085 pointer here, so the safest (and least
1086 efficient) way is to save all the other registers
1087 on the stack. XXX: totally inefficient. */
1088 #if 0
1089 save_regs(1);
1090 #else
1091 /* lvalue_save: save only if used further down the stack */
1092 save_reg_upstack(vtop->r, 1);
1093 #endif
1094 /* load from memory */
1095 vtop->type.t = load_type;
1096 load(r, vtop);
1097 vdup();
1098 vtop[-1].r = r; /* save register value */
1099 /* increment pointer to get second word */
1100 vtop->type.t = addr_type;
1101 gaddrof();
1102 vpushi(load_size);
1103 gen_op('+');
1104 vtop->r |= VT_LVAL;
1105 vtop->type.t = load_type;
1106 } else {
1107 /* move registers */
1108 load(r, vtop);
1109 vdup();
1110 vtop[-1].r = r; /* save register value */
1111 vtop->r = vtop[-1].r2;
1113 /* Allocate second register. Here we rely on the fact that
1114 get_reg() tries first to free r2 of an SValue. */
1115 r2 = get_reg(rc2);
1116 load(r2, vtop);
1117 vpop();
1118 /* write second register */
1119 vtop->r2 = r2;
1120 vtop->type.t = original_type;
1121 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1122 int t1, t;
1123 /* lvalue of scalar type : need to use lvalue type
1124 because of possible cast */
1125 t = vtop->type.t;
1126 t1 = t;
1127 /* compute memory access type */
1128 if (vtop->r & VT_REF)
1129 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1130 t = VT_PTR;
1131 #else
1132 t = VT_INT;
1133 #endif
1134 else if (vtop->r & VT_LVAL_BYTE)
1135 t = VT_BYTE;
1136 else if (vtop->r & VT_LVAL_SHORT)
1137 t = VT_SHORT;
1138 if (vtop->r & VT_LVAL_UNSIGNED)
1139 t |= VT_UNSIGNED;
1140 vtop->type.t = t;
1141 load(r, vtop);
1142 /* restore wanted type */
1143 vtop->type.t = t1;
1144 } else {
1145 /* one register type load */
1146 load(r, vtop);
1149 vtop->r = r;
1150 #ifdef TCC_TARGET_C67
1151 /* uses register pairs for doubles */
1152 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1153 vtop->r2 = r+1;
1154 #endif
1156 return r;
1159 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1160 ST_FUNC void gv2(int rc1, int rc2)
1162 int v;
1164 /* generate more generic register first. But VT_JMP or VT_CMP
1165 values must be generated first in all cases to avoid possible
1166 reload errors */
1167 v = vtop[0].r & VT_VALMASK;
1168 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1169 vswap();
1170 gv(rc1);
1171 vswap();
1172 gv(rc2);
1173 /* test if reload is needed for first register */
1174 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1175 vswap();
1176 gv(rc1);
1177 vswap();
1179 } else {
1180 gv(rc2);
1181 vswap();
1182 gv(rc1);
1183 vswap();
1184 /* test if reload is needed for first register */
1185 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1186 gv(rc2);
1191 #ifndef TCC_TARGET_ARM64
1192 /* wrapper around RC_FRET to return a register by type */
1193 static int rc_fret(int t)
1195 #ifdef TCC_TARGET_X86_64
1196 if (t == VT_LDOUBLE) {
1197 return RC_ST0;
1199 #endif
1200 return RC_FRET;
1202 #endif
1204 /* wrapper around REG_FRET to return a register by type */
1205 static int reg_fret(int t)
1207 #ifdef TCC_TARGET_X86_64
1208 if (t == VT_LDOUBLE) {
1209 return TREG_ST0;
1211 #endif
1212 return REG_FRET;
1215 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1216 /* expand 64bit on stack in two ints */
1217 static void lexpand(void)
1219 int u, v;
1220 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1221 v = vtop->r & (VT_VALMASK | VT_LVAL);
1222 if (v == VT_CONST) {
1223 vdup();
1224 vtop[0].c.i >>= 32;
1225 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1226 vdup();
1227 vtop[0].c.i += 4;
1228 } else {
1229 gv(RC_INT);
1230 vdup();
1231 vtop[0].r = vtop[-1].r2;
1232 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1234 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
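/* on exit the 64-bit value occupies two stack entries of type VT_INT:
   vtop[-1] is the low word and vtop[0] the high word (constants are split
   arithmetically, lvalues by a +4 byte offset, registers via r/r2). */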
1236 #endif
1238 #ifdef TCC_TARGET_ARM
1239 /* expand long long on stack */
1240 ST_FUNC void lexpand_nr(void)
1242 int u,v;
1244 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1245 vdup();
1246 vtop->r2 = VT_CONST;
1247 vtop->type.t = VT_INT | u;
1248 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1249 if (v == VT_CONST) {
1250 vtop[-1].c.i = vtop->c.i;
1251 vtop->c.i = vtop->c.i >> 32;
1252 vtop->r = VT_CONST;
1253 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1254 vtop->c.i += 4;
1255 vtop->r = vtop[-1].r;
1256 } else if (v > VT_CONST) {
1257 vtop--;
1258 lexpand();
1259 } else
1260 vtop->r = vtop[-1].r2;
1261 vtop[-1].r2 = VT_CONST;
1262 vtop[-1].type.t = VT_INT | u;
1264 #endif
1266 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1267 /* build a long long from two ints */
1268 static void lbuild(int t)
1270 gv2(RC_INT, RC_INT);
1271 vtop[-1].r2 = vtop[0].r;
1272 vtop[-1].type.t = t;
1273 vpop();
1275 #endif
1277 /* rotate n first stack elements to the bottom
1278 I1 ... In -> I2 ... In I1 [top is right] */
1280 ST_FUNC void vrotb(int n)
1282 int i;
1283 SValue tmp;
1285 tmp = vtop[-n + 1];
1286 for(i=-n+1;i!=0;i++)
1287 vtop[i] = vtop[i+1];
1288 vtop[0] = tmp;
1291 /* rotate the n elements before entry e towards the top
1292 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
1294 ST_FUNC void vrote(SValue *e, int n)
1296 int i;
1297 SValue tmp;
1299 tmp = *e;
1300 for(i = 0;i < n - 1; i++)
1301 e[-i] = e[-i - 1];
1302 e[-n + 1] = tmp;
1305 /* rotate n first stack elements to the top
1306 I1 ... In -> In I1 ... I(n-1) [top is right] */
1308 ST_FUNC void vrott(int n)
1310 vrote(vtop, n);
1313 /* pop stack value */
1314 ST_FUNC void vpop(void)
1316 int v;
1317 v = vtop->r & VT_VALMASK;
1318 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1319 /* for x86, we need to pop the FP stack */
1320 if (v == TREG_ST0 && !nocode_wanted) {
1321 o(0xd8dd); /* fstp %st(0) */
1322 } else
1323 #endif
1324 if (v == VT_JMP || v == VT_JMPI) {
1325 /* need to put correct jump if && or || without test */
1326 gsym(vtop->c.i);
1328 vtop--;
1331 /* convert stack entry to register and duplicate its value in another
1332 register */
1333 static void gv_dup(void)
1335 int rc, t, r, r1;
1336 SValue sv;
1338 t = vtop->type.t;
1339 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1340 if ((t & VT_BTYPE) == VT_LLONG) {
1341 lexpand();
1342 gv_dup();
1343 vswap();
1344 vrotb(3);
1345 gv_dup();
1346 vrotb(4);
1347 /* stack: H L L1 H1 */
1348 lbuild(t);
1349 vrotb(3);
1350 vrotb(3);
1351 vswap();
1352 lbuild(t);
1353 vswap();
1354 } else
1355 #endif
1357 /* duplicate value */
1358 rc = RC_INT;
1359 sv.type.t = VT_INT;
1360 if (is_float(t)) {
1361 rc = RC_FLOAT;
1362 #ifdef TCC_TARGET_X86_64
1363 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1364 rc = RC_ST0;
1366 #endif
1367 sv.type.t = t;
1369 r = gv(rc);
1370 r1 = get_reg(rc);
1371 sv.r = r;
1372 sv.c.i = 0;
1373 load(r1, &sv); /* move r to r1 */
1374 vdup();
1375 /* duplicates value */
1376 if (r != r1)
1377 vtop->r = r1;
1381 /* Generate value test
1383 * Generate a test for any value (jump, comparison and integers) */
1384 ST_FUNC int gvtst(int inv, int t)
1386 int v = vtop->r & VT_VALMASK;
1387 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1388 vpushi(0);
1389 gen_op(TOK_NE);
1391 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1392 /* constant jmp optimization */
1393 if ((vtop->c.i != 0) != inv)
1394 t = gjmp(t);
1395 vtop--;
1396 return t;
1398 return gtst(inv, t);
1401 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1402 /* generate CPU independent (unsigned) long long operations */
1403 static void gen_opl(int op)
1405 int t, a, b, op1, c, i;
1406 int func;
1407 unsigned short reg_iret = REG_IRET;
1408 unsigned short reg_lret = REG_LRET;
1409 SValue tmp;
1411 switch(op) {
1412 case '/':
1413 case TOK_PDIV:
1414 func = TOK___divdi3;
1415 goto gen_func;
1416 case TOK_UDIV:
1417 func = TOK___udivdi3;
1418 goto gen_func;
1419 case '%':
1420 func = TOK___moddi3;
1421 goto gen_mod_func;
1422 case TOK_UMOD:
1423 func = TOK___umoddi3;
1424 gen_mod_func:
1425 #ifdef TCC_ARM_EABI
1426 reg_iret = TREG_R2;
1427 reg_lret = TREG_R3;
1428 #endif
1429 gen_func:
1430 /* call generic long long function */
1431 vpush_global_sym(&func_old_type, func);
1432 vrott(3);
1433 gfunc_call(2);
1434 vpushi(0);
1435 vtop->r = reg_iret;
1436 vtop->r2 = reg_lret;
1437 break;
1438 case '^':
1439 case '&':
1440 case '|':
1441 case '*':
1442 case '+':
1443 case '-':
1444 //pv("gen_opl A",0,2);
1445 t = vtop->type.t;
1446 vswap();
1447 lexpand();
1448 vrotb(3);
1449 lexpand();
1450 /* stack: L1 H1 L2 H2 */
1451 tmp = vtop[0];
1452 vtop[0] = vtop[-3];
1453 vtop[-3] = tmp;
1454 tmp = vtop[-2];
1455 vtop[-2] = vtop[-3];
1456 vtop[-3] = tmp;
1457 vswap();
1458 /* stack: H1 H2 L1 L2 */
1459 //pv("gen_opl B",0,4);
1460 if (op == '*') {
1461 vpushv(vtop - 1);
1462 vpushv(vtop - 1);
1463 gen_op(TOK_UMULL);
1464 lexpand();
1465 /* stack: H1 H2 L1 L2 ML MH */
1466 for(i=0;i<4;i++)
1467 vrotb(6);
1468 /* stack: ML MH H1 H2 L1 L2 */
1469 tmp = vtop[0];
1470 vtop[0] = vtop[-2];
1471 vtop[-2] = tmp;
1472 /* stack: ML MH H1 L2 H2 L1 */
1473 gen_op('*');
1474 vrotb(3);
1475 vrotb(3);
1476 gen_op('*');
1477 /* stack: ML MH M1 M2 */
1478 gen_op('+');
1479 gen_op('+');
1480 } else if (op == '+' || op == '-') {
1481 /* XXX: add non carry method too (for MIPS or alpha) */
1482 if (op == '+')
1483 op1 = TOK_ADDC1;
1484 else
1485 op1 = TOK_SUBC1;
1486 gen_op(op1);
1487 /* stack: H1 H2 (L1 op L2) */
1488 vrotb(3);
1489 vrotb(3);
1490 gen_op(op1 + 1); /* TOK_xxxC2 */
1491 } else {
1492 gen_op(op);
1493 /* stack: H1 H2 (L1 op L2) */
1494 vrotb(3);
1495 vrotb(3);
1496 /* stack: (L1 op L2) H1 H2 */
1497 gen_op(op);
1498 /* stack: (L1 op L2) (H1 op H2) */
1500 /* stack: L H */
1501 lbuild(t);
1502 break;
1503 case TOK_SAR:
1504 case TOK_SHR:
1505 case TOK_SHL:
1506 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1507 t = vtop[-1].type.t;
1508 vswap();
1509 lexpand();
1510 vrotb(3);
1511 /* stack: L H shift */
1512 c = (int)vtop->c.i;
1513 /* constant: simpler */
1514 /* NOTE: all comments are for SHL. The other cases are
1515 done by swapping words */
1516 vpop();
1517 if (op != TOK_SHL)
1518 vswap();
1519 if (c >= 32) {
1520 /* stack: L H */
1521 vpop();
1522 if (c > 32) {
1523 vpushi(c - 32);
1524 gen_op(op);
1526 if (op != TOK_SAR) {
1527 vpushi(0);
1528 } else {
1529 gv_dup();
1530 vpushi(31);
1531 gen_op(TOK_SAR);
1533 vswap();
1534 } else {
1535 vswap();
1536 gv_dup();
1537 /* stack: H L L */
1538 vpushi(c);
1539 gen_op(op);
1540 vswap();
1541 vpushi(32 - c);
1542 if (op == TOK_SHL)
1543 gen_op(TOK_SHR);
1544 else
1545 gen_op(TOK_SHL);
1546 vrotb(3);
1547 /* stack: L L H */
1548 vpushi(c);
1549 if (op == TOK_SHL)
1550 gen_op(TOK_SHL);
1551 else
1552 gen_op(TOK_SHR);
1553 gen_op('|');
1555 if (op != TOK_SHL)
1556 vswap();
1557 lbuild(t);
1558 } else {
1559 /* XXX: should provide a faster fallback on x86 ? */
1560 switch(op) {
1561 case TOK_SAR:
1562 func = TOK___ashrdi3;
1563 goto gen_func;
1564 case TOK_SHR:
1565 func = TOK___lshrdi3;
1566 goto gen_func;
1567 case TOK_SHL:
1568 func = TOK___ashldi3;
1569 goto gen_func;
1572 break;
1573 default:
1574 /* compare operations */
1575 t = vtop->type.t;
1576 vswap();
1577 lexpand();
1578 vrotb(3);
1579 lexpand();
1580 /* stack: L1 H1 L2 H2 */
1581 tmp = vtop[-1];
1582 vtop[-1] = vtop[-2];
1583 vtop[-2] = tmp;
1584 /* stack: L1 L2 H1 H2 */
1585 /* compare high */
1586 op1 = op;
1587 /* when values are equal, we need to compare low words. since
1588 the jump is inverted, we invert the test too. */
1589 if (op1 == TOK_LT)
1590 op1 = TOK_LE;
1591 else if (op1 == TOK_GT)
1592 op1 = TOK_GE;
1593 else if (op1 == TOK_ULT)
1594 op1 = TOK_ULE;
1595 else if (op1 == TOK_UGT)
1596 op1 = TOK_UGE;
1597 a = 0;
1598 b = 0;
1599 gen_op(op1);
1600 if (op1 != TOK_NE) {
1601 a = gvtst(1, 0);
1603 if (op != TOK_EQ) {
1604 /* generate non equal test */
1605 /* XXX: NOT PORTABLE yet */
1606 if (a == 0) {
1607 b = gvtst(0, 0);
1608 } else {
1609 #if defined(TCC_TARGET_I386)
1610 b = psym(0x850f, 0);
1611 #elif defined(TCC_TARGET_ARM)
1612 b = ind;
1613 o(0x1A000000 | encbranch(ind, 0, 1));
1614 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1615 tcc_error("not implemented");
1616 #else
1617 #error not supported
1618 #endif
1621 /* compare low. Always unsigned */
1622 op1 = op;
1623 if (op1 == TOK_LT)
1624 op1 = TOK_ULT;
1625 else if (op1 == TOK_LE)
1626 op1 = TOK_ULE;
1627 else if (op1 == TOK_GT)
1628 op1 = TOK_UGT;
1629 else if (op1 == TOK_GE)
1630 op1 = TOK_UGE;
1631 gen_op(op1);
1632 a = gvtst(1, a);
1633 gsym(b);
1634 vseti(VT_JMPI, a);
1635 break;
1638 #endif
1640 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1642 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1643 return (a ^ b) >> 63 ? -x : x;
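/* signed 64-bit division folded with unsigned arithmetic: negate negative
   operands, divide, and negate the quotient again if the operand signs
   differ. */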
1646 static int gen_opic_lt(uint64_t a, uint64_t b)
1648 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
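/* flipping the sign bit of both operands maps signed order onto unsigned
   order, so a plain unsigned compare yields the signed '<' result. */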
1651 /* handle integer constant optimizations and various machine-
1652 independent optimizations */
1653 static void gen_opic(int op)
1655 SValue *v1 = vtop - 1;
1656 SValue *v2 = vtop;
1657 int t1 = v1->type.t & VT_BTYPE;
1658 int t2 = v2->type.t & VT_BTYPE;
1659 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1660 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1661 uint64_t l1 = c1 ? v1->c.i : 0;
1662 uint64_t l2 = c2 ? v2->c.i : 0;
1663 int shm = (t1 == VT_LLONG) ? 63 : 31;
1665 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1666 l1 = ((uint32_t)l1 |
1667 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1668 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1669 l2 = ((uint32_t)l2 |
1670 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
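/* 32-bit operands are normalized here to their zero- or sign-extended 64-bit
   value, so the constant folding below can work uniformly on uint64_t. */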
1672 if (c1 && c2) {
1673 switch(op) {
1674 case '+': l1 += l2; break;
1675 case '-': l1 -= l2; break;
1676 case '&': l1 &= l2; break;
1677 case '^': l1 ^= l2; break;
1678 case '|': l1 |= l2; break;
1679 case '*': l1 *= l2; break;
1681 case TOK_PDIV:
1682 case '/':
1683 case '%':
1684 case TOK_UDIV:
1685 case TOK_UMOD:
1686 /* if division by zero, generate explicit division */
1687 if (l2 == 0) {
1688 if (const_wanted)
1689 tcc_error("division by zero in constant");
1690 goto general_case;
1692 switch(op) {
1693 default: l1 = gen_opic_sdiv(l1, l2); break;
1694 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1695 case TOK_UDIV: l1 = l1 / l2; break;
1696 case TOK_UMOD: l1 = l1 % l2; break;
1698 break;
1699 case TOK_SHL: l1 <<= (l2 & shm); break;
1700 case TOK_SHR: l1 >>= (l2 & shm); break;
1701 case TOK_SAR:
1702 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1703 break;
1704 /* tests */
1705 case TOK_ULT: l1 = l1 < l2; break;
1706 case TOK_UGE: l1 = l1 >= l2; break;
1707 case TOK_EQ: l1 = l1 == l2; break;
1708 case TOK_NE: l1 = l1 != l2; break;
1709 case TOK_ULE: l1 = l1 <= l2; break;
1710 case TOK_UGT: l1 = l1 > l2; break;
1711 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1712 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1713 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1714 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1715 /* logical */
1716 case TOK_LAND: l1 = l1 && l2; break;
1717 case TOK_LOR: l1 = l1 || l2; break;
1718 default:
1719 goto general_case;
1721 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1722 l1 = ((uint32_t)l1 |
1723 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1724 v1->c.i = l1;
1725 vtop--;
1726 } else {
1727 /* if commutative ops, put c2 as constant */
1728 if (c1 && (op == '+' || op == '&' || op == '^' ||
1729 op == '|' || op == '*')) {
1730 vswap();
1731 c2 = c1; //c = c1, c1 = c2, c2 = c;
1732 l2 = l1; //l = l1, l1 = l2, l2 = l;
1734 if (!const_wanted &&
1735 c1 && ((l1 == 0 &&
1736 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1737 (l1 == -1 && op == TOK_SAR))) {
1738 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1739 vtop--;
1740 } else if (!const_wanted &&
1741 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1742 (l2 == -1 && op == '|') ||
1743 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1744 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1745 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1746 if (l2 == 1)
1747 vtop->c.i = 0;
1748 vswap();
1749 vtop--;
1750 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1751 op == TOK_PDIV) &&
1752 l2 == 1) ||
1753 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1754 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1755 l2 == 0) ||
1756 (op == '&' &&
1757 l2 == -1))) {
1758 /* filter out NOP operations like x*1, x-0, x&-1... */
1759 vtop--;
1760 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1761 /* try to use shifts instead of muls or divs */
1762 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1763 int n = -1;
1764 while (l2) {
1765 l2 >>= 1;
1766 n++;
1768 vtop->c.i = n;
1769 if (op == '*')
1770 op = TOK_SHL;
1771 else if (op == TOK_PDIV)
1772 op = TOK_SAR;
1773 else
1774 op = TOK_SHR;
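/* TOK_PDIV (the exact division produced by pointer subtraction) can safely
   become an arithmetic shift because the dividend is known to be a multiple
   of the element size; plain signed '/' is deliberately left alone. */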
1776 goto general_case;
1777 } else if (c2 && (op == '+' || op == '-') &&
1778 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1779 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1780 /* symbol + constant case */
1781 if (op == '-')
1782 l2 = -l2;
1783 l2 += vtop[-1].c.i;
1784 /* The backends can't always deal with addends to symbols
1785 larger than +-1<<31. Don't construct such. */
1786 if ((int)l2 != l2)
1787 goto general_case;
1788 vtop--;
1789 vtop->c.i = l2;
1790 } else {
1791 general_case:
1792 if (!nocode_wanted) {
1793 /* call low level op generator */
1794 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1795 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1796 gen_opl(op);
1797 else
1798 gen_opi(op);
1799 } else {
1800 vtop--;
1801 /* Ensure vtop isn't marked VT_CONST in case something
1802 up our callchain is interested in const-ness of the
1803 expression. Also make it a non-LVAL if it was,
1804 so that further code can't accidentally generate
1805 a deref (happens only for buggy uses of e.g.
1806 gv() under nocode_wanted). */
1807 vtop->r &= ~(VT_VALMASK | VT_LVAL);
1813 /* generate a floating point operation with constant propagation */
1814 static void gen_opif(int op)
1816 int c1, c2;
1817 SValue *v1, *v2;
1818 long double f1, f2;
1820 v1 = vtop - 1;
1821 v2 = vtop;
1822 /* currently, we cannot do computations with forward symbols */
1823 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1824 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 if (c1 && c2) {
1826 if (v1->type.t == VT_FLOAT) {
1827 f1 = v1->c.f;
1828 f2 = v2->c.f;
1829 } else if (v1->type.t == VT_DOUBLE) {
1830 f1 = v1->c.d;
1831 f2 = v2->c.d;
1832 } else {
1833 f1 = v1->c.ld;
1834 f2 = v2->c.ld;
1837 /* NOTE: we only do constant propagation if the operands are finite
1838 numbers (not NaN or infinity), as required by the ANSI spec */
1839 if (!ieee_finite(f1) || !ieee_finite(f2))
1840 goto general_case;
1842 switch(op) {
1843 case '+': f1 += f2; break;
1844 case '-': f1 -= f2; break;
1845 case '*': f1 *= f2; break;
1846 case '/':
1847 if (f2 == 0.0) {
1848 if (const_wanted)
1849 tcc_error("division by zero in constant");
1850 goto general_case;
1852 f1 /= f2;
1853 break;
1854 /* XXX: also handle tests ? */
1855 default:
1856 goto general_case;
1858 /* XXX: overflow test ? */
1859 if (v1->type.t == VT_FLOAT) {
1860 v1->c.f = f1;
1861 } else if (v1->type.t == VT_DOUBLE) {
1862 v1->c.d = f1;
1863 } else {
1864 v1->c.ld = f1;
1866 vtop--;
1867 } else {
1868 general_case:
1869 if (!nocode_wanted) {
1870 gen_opf(op);
1871 } else {
1872 vtop--;
1877 static int pointed_size(CType *type)
1879 int align;
1880 return type_size(pointed_type(type), &align);
1883 static void vla_runtime_pointed_size(CType *type)
1885 int align;
1886 vla_runtime_type_size(pointed_type(type), &align);
1889 static inline int is_null_pointer(SValue *p)
1891 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1892 return 0;
1893 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1894 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1895 ((p->type.t & VT_BTYPE) == VT_PTR &&
1896 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1899 static inline int is_integer_btype(int bt)
1901 return (bt == VT_BYTE || bt == VT_SHORT ||
1902 bt == VT_INT || bt == VT_LLONG);
1905 /* check types for comparison or subtraction of pointers */
1906 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1908 CType *type1, *type2, tmp_type1, tmp_type2;
1909 int bt1, bt2;
1911 /* null pointers are accepted for all comparisons, as in gcc */
1912 if (is_null_pointer(p1) || is_null_pointer(p2))
1913 return;
1914 type1 = &p1->type;
1915 type2 = &p2->type;
1916 bt1 = type1->t & VT_BTYPE;
1917 bt2 = type2->t & VT_BTYPE;
1918 /* accept comparison between pointer and integer with a warning */
1919 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1920 if (op != TOK_LOR && op != TOK_LAND )
1921 tcc_warning("comparison between pointer and integer");
1922 return;
1925 /* both must be pointers or implicit function pointers */
1926 if (bt1 == VT_PTR) {
1927 type1 = pointed_type(type1);
1928 } else if (bt1 != VT_FUNC)
1929 goto invalid_operands;
1931 if (bt2 == VT_PTR) {
1932 type2 = pointed_type(type2);
1933 } else if (bt2 != VT_FUNC) {
1934 invalid_operands:
1935 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1937 if ((type1->t & VT_BTYPE) == VT_VOID ||
1938 (type2->t & VT_BTYPE) == VT_VOID)
1939 return;
1940 tmp_type1 = *type1;
1941 tmp_type2 = *type2;
1942 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1943 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1944 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1945 /* gcc-like error if '-' is used */
1946 if (op == '-')
1947 goto invalid_operands;
1948 else
1949 tcc_warning("comparison of distinct pointer types lacks a cast");
1953 /* generic gen_op: handles type problems */
1954 ST_FUNC void gen_op(int op)
1956 int u, t1, t2, bt1, bt2, t;
1957 CType type1;
1959 redo:
1960 t1 = vtop[-1].type.t;
1961 t2 = vtop[0].type.t;
1962 bt1 = t1 & VT_BTYPE;
1963 bt2 = t2 & VT_BTYPE;
1965 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1966 tcc_error("operation on a struct");
1967 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1968 if (bt2 == VT_FUNC) {
1969 mk_pointer(&vtop->type);
1970 gaddrof();
1972 if (bt1 == VT_FUNC) {
1973 vswap();
1974 mk_pointer(&vtop->type);
1975 gaddrof();
1976 vswap();
1978 goto redo;
1979 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1980 /* at least one operand is a pointer */
1981 /* relational op: both operands must be pointers */
1982 if (op >= TOK_ULT && op <= TOK_LOR) {
1983 check_comparison_pointer_types(vtop - 1, vtop, op);
1984 /* pointers are handled as unsigned */
1985 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1986 t = VT_LLONG | VT_UNSIGNED;
1987 #else
1988 t = VT_INT | VT_UNSIGNED;
1989 #endif
1990 goto std_op;
1992 /* if both pointers, then it must be the '-' op */
1993 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1994 if (op != '-')
1995 tcc_error("cannot use pointers here");
1996 check_comparison_pointer_types(vtop - 1, vtop, op);
1997 /* XXX: check that types are compatible */
1998 if (vtop[-1].type.t & VT_VLA) {
1999 vla_runtime_pointed_size(&vtop[-1].type);
2000 } else {
2001 vpushi(pointed_size(&vtop[-1].type));
2003 vrott(3);
2004 gen_opic(op);
2005 /* set to integer type */
2006 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2007 vtop->type.t = VT_LLONG;
2008 #else
2009 vtop->type.t = VT_INT;
2010 #endif
2011 vswap();
2012 gen_op(TOK_PDIV);
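/* pointer difference: the byte difference computed above is divided by the
   element size with TOK_PDIV, which marks the division as exact so it may be
   strength-reduced to a shift. */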
2013 } else {
2014 /* exactly one pointer : must be '+' or '-'. */
2015 if (op != '-' && op != '+')
2016 tcc_error("cannot use pointers here");
2017 /* Put pointer as first operand */
2018 if (bt2 == VT_PTR) {
2019 vswap();
2020 swap(&t1, &t2);
2022 #if PTR_SIZE == 4
2023 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2024 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2025 gen_cast(&int_type);
2026 #endif
2027 type1 = vtop[-1].type;
2028 type1.t &= ~VT_ARRAY;
2029 if (vtop[-1].type.t & VT_VLA)
2030 vla_runtime_pointed_size(&vtop[-1].type);
2031 else {
2032 u = pointed_size(&vtop[-1].type);
2033 if (u < 0)
2034 tcc_error("unknown array element size");
2035 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2036 vpushll(u);
2037 #else
2038 /* XXX: cast to int ? (long long case) */
2039 vpushi(u);
2040 #endif
2042 gen_op('*');
2043 #if 0
2044 /* #ifdef CONFIG_TCC_BCHECK
2045 The main reason for removing this code:
2046 #include <stdio.h>
2047 int main ()
2049 int v[10];
2050 int i = 10;
2051 int j = 9;
2052 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2053 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2055 When this code is on, the output looks like
2056 v+i-j = 0xfffffffe
2057 v+(i-j) = 0xbff84000 */
2059 /* if evaluating constant expression, no code should be
2060 generated, so no bound check */
2061 if (tcc_state->do_bounds_check && !const_wanted) {
2062 /* if bounded pointers, we generate a special code to
2063 test bounds */
2064 if (op == '-') {
2065 vpushi(0);
2066 vswap();
2067 gen_op('-');
2069 gen_bounded_ptr_add();
2070 } else
2071 #endif
2073 gen_opic(op);
2075 /* restore the type in case gen_opic() swapped the operands */
2076 vtop->type = type1;
2078 } else if (is_float(bt1) || is_float(bt2)) {
2079 /* compute bigger type and do implicit casts */
2080 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2081 t = VT_LDOUBLE;
2082 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2083 t = VT_DOUBLE;
2084 } else {
2085 t = VT_FLOAT;
2087 /* floats can only be used for a few operations */
2088 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2089 (op < TOK_ULT || op > TOK_GT))
2090 tcc_error("invalid operands for binary operation");
2091 goto std_op;
2092 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2093 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2094 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2095 t |= VT_UNSIGNED;
2096 goto std_op;
2097 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2098 /* cast to biggest op */
2099 t = VT_LLONG;
2100 /* convert to unsigned if it does not fit in a long long */
2101 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2102 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2103 t |= VT_UNSIGNED;
2104 goto std_op;
2105 } else {
2106 /* integer operations */
2107 t = VT_INT;
2108 /* convert to unsigned if it does not fit in an integer */
2109 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2110 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2111 t |= VT_UNSIGNED;
2112 std_op:
2113 /* XXX: currently, some unsigned operations are explicit, so
2114 we modify them here */
2115 if (t & VT_UNSIGNED) {
2116 if (op == TOK_SAR)
2117 op = TOK_SHR;
2118 else if (op == '/')
2119 op = TOK_UDIV;
2120 else if (op == '%')
2121 op = TOK_UMOD;
2122 else if (op == TOK_LT)
2123 op = TOK_ULT;
2124 else if (op == TOK_GT)
2125 op = TOK_UGT;
2126 else if (op == TOK_LE)
2127 op = TOK_ULE;
2128 else if (op == TOK_GE)
2129 op = TOK_UGE;
2131 vswap();
2132 type1.t = t;
2133 gen_cast(&type1);
2134 vswap();
2135 /* special case for shifts and long long: we keep the shift as
2136 an integer */
2137 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2138 type1.t = VT_INT;
2139 gen_cast(&type1);
2140 if (is_float(t))
2141 gen_opif(op);
2142 else
2143 gen_opic(op);
2144 if (op >= TOK_ULT && op <= TOK_GT) {
2145 /* relational op: the result is an int */
2146 vtop->type.t = VT_INT;
2147 } else {
2148 vtop->type.t = t;
2151 // Make sure that we have converted to an rvalue:
2152 if (vtop->r & VT_LVAL && !nocode_wanted)
2153 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2156 #ifndef TCC_TARGET_ARM
2157 /* generic itof for unsigned long long case */
2158 static void gen_cvt_itof1(int t)
2160 #ifdef TCC_TARGET_ARM64
2161 gen_cvt_itof(t);
2162 #else
2163 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2164 (VT_LLONG | VT_UNSIGNED)) {
2166 if (t == VT_FLOAT)
2167 vpush_global_sym(&func_old_type, TOK___floatundisf);
2168 #if LDOUBLE_SIZE != 8
2169 else if (t == VT_LDOUBLE)
2170 vpush_global_sym(&func_old_type, TOK___floatundixf);
2171 #endif
2172 else
2173 vpush_global_sym(&func_old_type, TOK___floatundidf);
2174 vrott(2);
2175 gfunc_call(1);
2176 vpushi(0);
2177 vtop->r = reg_fret(t);
2178 } else {
2179 gen_cvt_itof(t);
2181 #endif
2183 #endif
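/* Illustration: most of the supported targets only provide a signed
   64-bit integer to float conversion, so for e.g.
   "unsigned long long x; double d = x;" the value is handed to the
   __floatundidf helper pushed above instead of gen_cvt_itof(). */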
2185 /* generic ftoi for unsigned long long case */
2186 static void gen_cvt_ftoi1(int t)
2188 #ifdef TCC_TARGET_ARM64
2189 gen_cvt_ftoi(t);
2190 #else
2191 int st;
2193 if (t == (VT_LLONG | VT_UNSIGNED)) {
2194 /* not handled natively */
2195 st = vtop->type.t & VT_BTYPE;
2196 if (st == VT_FLOAT)
2197 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2198 #if LDOUBLE_SIZE != 8
2199 else if (st == VT_LDOUBLE)
2200 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2201 #endif
2202 else
2203 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2204 vrott(2);
2205 gfunc_call(1);
2206 vpushi(0);
2207 vtop->r = REG_IRET;
2208 vtop->r2 = REG_LRET;
2209 } else {
2210 gen_cvt_ftoi(t);
2212 #endif
2215 /* force char or short cast */
2216 static void force_charshort_cast(int t)
2218 int bits, dbt;
2219 dbt = t & VT_BTYPE;
2220 /* XXX: add optimization if lvalue : just change type and offset */
2221 if (dbt == VT_BYTE)
2222 bits = 8;
2223 else
2224 bits = 16;
2225 if (t & VT_UNSIGNED) {
2226 vpushi((1 << bits) - 1);
2227 gen_op('&');
2228 } else {
2229 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2230 bits = 64 - bits;
2231 else
2232 bits = 32 - bits;
2233 vpushi(bits);
2234 gen_op(TOK_SHL);
2235 /* the result must be signed, otherwise gen_op() turns the SAR
2236 into a logical shift (SHR). This was not the case when "t" was
2237 a signed short and the last value on the stack was an unsigned int */
2238 vtop->type.t &= ~VT_UNSIGNED;
2239 vpushi(bits);
2240 gen_op(TOK_SAR);
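/* Illustration of the shift-based sign extension above: casting the
   int value 0xf0 to signed char shifts it left by 24 (0xf0000000)
   and arithmetic-shifts it right by 24 again, giving -16; the
   unsigned branch would instead just mask with 0xff, giving 240. */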
2244 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2245 static void gen_cast(CType *type)
2247 int sbt, dbt, sf, df, c, p;
2249 /* special delayed cast for char/short */
2250 /* XXX: in some cases (multiple cascaded casts), it may still
2251 be incorrect */
2252 if (vtop->r & VT_MUSTCAST) {
2253 vtop->r &= ~VT_MUSTCAST;
2254 force_charshort_cast(vtop->type.t);
2257 /* bitfields first get cast to ints */
2258 if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
2259 gv(RC_INT);
2262 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2263 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2265 if (sbt != dbt) {
2266 sf = is_float(sbt);
2267 df = is_float(dbt);
2268 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2269 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2270 if (c) {
2271 /* constant case: we can do it now */
2272 /* XXX: in ISOC, cannot do it if error in convert */
2273 if (sbt == VT_FLOAT)
2274 vtop->c.ld = vtop->c.f;
2275 else if (sbt == VT_DOUBLE)
2276 vtop->c.ld = vtop->c.d;
2278 if (df) {
2279 if ((sbt & VT_BTYPE) == VT_LLONG) {
2280 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2281 vtop->c.ld = vtop->c.i;
2282 else
2283 vtop->c.ld = -(long double)-vtop->c.i;
2284 } else if(!sf) {
2285 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2286 vtop->c.ld = (uint32_t)vtop->c.i;
2287 else
2288 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2291 if (dbt == VT_FLOAT)
2292 vtop->c.f = (float)vtop->c.ld;
2293 else if (dbt == VT_DOUBLE)
2294 vtop->c.d = (double)vtop->c.ld;
2295 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2296 vtop->c.i = vtop->c.ld;
2297 } else if (sf && dbt == VT_BOOL) {
2298 vtop->c.i = (vtop->c.ld != 0);
2299 } else {
2300 if(sf)
2301 vtop->c.i = vtop->c.ld;
2302 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2304 else if (sbt & VT_UNSIGNED)
2305 vtop->c.i = (uint32_t)vtop->c.i;
2306 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2307 else if (sbt == VT_PTR)
2309 #endif
2310 else if (sbt != VT_LLONG)
2311 vtop->c.i = ((uint32_t)vtop->c.i |
2312 -(vtop->c.i & 0x80000000));
2314 if (dbt == (VT_LLONG|VT_UNSIGNED))
2316 else if (dbt == VT_BOOL)
2317 vtop->c.i = (vtop->c.i != 0);
2318 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2319 else if (dbt == VT_PTR)
2321 #endif
2322 else if (dbt != VT_LLONG) {
2323 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2324 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2325 0xffffffff);
2326 vtop->c.i &= m;
2327 if (!(dbt & VT_UNSIGNED))
2328 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2331 } else if (p && dbt == VT_BOOL) {
2332 vtop->r = VT_CONST;
2333 vtop->c.i = 1;
2334 } else if (!nocode_wanted) {
2335 /* non constant case: generate code */
2336 if (sf && df) {
2337 /* convert from fp to fp */
2338 gen_cvt_ftof(dbt);
2339 } else if (df) {
2340 /* convert int to fp */
2341 gen_cvt_itof1(dbt);
2342 } else if (sf) {
2343 /* convert fp to int */
2344 if (dbt == VT_BOOL) {
2345 vpushi(0);
2346 gen_op(TOK_NE);
2347 } else {
2348 /* we handle char/short/etc... with generic code */
2349 if (dbt != (VT_INT | VT_UNSIGNED) &&
2350 dbt != (VT_LLONG | VT_UNSIGNED) &&
2351 dbt != VT_LLONG)
2352 dbt = VT_INT;
2353 gen_cvt_ftoi1(dbt);
2354 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2355 /* additional cast for char/short... */
2356 vtop->type.t = dbt;
2357 gen_cast(type);
2360 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2361 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2362 if ((sbt & VT_BTYPE) != VT_LLONG) {
2363 /* scalar to long long */
2364 /* machine independent conversion */
2365 gv(RC_INT);
2366 /* generate high word */
2367 if (sbt == (VT_INT | VT_UNSIGNED)) {
2368 vpushi(0);
2369 gv(RC_INT);
2370 } else {
2371 if (sbt == VT_PTR) {
2372 /* cast from pointer to int before we apply
2373 shift operation, which pointers don't support*/
2374 gen_cast(&int_type);
2376 gv_dup();
2377 vpushi(31);
2378 gen_op(TOK_SAR);
2380 /* patch second register */
2381 vtop[-1].r2 = vtop->r;
2382 vpop();
2384 #else
2385 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2386 (dbt & VT_BTYPE) == VT_PTR ||
2387 (dbt & VT_BTYPE) == VT_FUNC) {
2388 if ((sbt & VT_BTYPE) != VT_LLONG &&
2389 (sbt & VT_BTYPE) != VT_PTR &&
2390 (sbt & VT_BTYPE) != VT_FUNC) {
2391 /* need to convert from 32bit to 64bit */
2392 gv(RC_INT);
2393 if (sbt != (VT_INT | VT_UNSIGNED)) {
2394 #if defined(TCC_TARGET_ARM64)
2395 gen_cvt_sxtw();
2396 #elif defined(TCC_TARGET_X86_64)
2397 int r = gv(RC_INT);
2398 /* x86_64 specific: movslq */
2399 o(0x6348);
2400 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2401 #else
2402 #error
2403 #endif
2406 #endif
2407 } else if (dbt == VT_BOOL) {
2408 /* scalar to bool */
2409 vpushi(0);
2410 gen_op(TOK_NE);
2411 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2412 (dbt & VT_BTYPE) == VT_SHORT) {
2413 if (sbt == VT_PTR) {
2414 vtop->type.t = VT_INT;
2415 tcc_warning("nonportable conversion from pointer to char/short");
2417 force_charshort_cast(dbt);
2418 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2419 } else if ((dbt & VT_BTYPE) == VT_INT) {
2420 /* scalar to int */
2421 if ((sbt & VT_BTYPE) == VT_LLONG) {
2422 /* from long long: just take low order word */
2423 lexpand();
2424 vpop();
2426 /* if lvalue and single word type, nothing to do because
2427 the lvalue already contains the real type size (see
2428 VT_LVAL_xxx constants) */
2429 #endif
2432 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2433 /* if we are casting between pointer types,
2434 we must update the VT_LVAL_xxx size */
2435 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2436 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2438 vtop->type = *type;
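/* Example of the constant path above: "(unsigned char)0x1ff" is
   folded at compile time to 0xff (masked, no sign extension), while
   "(signed char)0xff" folds to -1 because the sign bit of the masked
   value is propagated. */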
2441 /* return type size as known at compile time. Put alignment at 'a' */
2442 ST_FUNC int type_size(CType *type, int *a)
2444 Sym *s;
2445 int bt;
2447 bt = type->t & VT_BTYPE;
2448 if (bt == VT_STRUCT) {
2449 /* struct/union */
2450 s = type->ref;
2451 *a = s->r;
2452 return s->c;
2453 } else if (bt == VT_PTR) {
2454 if (type->t & VT_ARRAY) {
2455 int ts;
2457 s = type->ref;
2458 ts = type_size(&s->type, a);
2460 if (ts < 0 && s->c < 0)
2461 ts = -ts;
2463 return ts * s->c;
2464 } else {
2465 *a = PTR_SIZE;
2466 return PTR_SIZE;
2468 } else if (bt == VT_LDOUBLE) {
2469 *a = LDOUBLE_ALIGN;
2470 return LDOUBLE_SIZE;
2471 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2472 #ifdef TCC_TARGET_I386
2473 #ifdef TCC_TARGET_PE
2474 *a = 8;
2475 #else
2476 *a = 4;
2477 #endif
2478 #elif defined(TCC_TARGET_ARM)
2479 #ifdef TCC_ARM_EABI
2480 *a = 8;
2481 #else
2482 *a = 4;
2483 #endif
2484 #else
2485 *a = 8;
2486 #endif
2487 return 8;
2488 } else if (bt == VT_INT || bt == VT_FLOAT) {
2489 *a = 4;
2490 return 4;
2491 } else if (bt == VT_SHORT) {
2492 *a = 2;
2493 return 2;
2494 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2495 *a = 8;
2496 return 16;
2497 } else if (bt == VT_ENUM) {
2498 *a = 4;
2499 /* Enums might be incomplete, so don't just return '4' here. */
2500 return type->ref->c;
2501 } else {
2502 /* char, void, function, _Bool */
2503 *a = 1;
2504 return 1;
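/* Typical results (target dependent): int/float -> size 4, align 4;
   short -> 2, 2; char/_Bool/void -> 1, 1; long long/double -> size 8,
   with alignment 4 on plain i386 (8 under TCC_TARGET_PE, ARM EABI and
   the 64-bit targets, as above); structs return the size/alignment
   previously computed by struct_layout(). */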
2508 /* push the type size as known at run time on top of the value stack. Put
2509 alignment at 'a' */
2510 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2512 if (type->t & VT_VLA) {
2513 type_size(&type->ref->type, a);
2514 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2515 } else {
2516 vpushi(type_size(type, a));
2520 static void vla_sp_restore(void) {
2521 if (vlas_in_scope) {
2522 gen_vla_sp_restore(vla_sp_loc);
2526 static void vla_sp_restore_root(void) {
2527 if (vlas_in_scope) {
2528 gen_vla_sp_restore(vla_sp_root_loc);
2532 /* return the pointed type of t */
2533 static inline CType *pointed_type(CType *type)
2535 return &type->ref->type;
2538 /* modify type so that it is a pointer to the original type. */
2539 ST_FUNC void mk_pointer(CType *type)
2541 Sym *s;
2542 s = sym_push(SYM_FIELD, type, 0, -1);
2543 type->t = VT_PTR | (type->t & ~VT_TYPE);
2544 type->ref = s;
2547 /* compare function types. OLD functions match any new functions */
2548 static int is_compatible_func(CType *type1, CType *type2)
2550 Sym *s1, *s2;
2552 s1 = type1->ref;
2553 s2 = type2->ref;
2554 if (!is_compatible_types(&s1->type, &s2->type))
2555 return 0;
2556 /* check func_call */
2557 if (s1->a.func_call != s2->a.func_call)
2558 return 0;
2559 /* XXX: not complete */
2560 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2561 return 1;
2562 if (s1->c != s2->c)
2563 return 0;
2564 while (s1 != NULL) {
2565 if (s2 == NULL)
2566 return 0;
2567 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2568 return 0;
2569 s1 = s1->next;
2570 s2 = s2->next;
2572 if (s2)
2573 return 0;
2574 return 1;
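/* Example: an old-style declaration "int f();" (FUNC_OLD) is treated
   as compatible with "int f(int);", but "int f(int);" and
   "int f(long long);" are not, and neither are two prototypes with
   different calling conventions (func_call check above). */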
2577 /* return true if type1 and type2 are the same. If unqualified is
2578 true, qualifiers on the types are ignored.
2580 - enums are not checked as gcc __builtin_types_compatible_p ()
2582 static int compare_types(CType *type1, CType *type2, int unqualified)
2584 int bt1, t1, t2;
2586 t1 = type1->t & VT_TYPE;
2587 t2 = type2->t & VT_TYPE;
2588 if (unqualified) {
2589 /* strip qualifiers before comparing */
2590 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2591 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2593 /* Default vs. explicit signedness only matters for char */
2594 if ((t1 & VT_BTYPE) != VT_BYTE) {
2595 t1 &= ~VT_DEFSIGN;
2596 t2 &= ~VT_DEFSIGN;
2598 /* An enum is compatible with (unsigned) int. Ideally we would
2599 store the enum's signedness in type->ref.a.<some_bit> and
2600 only accept unsigned enums with unsigned int and vice versa.
2601 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2602 from pointer target types, so we can't add it here either. */
2603 if ((t1 & VT_BTYPE) == VT_ENUM) {
2604 t1 = VT_INT;
2605 if (type1->ref->a.unsigned_enum)
2606 t1 |= VT_UNSIGNED;
2608 if ((t2 & VT_BTYPE) == VT_ENUM) {
2609 t2 = VT_INT;
2610 if (type2->ref->a.unsigned_enum)
2611 t2 |= VT_UNSIGNED;
2613 /* XXX: bitfields ? */
2614 if (t1 != t2)
2615 return 0;
2616 /* test more complicated cases */
2617 bt1 = t1 & VT_BTYPE;
2618 if (bt1 == VT_PTR) {
2619 type1 = pointed_type(type1);
2620 type2 = pointed_type(type2);
2621 return is_compatible_types(type1, type2);
2622 } else if (bt1 == VT_STRUCT) {
2623 return (type1->ref == type2->ref);
2624 } else if (bt1 == VT_FUNC) {
2625 return is_compatible_func(type1, type2);
2626 } else {
2627 return 1;
2631 /* return true if type1 and type2 are exactly the same (including
2632 qualifiers).
2634 static int is_compatible_types(CType *type1, CType *type2)
2636 return compare_types(type1,type2,0);
2639 /* return true if type1 and type2 are the same (ignoring qualifiers).
2641 static int is_compatible_parameter_types(CType *type1, CType *type2)
2643 return compare_types(type1,type2,1);
2646 /* print a type. If 'varstr' is not NULL, then the variable is also
2647 printed in the type */
2648 /* XXX: union */
2649 /* XXX: add array and function pointers */
2650 static void type_to_str(char *buf, int buf_size,
2651 CType *type, const char *varstr)
2653 int bt, v, t;
2654 Sym *s, *sa;
2655 char buf1[256];
2656 const char *tstr;
2658 t = type->t & VT_TYPE;
2659 bt = t & VT_BTYPE;
2660 buf[0] = '\0';
2661 if (t & VT_CONSTANT)
2662 pstrcat(buf, buf_size, "const ");
2663 if (t & VT_VOLATILE)
2664 pstrcat(buf, buf_size, "volatile ");
2665 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2666 pstrcat(buf, buf_size, "unsigned ");
2667 else if (t & VT_DEFSIGN)
2668 pstrcat(buf, buf_size, "signed ");
2669 switch(bt) {
2670 case VT_VOID:
2671 tstr = "void";
2672 goto add_tstr;
2673 case VT_BOOL:
2674 tstr = "_Bool";
2675 goto add_tstr;
2676 case VT_BYTE:
2677 tstr = "char";
2678 goto add_tstr;
2679 case VT_SHORT:
2680 tstr = "short";
2681 goto add_tstr;
2682 case VT_INT:
2683 tstr = "int";
2684 goto add_tstr;
2685 case VT_LONG:
2686 tstr = "long";
2687 goto add_tstr;
2688 case VT_LLONG:
2689 tstr = "long long";
2690 goto add_tstr;
2691 case VT_FLOAT:
2692 tstr = "float";
2693 goto add_tstr;
2694 case VT_DOUBLE:
2695 tstr = "double";
2696 goto add_tstr;
2697 case VT_LDOUBLE:
2698 tstr = "long double";
2699 add_tstr:
2700 pstrcat(buf, buf_size, tstr);
2701 break;
2702 case VT_ENUM:
2703 case VT_STRUCT:
2704 if (bt == VT_STRUCT)
2705 tstr = "struct ";
2706 else
2707 tstr = "enum ";
2708 pstrcat(buf, buf_size, tstr);
2709 v = type->ref->v & ~SYM_STRUCT;
2710 if (v >= SYM_FIRST_ANOM)
2711 pstrcat(buf, buf_size, "<anonymous>");
2712 else
2713 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2714 break;
2715 case VT_FUNC:
2716 s = type->ref;
2717 type_to_str(buf, buf_size, &s->type, varstr);
2718 pstrcat(buf, buf_size, "(");
2719 sa = s->next;
2720 while (sa != NULL) {
2721 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2722 pstrcat(buf, buf_size, buf1);
2723 sa = sa->next;
2724 if (sa)
2725 pstrcat(buf, buf_size, ", ");
2727 pstrcat(buf, buf_size, ")");
2728 goto no_var;
2729 case VT_PTR:
2730 s = type->ref;
2731 if (t & VT_ARRAY) {
2732 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2733 type_to_str(buf, buf_size, &s->type, buf1);
2734 goto no_var;
2736 pstrcpy(buf1, sizeof(buf1), "*");
2737 if (t & VT_CONSTANT)
2738 pstrcat(buf1, buf_size, "const ");
2739 if (t & VT_VOLATILE)
2740 pstrcat(buf1, buf_size, "volatile ");
2741 if (varstr)
2742 pstrcat(buf1, sizeof(buf1), varstr);
2743 type_to_str(buf, buf_size, &s->type, buf1);
2744 goto no_var;
2746 if (varstr) {
2747 pstrcat(buf, buf_size, " ");
2748 pstrcat(buf, buf_size, varstr);
2750 no_var: ;
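/* Example output: an int with varstr "x" yields "int x"; a 10-element
   char array with varstr "a" yields "char a[10]"; a function type is
   printed as its return type (with the name attached) followed by the
   parameter list in parentheses. */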
2753 /* verify type compatibility to store vtop in 'dt' type, and generate
2754 casts if needed. */
2755 static void gen_assign_cast(CType *dt)
2757 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2758 char buf1[256], buf2[256];
2759 int dbt, sbt;
2761 st = &vtop->type; /* source type */
2762 dbt = dt->t & VT_BTYPE;
2763 sbt = st->t & VT_BTYPE;
2764 if (sbt == VT_VOID || dbt == VT_VOID) {
2765 if (sbt == VT_VOID && dbt == VT_VOID)
2766 ; /*
2767 It is Ok if both are void
2768 A test program:
2769 void func1() {}
2770 void func2() {
2771 return func1();
2773 gcc accepts this program
2775 else
2776 tcc_error("cannot cast from/to void");
2778 if (dt->t & VT_CONSTANT)
2779 tcc_warning("assignment of read-only location");
2780 switch(dbt) {
2781 case VT_PTR:
2782 /* special cases for pointers */
2783 /* '0' can also be a pointer */
2784 if (is_null_pointer(vtop))
2785 goto type_ok;
2786 /* accept implicit pointer to integer cast with warning */
2787 if (is_integer_btype(sbt)) {
2788 tcc_warning("assignment makes pointer from integer without a cast");
2789 goto type_ok;
2791 type1 = pointed_type(dt);
2792 /* a function is implicitly a function pointer */
2793 if (sbt == VT_FUNC) {
2794 if ((type1->t & VT_BTYPE) != VT_VOID &&
2795 !is_compatible_types(pointed_type(dt), st))
2796 tcc_warning("assignment from incompatible pointer type");
2797 goto type_ok;
2799 if (sbt != VT_PTR)
2800 goto error;
2801 type2 = pointed_type(st);
2802 if ((type1->t & VT_BTYPE) == VT_VOID ||
2803 (type2->t & VT_BTYPE) == VT_VOID) {
2804 /* void * can match anything */
2805 } else {
2806 /* exact type match, except for qualifiers */
2807 tmp_type1 = *type1;
2808 tmp_type2 = *type2;
2809 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2810 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2811 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2812 /* Like GCC, don't warn by default for mere changes
2813 in pointer target signedness. Do warn for different
2814 base types, though, in particular for unsigned enums
2815 and signed int targets. */
2816 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2817 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2818 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2820 else
2821 tcc_warning("assignment from incompatible pointer type");
2824 /* check const and volatile */
2825 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2826 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2827 tcc_warning("assignment discards qualifiers from pointer target type");
2828 break;
2829 case VT_BYTE:
2830 case VT_SHORT:
2831 case VT_INT:
2832 case VT_LLONG:
2833 if (sbt == VT_PTR || sbt == VT_FUNC) {
2834 tcc_warning("assignment makes integer from pointer without a cast");
2835 } else if (sbt == VT_STRUCT) {
2836 goto case_VT_STRUCT;
2838 /* XXX: more tests */
2839 break;
2840 case VT_STRUCT:
2841 case_VT_STRUCT:
2842 tmp_type1 = *dt;
2843 tmp_type2 = *st;
2844 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2846 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2847 error:
2848 type_to_str(buf1, sizeof(buf1), st, NULL);
2849 type_to_str(buf2, sizeof(buf2), dt, NULL);
2850 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2852 break;
2854 type_ok:
2855 gen_cast(dt);
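/* Examples of the checks above: "char *p = 42;" warns "assignment
   makes pointer from integer without a cast"; assigning a
   "const char *" to a "char *" warns that qualifiers are discarded
   from the pointer target type; assigning between incompatible
   struct types is a hard error via the 'error:' path. */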
2858 /* store vtop in lvalue pushed on stack */
2859 ST_FUNC void vstore(void)
2861 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2863 ft = vtop[-1].type.t;
2864 sbt = vtop->type.t & VT_BTYPE;
2865 dbt = ft & VT_BTYPE;
2866 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2867 (sbt == VT_INT && dbt == VT_SHORT))
2868 && !(vtop->type.t & VT_BITFIELD)) {
2869 /* optimize char/short casts */
2870 delayed_cast = VT_MUSTCAST;
2871 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2872 ((1 << VT_STRUCT_SHIFT) - 1));
2873 /* XXX: factorize */
2874 if (ft & VT_CONSTANT)
2875 tcc_warning("assignment of read-only location");
2876 } else {
2877 delayed_cast = 0;
2878 if (!(ft & VT_BITFIELD))
2879 gen_assign_cast(&vtop[-1].type);
2882 if (sbt == VT_STRUCT) {
2883 /* if structure, only generate pointer */
2884 /* structure assignment : generate memcpy */
2885 /* XXX: optimize if small size */
2886 if (!nocode_wanted) {
2887 size = type_size(&vtop->type, &align);
2889 /* destination */
2890 vswap();
2891 vtop->type.t = VT_PTR;
2892 gaddrof();
2894 /* address of memcpy() */
2895 #ifdef TCC_ARM_EABI
2896 if(!(align & 7))
2897 vpush_global_sym(&func_old_type, TOK_memcpy8);
2898 else if(!(align & 3))
2899 vpush_global_sym(&func_old_type, TOK_memcpy4);
2900 else
2901 #endif
2902 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2903 vpush_global_sym(&func_old_type, TOK_memmove);
2905 vswap();
2906 /* source */
2907 vpushv(vtop - 2);
2908 vtop->type.t = VT_PTR;
2909 gaddrof();
2910 /* type size */
2911 vpushi(size);
2912 gfunc_call(3);
2913 } else {
2914 vswap();
2915 vpop();
2917 /* leave source on stack */
2918 } else if (ft & VT_BITFIELD) {
2919 /* bitfield store handling */
2921 /* save lvalue as expression result (example: s.b = s.a = n;) */
2922 vdup(), vtop[-1] = vtop[-2];
2924 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2925 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2926 /* remove bit field info to avoid loops */
2927 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2929 if((ft & VT_BTYPE) == VT_BOOL) {
2930 gen_cast(&vtop[-1].type);
2931 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2934 /* duplicate destination */
2935 vdup();
2936 vtop[-1] = vtop[-2];
2938 /* mask and shift source */
2939 if((ft & VT_BTYPE) != VT_BOOL) {
2940 if((ft & VT_BTYPE) == VT_LLONG) {
2941 vpushll((1ULL << bit_size) - 1ULL);
2942 } else {
2943 vpushi((1 << bit_size) - 1);
2945 gen_op('&');
2947 vpushi(bit_pos);
2948 gen_op(TOK_SHL);
2949 /* load destination, mask and or with source */
2950 vswap();
2951 if((ft & VT_BTYPE) == VT_LLONG) {
2952 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2953 } else {
2954 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2956 gen_op('&');
2957 gen_op('|');
2958 /* store result */
2959 vstore();
2960 /* ... and discard */
2961 vpop();
2963 } else {
2964 if (!nocode_wanted) {
2965 #ifdef CONFIG_TCC_BCHECK
2966 /* bound check case */
2967 if (vtop[-1].r & VT_MUSTBOUND) {
2968 vswap();
2969 gbound();
2970 vswap();
2972 #endif
2973 rc = RC_INT;
2974 if (is_float(ft)) {
2975 rc = RC_FLOAT;
2976 #ifdef TCC_TARGET_X86_64
2977 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2978 rc = RC_ST0;
2979 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2980 rc = RC_FRET;
2982 #endif
2984 r = gv(rc); /* generate value */
2985 /* if lvalue was saved on stack, must read it */
2986 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2987 SValue sv;
2988 t = get_reg(RC_INT);
2989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2990 sv.type.t = VT_PTR;
2991 #else
2992 sv.type.t = VT_INT;
2993 #endif
2994 sv.r = VT_LOCAL | VT_LVAL;
2995 sv.c.i = vtop[-1].c.i;
2996 load(t, &sv);
2997 vtop[-1].r = t | VT_LVAL;
2999 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3000 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3001 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3002 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3003 #else
3004 if ((ft & VT_BTYPE) == VT_LLONG) {
3005 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3006 #endif
3007 vtop[-1].type.t = load_type;
3008 store(r, vtop - 1);
3009 vswap();
3010 /* convert to int to increment easily */
3011 vtop->type.t = addr_type;
3012 gaddrof();
3013 vpushi(load_size);
3014 gen_op('+');
3015 vtop->r |= VT_LVAL;
3016 vswap();
3017 vtop[-1].type.t = load_type;
3018 /* XXX: it works because r2 is spilled last ! */
3019 store(vtop->r2, vtop - 1);
3020 } else {
3021 store(r, vtop - 1);
3024 vswap();
3025 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3026 vtop->r |= delayed_cast;
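/* Example for the bitfield path above: with "struct { int a:3; } s;",
   the store "s.a = v" masks v with 0x7, shifts it to bit_pos, loads
   the destination word, clears the three target bits with the
   inverted mask, ORs in the shifted source and stores the word back. */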
3030 /* handle pre/post increment/decrement; 'post' is true for the post form, c is the token ++ or -- */
3031 ST_FUNC void inc(int post, int c)
3033 test_lvalue();
3034 vdup(); /* save lvalue */
3035 if (post) {
3036 if (!nocode_wanted)
3037 gv_dup(); /* duplicate value */
3038 else
3039 vdup(); /* duplicate value */
3040 vrotb(3);
3041 vrotb(3);
3043 /* add constant */
3044 vpushi(c - TOK_MID);
3045 gen_op('+');
3046 vstore(); /* store value */
3047 if (post)
3048 vpop(); /* if post op, return saved value */
3051 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3053 /* read the string */
3054 if (tok != TOK_STR)
3055 expect(msg);
3056 cstr_new(astr);
3057 while (tok == TOK_STR) {
3058 /* XXX: add \0 handling too ? */
3059 cstr_cat(astr, tokc.str.data, -1);
3060 next();
3062 cstr_ccat(astr, '\0');
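/* Adjacent string literals are concatenated here, so e.g.
   __attribute__((section("foo" "bar"))) ends up with the single
   section name "foobar". */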
3065 /* Parse GNUC __attribute__ extension. Currently, the following
3066 extensions are recognized:
3067 - aligned(n) : set data/function alignment.
3068 - packed : force data alignment to 1
3069 - section(x) : generate data/code in this section.
3070 - unused : currently ignored, but may be used someday.
3071 - regparm(n) : pass function parameters in registers (i386 only)
3073 static void parse_attribute(AttributeDef *ad)
3075 int t, n;
3076 CString astr;
3078 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3079 next();
3080 skip('(');
3081 skip('(');
3082 while (tok != ')') {
3083 if (tok < TOK_IDENT)
3084 expect("attribute name");
3085 t = tok;
3086 next();
3087 switch(t) {
3088 case TOK_SECTION1:
3089 case TOK_SECTION2:
3090 skip('(');
3091 parse_mult_str(&astr, "section name");
3092 ad->section = find_section(tcc_state, (char *)astr.data);
3093 skip(')');
3094 cstr_free(&astr);
3095 break;
3096 case TOK_ALIAS1:
3097 case TOK_ALIAS2:
3098 skip('(');
3099 parse_mult_str(&astr, "alias(\"target\")");
3100 ad->alias_target = /* save string as token, for later */
3101 tok_alloc((char*)astr.data, astr.size-1)->tok;
3102 skip(')');
3103 cstr_free(&astr);
3104 break;
3105 case TOK_VISIBILITY1:
3106 case TOK_VISIBILITY2:
3107 skip('(');
3108 parse_mult_str(&astr,
3109 "visibility(\"default|hidden|internal|protected\")");
3110 if (!strcmp (astr.data, "default"))
3111 ad->a.visibility = STV_DEFAULT;
3112 else if (!strcmp (astr.data, "hidden"))
3113 ad->a.visibility = STV_HIDDEN;
3114 else if (!strcmp (astr.data, "internal"))
3115 ad->a.visibility = STV_INTERNAL;
3116 else if (!strcmp (astr.data, "protected"))
3117 ad->a.visibility = STV_PROTECTED;
3118 else
3119 expect("visibility(\"default|hidden|internal|protected\")");
3120 skip(')');
3121 cstr_free(&astr);
3122 break;
3123 case TOK_ALIGNED1:
3124 case TOK_ALIGNED2:
3125 if (tok == '(') {
3126 next();
3127 n = expr_const();
3128 if (n <= 0 || (n & (n - 1)) != 0)
3129 tcc_error("alignment must be a positive power of two");
3130 skip(')');
3131 } else {
3132 n = MAX_ALIGN;
3134 ad->a.aligned = n;
3135 break;
3136 case TOK_PACKED1:
3137 case TOK_PACKED2:
3138 ad->a.packed = 1;
3139 break;
3140 case TOK_WEAK1:
3141 case TOK_WEAK2:
3142 ad->a.weak = 1;
3143 break;
3144 case TOK_UNUSED1:
3145 case TOK_UNUSED2:
3146 /* currently, no need to handle it because tcc does not
3147 track unused objects */
3148 break;
3149 case TOK_NORETURN1:
3150 case TOK_NORETURN2:
3151 /* currently ignored: tcc does not make use of
3152 noreturn information */
3153 break;
3154 case TOK_CDECL1:
3155 case TOK_CDECL2:
3156 case TOK_CDECL3:
3157 ad->a.func_call = FUNC_CDECL;
3158 break;
3159 case TOK_STDCALL1:
3160 case TOK_STDCALL2:
3161 case TOK_STDCALL3:
3162 ad->a.func_call = FUNC_STDCALL;
3163 break;
3164 #ifdef TCC_TARGET_I386
3165 case TOK_REGPARM1:
3166 case TOK_REGPARM2:
3167 skip('(');
3168 n = expr_const();
3169 if (n > 3)
3170 n = 3;
3171 else if (n < 0)
3172 n = 0;
3173 if (n > 0)
3174 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3175 skip(')');
3176 break;
3177 case TOK_FASTCALL1:
3178 case TOK_FASTCALL2:
3179 case TOK_FASTCALL3:
3180 ad->a.func_call = FUNC_FASTCALLW;
3181 break;
3182 #endif
3183 case TOK_MODE:
3184 skip('(');
3185 switch(tok) {
3186 case TOK_MODE_DI:
3187 ad->a.mode = VT_LLONG + 1;
3188 break;
3189 case TOK_MODE_QI:
3190 ad->a.mode = VT_BYTE + 1;
3191 break;
3192 case TOK_MODE_HI:
3193 ad->a.mode = VT_SHORT + 1;
3194 break;
3195 case TOK_MODE_SI:
3196 case TOK_MODE_word:
3197 ad->a.mode = VT_INT + 1;
3198 break;
3199 default:
3200 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3201 break;
3203 next();
3204 skip(')');
3205 break;
3206 case TOK_DLLEXPORT:
3207 ad->a.func_export = 1;
3208 break;
3209 case TOK_DLLIMPORT:
3210 ad->a.func_import = 1;
3211 break;
3212 default:
3213 if (tcc_state->warn_unsupported)
3214 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3215 /* skip parameters */
3216 if (tok == '(') {
3217 int parenthesis = 0;
3218 do {
3219 if (tok == '(')
3220 parenthesis++;
3221 else if (tok == ')')
3222 parenthesis--;
3223 next();
3224 } while (parenthesis && tok != -1);
3226 break;
3228 if (tok != ',')
3229 break;
3230 next();
3232 skip(')');
3233 skip(')');
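/* Example of input handled above:
       int buf[32] __attribute__((aligned(16), section(".mydata")));
       void fatal(void) __attribute__((noreturn));
   aligned() must be given a positive power of two; attributes that
   are not recognized are skipped (warning if warn_unsupported). */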
3237 static Sym * find_field (CType *type, int v)
3239 Sym *s = type->ref;
3240 v |= SYM_FIELD;
3241 while ((s = s->next) != NULL) {
3242 if ((s->v & SYM_FIELD) && (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3243 Sym *ret = find_field (&s->type, v);
3244 if (ret)
3245 return ret;
3247 if (s->v == v)
3248 break;
3250 return s;
3253 static void struct_layout(CType *type, AttributeDef *ad)
3255 int align, maxalign, offset, c;
3256 Sym *f;
3257 maxalign = 1;
3258 offset = 0;
3259 c = 0;
3260 for (f = type->ref->next; f; f = f->next) {
3261 int extra_bytes = f->c;
3262 int bit_pos;
3263 int size = type_size(&f->type, &align);
3264 if (f->type.t & VT_BITFIELD)
3265 bit_pos = (f->type.t >> VT_STRUCT_SHIFT) & 0x3f;
3266 else
3267 bit_pos = 0;
3268 if (f->r) {
3269 align = f->r;
3270 } else if (ad->a.packed) {
3271 align = 1;
3273 if (extra_bytes) c += extra_bytes;
3274 else if (bit_pos == 0) {
3275 if (type->ref->type.t == TOK_STRUCT) {
3276 c = (c + align - 1) & -align;
3277 offset = c;
3278 if (size > 0)
3279 c += size;
3280 } else {
3281 offset = 0;
3282 if (size > c)
3283 c = size;
3285 if (align > maxalign)
3286 maxalign = align;
3288 #if 0
3289 printf("set field %s offset=%d",
3290 get_tok_str(f->v & ~SYM_FIELD, NULL), offset);
3291 if (f->type.t & VT_BITFIELD) {
3292 printf(" pos=%d size=%d",
3293 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3294 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3296 printf("\n");
3297 #endif
3299 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3300 Sym *ass;
3301 /* An anonymous struct/union. Adjust member offsets
3302 to reflect the real offset of our containing struct.
3303 Also set the offset of this anon member inside
3304 the outer struct to be zero. This way it
3305 works both when accessing the field offset directly
3306 (from the base object) and when recursing into
3307 members in initializer handling. */
3308 int v2 = f->type.ref->v;
3309 if (!(v2 & SYM_FIELD) &&
3310 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3311 Sym **pps;
3312 /* This happens only with MS extensions. The
3313 anon member has a named struct type, so it
3314 potentially is shared with other references.
3315 We need to unshare members so we can modify
3316 them. */
3317 ass = f->type.ref;
3318 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3319 &f->type.ref->type, 0,
3320 f->type.ref->c);
3321 pps = &f->type.ref->next;
3322 while ((ass = ass->next) != NULL) {
3323 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3324 pps = &((*pps)->next);
3326 *pps = NULL;
3328 ass = f->type.ref;
3329 while ((ass = ass->next) != NULL)
3330 ass->c += offset;
3331 f->c = 0;
3332 } else {
3333 f->c = offset;
3336 f->r = 0;
3338 /* store size and alignment */
3339 type->ref->c = (c + maxalign - 1) & -maxalign;
3340 type->ref->r = maxalign;
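/* Example (assuming int has size and alignment 4): for
   "struct { char c; int i; }" the loop above places c at offset 0,
   pads to offset 4 for i, and records size 8, alignment 4; with
   __attribute__((packed)) every field alignment becomes 1, so i is
   placed at offset 1 and the recorded size is 5. */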
3343 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3344 static void struct_decl(CType *type, AttributeDef *ad, int u)
3346 int extra_bytes;
3347 int a, v, size, align, flexible, alignoverride;
3348 long c;
3349 int bit_size, bit_pos, bsize, bt, prevbt;
3350 Sym *s, *ss, **ps;
3351 AttributeDef ad1;
3352 CType type1, btype;
3354 a = tok; /* save decl type */
3355 next();
3356 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3357 parse_attribute(ad);
3358 if (tok != '{') {
3359 v = tok;
3360 next();
3361 /* struct already defined ? return it */
3362 if (v < TOK_IDENT)
3363 expect("struct/union/enum name");
3364 s = struct_find(v);
3365 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3366 if (s->type.t != a)
3367 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3368 goto do_decl;
3370 } else {
3371 v = anon_sym++;
3373 /* Record the original enum/struct/union token. */
3374 type1.t = a;
3375 type1.ref = NULL;
3376 /* we put an undefined size for struct/union */
3377 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3378 s->r = 0; /* default alignment is zero as gcc */
3379 /* put struct/union/enum name in type */
3380 do_decl:
3381 type->t = u;
3382 type->ref = s;
3384 if (tok == '{') {
3385 next();
3386 if (s->c != -1)
3387 tcc_error("struct/union/enum already defined");
3388 /* cannot be empty */
3389 c = 0;
3390 /* empty enums are not allowed */
3391 if (a == TOK_ENUM) {
3392 int seen_neg = 0;
3393 int seen_wide = 0;
3394 for(;;) {
3395 CType *t = &int_type;
3396 v = tok;
3397 if (v < TOK_UIDENT)
3398 expect("identifier");
3399 ss = sym_find(v);
3400 if (ss && !local_stack)
3401 tcc_error("redefinition of enumerator '%s'",
3402 get_tok_str(v, NULL));
3403 next();
3404 if (tok == '=') {
3405 next();
3406 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3407 c = expr_const64();
3408 #else
3409 /* We really want to support long long enums
3410 on i386 as well, but the Sym structure only
3411 holds a 'long' for associated constants,
3412 and enlarging it would bump its size (no
3413 available padding). So punt for now. */
3414 c = expr_const();
3415 #endif
3417 if (c < 0)
3418 seen_neg = 1;
3419 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3420 seen_wide = 1, t = &size_type;
3421 /* enum symbols have static storage */
3422 ss = sym_push(v, t, VT_CONST, c);
3423 ss->type.t |= VT_STATIC;
3424 if (tok != ',')
3425 break;
3426 next();
3427 c++;
3428 /* NOTE: we accept a trailing comma */
3429 if (tok == '}')
3430 break;
3432 if (!seen_neg)
3433 s->a.unsigned_enum = 1;
3434 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3435 skip('}');
3436 } else {
3437 ps = &s->next;
3438 prevbt = VT_INT;
3439 bit_pos = 0;
3440 flexible = 0;
3441 while (tok != '}') {
3442 if (!parse_btype(&btype, &ad1)) {
3443 skip(';');
3444 continue;
3446 while (1) {
3447 extra_bytes = 0;
3448 if (flexible)
3449 tcc_error("flexible array member '%s' not at the end of struct",
3450 get_tok_str(v, NULL));
3451 bit_size = -1;
3452 v = 0;
3453 type1 = btype;
3454 if (tok != ':') {
3455 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3456 if (v == 0) {
3457 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3458 expect("identifier");
3459 else {
3460 int v = btype.ref->v;
3461 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3462 if (tcc_state->ms_extensions == 0)
3463 expect("identifier");
3467 if (type_size(&type1, &align) < 0) {
3468 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3469 flexible = 1;
3470 else
3471 tcc_error("field '%s' has incomplete type",
3472 get_tok_str(v, NULL));
3474 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3475 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3476 tcc_error("invalid type for '%s'",
3477 get_tok_str(v, NULL));
3479 if (tok == ':') {
3480 next();
3481 bit_size = expr_const();
3482 /* XXX: handle v = 0 case for messages */
3483 if (bit_size < 0)
3484 tcc_error("negative width in bit-field '%s'",
3485 get_tok_str(v, NULL));
3486 if (v && bit_size == 0)
3487 tcc_error("zero width for bit-field '%s'",
3488 get_tok_str(v, NULL));
3490 size = type_size(&type1, &align);
3491 /* Only remember non-default alignment. */
3492 alignoverride = 0;
3493 if (ad1.a.aligned) {
3494 if (align < ad1.a.aligned)
3495 alignoverride = ad1.a.aligned;
3496 } else if (ad1.a.packed || ad->a.packed) {
3497 alignoverride = 1;
3498 } else if (*tcc_state->pack_stack_ptr) {
3499 if (align > *tcc_state->pack_stack_ptr)
3500 alignoverride = *tcc_state->pack_stack_ptr;
3502 if (bit_size >= 0) {
3503 bt = type1.t & VT_BTYPE;
3504 if (bt != VT_INT &&
3505 bt != VT_BYTE &&
3506 bt != VT_SHORT &&
3507 bt != VT_BOOL &&
3508 bt != VT_ENUM &&
3509 bt != VT_LLONG)
3510 tcc_error("bitfields must have scalar type");
3511 bsize = size * 8;
3512 if (bit_size > bsize) {
3513 tcc_error("width of '%s' exceeds its type",
3514 get_tok_str(v, NULL));
3515 } else if (bit_size == bsize) {
3516 /* no need for bit fields */
3517 bit_pos = 0;
3518 } else if (bit_size == 0) {
3519 /* XXX: what to do if only padding in a
3520 structure ? */
3521 /* zero size: means to pad */
3522 bit_pos = 0;
3523 } else {
3524 /* if the type changes, this is a union, or the field would
3525 * overrun the alignment slot, start at a newly
3526 * aligned slot */
3527 if ((bit_pos + bit_size) > bsize ||
3528 bt != prevbt || a == TOK_UNION)
3529 bit_pos = 0;
3530 /* XXX: handle LSB first */
3531 type1.t |= VT_BITFIELD |
3532 (bit_pos << VT_STRUCT_SHIFT) |
3533 (bit_size << (VT_STRUCT_SHIFT + 6));
3534 /* without ms-bitfields, allocate the
3535 * minimum number of bytes necessary,
3536 * adding single bytes as needed */
3537 if (!tcc_state->ms_bitfields) {
3538 if (bit_pos == 0)
3539 /* minimum bytes for new bitfield */
3540 size = (bit_size + 7) / 8;
3541 else {
3542 /* enough spare bits already allocated? */
3543 int add_size = (bit_pos - 1) % 8 + 1 + bit_size;
3544 if (add_size > 8) /* doesn't fit */
3545 extra_bytes = (add_size - 1) / 8;
3548 bit_pos += bit_size;
3550 prevbt = bt;
3551 } else {
3552 bit_pos = 0;
3554 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3555 /* Remember we've seen a real field to check
3556 for placement of flexible array member. */
3557 c = 1;
3559 if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
3560 /* See struct_layout for special casing
3561 anonymous member of struct type. */
3562 v = anon_sym++;
3564 if (v) {
3565 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, extra_bytes);
3566 *ps = ss;
3567 ps = &ss->next;
3569 if (tok == ';' || tok == TOK_EOF)
3570 break;
3571 skip(',');
3573 skip(';');
3575 skip('}');
3576 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3577 parse_attribute(ad);
3578 struct_layout(type, ad);
3583 /* return 1 if the basic type is a size specifier (short, long, long long) */
3584 ST_FUNC int is_btype_size(int bt)
3586 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3589 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3590 are added to the element type, copied because it could be a typedef. */
3591 static void parse_btype_qualify(CType *type, int qualifiers)
3593 while (type->t & VT_ARRAY) {
3594 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3595 type = &type->ref->type;
3597 type->t |= qualifiers;
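/* Example: with "typedef int A[3];", the declaration "const A x;"
   walks down the array type and qualifies the element type, so x
   behaves like "const int x[3]" without modifying the typedef itself
   (the ref chain is re-pushed above). */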
3600 /* return 0 if no type declaration. otherwise, return the basic type
3601 and skip it.
3603 static int parse_btype(CType *type, AttributeDef *ad)
3605 int t, u, bt_size, complete, type_found, typespec_found;
3606 Sym *s;
3607 CType type1;
3609 memset(ad, 0, sizeof(AttributeDef));
3610 complete = 0;
3611 type_found = 0;
3612 typespec_found = 0;
3613 t = 0;
3614 while(1) {
3615 switch(tok) {
3616 case TOK_EXTENSION:
3617 /* currently, we really ignore extension */
3618 next();
3619 continue;
3621 /* basic types */
3622 case TOK_CHAR:
3623 u = VT_BYTE;
3624 basic_type:
3625 next();
3626 basic_type1:
3627 if (complete)
3628 tcc_error("too many basic types");
3629 t |= u;
3630 bt_size = is_btype_size (u & VT_BTYPE);
3631 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3632 complete = 1;
3633 typespec_found = 1;
3634 break;
3635 case TOK_VOID:
3636 u = VT_VOID;
3637 goto basic_type;
3638 case TOK_SHORT:
3639 u = VT_SHORT;
3640 goto basic_type;
3641 case TOK_INT:
3642 u = VT_INT;
3643 goto basic_type;
3644 case TOK_LONG:
3645 next();
3646 if ((t & VT_BTYPE) == VT_DOUBLE) {
3647 #ifndef TCC_TARGET_PE
3648 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3649 #endif
3650 } else if ((t & VT_BTYPE) == VT_LONG) {
3651 t = (t & ~VT_BTYPE) | VT_LLONG;
3652 } else {
3653 u = VT_LONG;
3654 goto basic_type1;
3656 break;
3657 #ifdef TCC_TARGET_ARM64
3658 case TOK_UINT128:
3659 /* GCC's __uint128_t appears in some Linux header files. Make it a
3660 synonym for long double to get the size and alignment right. */
3661 u = VT_LDOUBLE;
3662 goto basic_type;
3663 #endif
3664 case TOK_BOOL:
3665 u = VT_BOOL;
3666 goto basic_type;
3667 case TOK_FLOAT:
3668 u = VT_FLOAT;
3669 goto basic_type;
3670 case TOK_DOUBLE:
3671 next();
3672 if ((t & VT_BTYPE) == VT_LONG) {
3673 #ifdef TCC_TARGET_PE
3674 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3675 #else
3676 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3677 #endif
3678 } else {
3679 u = VT_DOUBLE;
3680 goto basic_type1;
3682 break;
3683 case TOK_ENUM:
3684 struct_decl(&type1, ad, VT_ENUM);
3685 basic_type2:
3686 u = type1.t;
3687 type->ref = type1.ref;
3688 goto basic_type1;
3689 case TOK_STRUCT:
3690 case TOK_UNION:
3691 struct_decl(&type1, ad, VT_STRUCT);
3692 goto basic_type2;
3694 /* type modifiers */
3695 case TOK_CONST1:
3696 case TOK_CONST2:
3697 case TOK_CONST3:
3698 type->t = t;
3699 parse_btype_qualify(type, VT_CONSTANT);
3700 t = type->t;
3701 next();
3702 break;
3703 case TOK_VOLATILE1:
3704 case TOK_VOLATILE2:
3705 case TOK_VOLATILE3:
3706 type->t = t;
3707 parse_btype_qualify(type, VT_VOLATILE);
3708 t = type->t;
3709 next();
3710 break;
3711 case TOK_SIGNED1:
3712 case TOK_SIGNED2:
3713 case TOK_SIGNED3:
3714 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3715 tcc_error("signed and unsigned modifier");
3716 typespec_found = 1;
3717 t |= VT_DEFSIGN;
3718 next();
3719 break;
3720 case TOK_REGISTER:
3721 case TOK_AUTO:
3722 case TOK_RESTRICT1:
3723 case TOK_RESTRICT2:
3724 case TOK_RESTRICT3:
3725 next();
3726 break;
3727 case TOK_UNSIGNED:
3728 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3729 tcc_error("signed and unsigned modifier");
3730 t |= VT_DEFSIGN | VT_UNSIGNED;
3731 next();
3732 typespec_found = 1;
3733 break;
3735 /* storage */
3736 case TOK_EXTERN:
3737 t |= VT_EXTERN;
3738 next();
3739 break;
3740 case TOK_STATIC:
3741 t |= VT_STATIC;
3742 next();
3743 break;
3744 case TOK_TYPEDEF:
3745 t |= VT_TYPEDEF;
3746 next();
3747 break;
3748 case TOK_INLINE1:
3749 case TOK_INLINE2:
3750 case TOK_INLINE3:
3751 t |= VT_INLINE;
3752 next();
3753 break;
3755 /* GNUC attribute */
3756 case TOK_ATTRIBUTE1:
3757 case TOK_ATTRIBUTE2:
3758 parse_attribute(ad);
3759 if (ad->a.mode) {
3760 u = ad->a.mode -1;
3761 t = (t & ~VT_BTYPE) | u;
3763 break;
3764 /* GNUC typeof */
3765 case TOK_TYPEOF1:
3766 case TOK_TYPEOF2:
3767 case TOK_TYPEOF3:
3768 next();
3769 parse_expr_type(&type1);
3770 /* remove all storage modifiers except typedef */
3771 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3772 goto basic_type2;
3773 default:
3774 if (typespec_found)
3775 goto the_end;
3776 s = sym_find(tok);
3777 if (!s || !(s->type.t & VT_TYPEDEF))
3778 goto the_end;
3780 type->t = ((s->type.t & ~VT_TYPEDEF) |
3781 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3782 type->ref = s->type.ref;
3783 if (t & (VT_CONSTANT | VT_VOLATILE))
3784 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3785 t = type->t;
3787 if (s->r) {
3788 /* get attributes from typedef */
3789 if (0 == ad->a.aligned)
3790 ad->a.aligned = s->a.aligned;
3791 if (0 == ad->a.func_call)
3792 ad->a.func_call = s->a.func_call;
3793 ad->a.packed |= s->a.packed;
3795 next();
3796 typespec_found = 1;
3797 break;
3799 type_found = 1;
3801 the_end:
3802 if (tcc_state->char_is_unsigned) {
3803 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3804 t |= VT_UNSIGNED;
3807 /* long is never used as type */
3808 if ((t & VT_BTYPE) == VT_LONG)
3809 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3810 defined TCC_TARGET_PE
3811 t = (t & ~VT_BTYPE) | VT_INT;
3812 #else
3813 t = (t & ~VT_BTYPE) | VT_LLONG;
3814 #endif
3815 type->t = t;
3816 return type_found;
3819 /* convert a function parameter type (array to pointer and function to
3820 function pointer) */
3821 static inline void convert_parameter_type(CType *pt)
3823 /* remove const and volatile qualifiers (XXX: const could be used
3824 to indicate a const function parameter) */
3825 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3826 /* array must be transformed to pointer according to ANSI C */
3827 pt->t &= ~VT_ARRAY;
3828 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3829 mk_pointer(pt);
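/* Example: in "void f(int a[10], int g(void));" the parameters are
   adjusted here to "int *a" and "int (*g)(void)", and any top-level
   const/volatile qualifier on them is dropped. */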
3833 ST_FUNC void parse_asm_str(CString *astr)
3835 skip('(');
3836 parse_mult_str(astr, "string constant");
3839 /* Parse an asm label and return the token */
3840 static int asm_label_instr(void)
3842 int v;
3843 CString astr;
3845 next();
3846 parse_asm_str(&astr);
3847 skip(')');
3848 #ifdef ASM_DEBUG
3849 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3850 #endif
3851 v = tok_alloc(astr.data, astr.size - 1)->tok;
3852 cstr_free(&astr);
3853 return v;
3856 static void post_type(CType *type, AttributeDef *ad, int storage)
3858 int n, l, t1, arg_size, align;
3859 Sym **plast, *s, *first;
3860 AttributeDef ad1;
3861 CType pt;
3863 if (tok == '(') {
3864 /* function declaration */
3865 next();
3866 l = 0;
3867 first = NULL;
3868 plast = &first;
3869 arg_size = 0;
3870 if (tok != ')') {
3871 for(;;) {
3872 /* read param name and compute offset */
3873 if (l != FUNC_OLD) {
3874 if (!parse_btype(&pt, &ad1)) {
3875 if (l) {
3876 tcc_error("invalid type");
3877 } else {
3878 l = FUNC_OLD;
3879 goto old_proto;
3882 l = FUNC_NEW;
3883 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3884 break;
3885 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3886 if ((pt.t & VT_BTYPE) == VT_VOID)
3887 tcc_error("parameter declared as void");
3888 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3889 } else {
3890 old_proto:
3891 n = tok;
3892 if (n < TOK_UIDENT)
3893 expect("identifier");
3894 pt.t = VT_INT;
3895 next();
3897 convert_parameter_type(&pt);
3898 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3899 *plast = s;
3900 plast = &s->next;
3901 if (tok == ')')
3902 break;
3903 skip(',');
3904 if (l == FUNC_NEW && tok == TOK_DOTS) {
3905 l = FUNC_ELLIPSIS;
3906 next();
3907 break;
3911 /* if no parameters, then old-style prototype */
3912 if (l == 0)
3913 l = FUNC_OLD;
3914 skip(')');
3915 /* NOTE: const is ignored in returned type as it has a special
3916 meaning in gcc / C++ */
3917 type->t &= ~VT_CONSTANT;
3918 /* some ancient pre-K&R C allows a function to return an array
3919 and the array brackets to be put after the arguments, such
3920 that "int c()[]" means something like "int[] c()" */
3921 if (tok == '[') {
3922 next();
3923 skip(']'); /* only handle simple "[]" */
3924 type->t |= VT_PTR;
3926 /* we push an anonymous symbol which will contain the function prototype */
3927 ad->a.func_args = arg_size;
3928 s = sym_push(SYM_FIELD, type, 0, l);
3929 s->a = ad->a;
3930 s->next = first;
3931 type->t = VT_FUNC;
3932 type->ref = s;
3933 } else if (tok == '[') {
3934 int saved_nocode_wanted = nocode_wanted;
3935 /* array definition */
3936 next();
3937 if (tok == TOK_RESTRICT1)
3938 next();
3939 n = -1;
3940 t1 = 0;
3941 if (tok != ']') {
3942 if (!local_stack || (storage & VT_STATIC))
3943 vpushi(expr_const());
3944 else {
3945 /* VLA lengths (which can only occur with local_stack && !VT_STATIC)
3946 must always be evaluated, even under nocode_wanted,
3947 so that the size slot is initialized (e.g. under sizeof
3948 or typeof). */
3949 nocode_wanted = 0;
3950 gexpr();
3952 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
3953 n = vtop->c.i;
3954 if (n < 0)
3955 tcc_error("invalid array size");
3956 } else {
3957 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
3958 tcc_error("size of variable length array should be an integer");
3959 t1 = VT_VLA;
3962 skip(']');
3963 /* parse next post type */
3964 post_type(type, ad, storage);
3965 if (type->t == VT_FUNC)
3966 tcc_error("declaration of an array of functions");
3967 t1 |= type->t & VT_VLA;
3969 if (t1 & VT_VLA) {
3970 loc -= type_size(&int_type, &align);
3971 loc &= -align;
3972 n = loc;
3974 vla_runtime_type_size(type, &align);
3975 gen_op('*');
3976 vset(&int_type, VT_LOCAL|VT_LVAL, n);
3977 vswap();
3978 vstore();
3980 if (n != -1)
3981 vpop();
3982 nocode_wanted = saved_nocode_wanted;
3984 /* we push an anonymous symbol which will contain the array
3985 element type */
3986 s = sym_push(SYM_FIELD, type, 0, n);
3987 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
3988 type->ref = s;
3992 /* Parse a type declaration (except basic type), and return the type
3993 in 'type'. 'td' is a bitmask indicating which kind of type decl is
3994 expected. 'type' should contain the basic type. 'ad' is the
3995 attribute definition of the basic type. It can be modified by
3996 type_decl().
3998 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4000 Sym *s;
4001 CType type1, *type2;
4002 int qualifiers, storage;
4004 while (tok == '*') {
4005 qualifiers = 0;
4006 redo:
4007 next();
4008 switch(tok) {
4009 case TOK_CONST1:
4010 case TOK_CONST2:
4011 case TOK_CONST3:
4012 qualifiers |= VT_CONSTANT;
4013 goto redo;
4014 case TOK_VOLATILE1:
4015 case TOK_VOLATILE2:
4016 case TOK_VOLATILE3:
4017 qualifiers |= VT_VOLATILE;
4018 goto redo;
4019 case TOK_RESTRICT1:
4020 case TOK_RESTRICT2:
4021 case TOK_RESTRICT3:
4022 goto redo;
4023 /* XXX: clarify attribute handling */
4024 case TOK_ATTRIBUTE1:
4025 case TOK_ATTRIBUTE2:
4026 parse_attribute(ad);
4027 break;
4029 mk_pointer(type);
4030 type->t |= qualifiers;
4033 /* recursive type */
4034 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4035 type1.t = 0; /* XXX: same as int */
4036 if (tok == '(') {
4037 next();
4038 /* XXX: it is not correct to modify 'ad' at this point, but
4039 the syntax is not clear */
4040 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4041 parse_attribute(ad);
4042 type_decl(&type1, ad, v, td);
4043 skip(')');
4044 } else {
4045 /* type identifier */
4046 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4047 *v = tok;
4048 next();
4049 } else {
4050 if (!(td & TYPE_ABSTRACT))
4051 expect("identifier");
4052 *v = 0;
4055 storage = type->t & VT_STORAGE;
4056 type->t &= ~VT_STORAGE;
4057 post_type(type, ad, storage);
4058 type->t |= storage;
4059 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4060 parse_attribute(ad);
4062 if (!type1.t)
4063 return;
4064 /* append type at the end of type1 */
4065 type2 = &type1;
4066 for(;;) {
4067 s = type2->ref;
4068 type2 = &s->type;
4069 if (!type2->t) {
4070 *type2 = *type;
4071 break;
4074 *type = type1;
4077 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4078 ST_FUNC int lvalue_type(int t)
4080 int bt, r;
4081 r = VT_LVAL;
4082 bt = t & VT_BTYPE;
4083 if (bt == VT_BYTE || bt == VT_BOOL)
4084 r |= VT_LVAL_BYTE;
4085 else if (bt == VT_SHORT)
4086 r |= VT_LVAL_SHORT;
4087 else
4088 return r;
4089 if (t & VT_UNSIGNED)
4090 r |= VT_LVAL_UNSIGNED;
4091 return r;
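/* Example: an unsigned char lvalue gets VT_LVAL | VT_LVAL_BYTE |
   VT_LVAL_UNSIGNED, so loads through it can use a zero-extending
   byte load; plain int lvalues get just VT_LVAL. */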
4094 /* indirection with full error checking and bound check */
4095 ST_FUNC void indir(void)
4097 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4098 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4099 return;
4100 expect("pointer");
4102 if ((vtop->r & VT_LVAL) && !nocode_wanted)
4103 gv(RC_INT);
4104 vtop->type = *pointed_type(&vtop->type);
4105 /* Arrays and functions are never lvalues */
4106 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4107 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4108 vtop->r |= lvalue_type(vtop->type.t);
4109 /* if bound checking, the referenced pointer must be checked */
4110 #ifdef CONFIG_TCC_BCHECK
4111 if (tcc_state->do_bounds_check)
4112 vtop->r |= VT_MUSTBOUND;
4113 #endif
4117 /* pass a parameter to a function and do type checking and casting */
4118 static void gfunc_param_typed(Sym *func, Sym *arg)
4120 int func_type;
4121 CType type;
4123 func_type = func->c;
4124 if (func_type == FUNC_OLD ||
4125 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4126 /* default casting : only need to convert float to double */
4127 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4128 type.t = VT_DOUBLE;
4129 gen_cast(&type);
4130 } else if (vtop->type.t & VT_BITFIELD) {
4131 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4132 type.ref = vtop->type.ref;
4133 gen_cast(&type);
4135 } else if (arg == NULL) {
4136 tcc_error("too many arguments to function");
4137 } else {
4138 type = arg->type;
4139 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4140 gen_assign_cast(&type);
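/* Example: for a variadic callee, an argument of type float reaching
   the "..." part is promoted to double here (default argument
   promotion); for a prototyped parameter the argument is instead
   converted with gen_assign_cast() to the declared type. */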
4144 /* parse an expression of the form '(type)' or '(expr)' and return its
4145 type */
4146 static void parse_expr_type(CType *type)
4148 int n;
4149 AttributeDef ad;
4151 skip('(');
4152 if (parse_btype(type, &ad)) {
4153 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4154 } else {
4155 expr_type(type);
4157 skip(')');
4160 static void parse_type(CType *type)
4162 AttributeDef ad;
4163 int n;
4165 if (!parse_btype(type, &ad)) {
4166 expect("type");
4168 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4171 static void vpush_tokc(int t)
4173 CType type;
4174 type.t = t;
4175 type.ref = 0;
4176 vsetc(&type, VT_CONST, &tokc);
4179 ST_FUNC void unary(void)
4181 int n, t, align, size, r, sizeof_caller;
4182 CType type;
4183 Sym *s;
4184 AttributeDef ad;
4186 sizeof_caller = in_sizeof;
4187 in_sizeof = 0;
4188 /* XXX: GCC 2.95.3 does not generate a table although it should be
4189 better here */
4190 tok_next:
4191 switch(tok) {
4192 case TOK_EXTENSION:
4193 next();
4194 goto tok_next;
4195 case TOK_CINT:
4196 case TOK_CCHAR:
4197 case TOK_LCHAR:
4198 vpushi(tokc.i);
4199 next();
4200 break;
4201 case TOK_CUINT:
4202 vpush_tokc(VT_INT | VT_UNSIGNED);
4203 next();
4204 break;
4205 case TOK_CLLONG:
4206 vpush_tokc(VT_LLONG);
4207 next();
4208 break;
4209 case TOK_CULLONG:
4210 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4211 next();
4212 break;
4213 case TOK_CFLOAT:
4214 vpush_tokc(VT_FLOAT);
4215 next();
4216 break;
4217 case TOK_CDOUBLE:
4218 vpush_tokc(VT_DOUBLE);
4219 next();
4220 break;
4221 case TOK_CLDOUBLE:
4222 vpush_tokc(VT_LDOUBLE);
4223 next();
4224 break;
4225 case TOK___FUNCTION__:
4226 if (!gnu_ext)
4227 goto tok_identifier;
4228 /* fall thru */
4229 case TOK___FUNC__:
4231 void *ptr;
4232 int len;
4233 /* special function name identifier */
4234 len = strlen(funcname) + 1;
4235 /* generate char[len] type */
4236 type.t = VT_BYTE;
4237 mk_pointer(&type);
4238 type.t |= VT_ARRAY;
4239 type.ref->c = len;
4240 vpush_ref(&type, data_section, data_section->data_offset, len);
4241 ptr = section_ptr_add(data_section, len);
4242 memcpy(ptr, funcname, len);
4243 next();
4245 break;
4246 case TOK_LSTR:
4247 #ifdef TCC_TARGET_PE
4248 t = VT_SHORT | VT_UNSIGNED;
4249 #else
4250 t = VT_INT;
4251 #endif
4252 goto str_init;
4253 case TOK_STR:
4254 /* string parsing */
4255 t = VT_BYTE;
4256 str_init:
4257 if (tcc_state->warn_write_strings)
4258 t |= VT_CONSTANT;
4259 type.t = t;
4260 mk_pointer(&type);
4261 type.t |= VT_ARRAY;
4262 memset(&ad, 0, sizeof(AttributeDef));
4263 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4264 break;
4265 case '(':
4266 next();
4267 /* cast ? */
4268 if (parse_btype(&type, &ad)) {
4269 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4270 skip(')');
4271 /* check ISOC99 compound literal */
4272 if (tok == '{') {
4273 /* data is allocated locally by default */
4274 if (global_expr)
4275 r = VT_CONST;
4276 else
4277 r = VT_LOCAL;
4278 /* all except arrays are lvalues */
4279 if (!(type.t & VT_ARRAY))
4280 r |= lvalue_type(type.t);
4281 memset(&ad, 0, sizeof(AttributeDef));
4282 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4283 } else {
4284 if (sizeof_caller) {
4285 vpush(&type);
4286 return;
4288 unary();
4289 gen_cast(&type);
4291 } else if (tok == '{') {
4292 if (const_wanted)
4293 tcc_error("expected constant");
4294 /* save all registers */
4295 if (!nocode_wanted)
4296 save_regs(0);
4297 /* statement expression : we do not accept break/continue
4298 inside as GCC does */
4299 block(NULL, NULL, 1);
4300 skip(')');
4301 } else {
4302 gexpr();
4303 skip(')');
4305 break;
4306 case '*':
4307 next();
4308 unary();
4309 indir();
4310 break;
4311 case '&':
4312 next();
4313 unary();
4314 /* function names must be treated as function pointers,
4315 except for unary '&' and sizeof. Since we consider that
4316 functions are not lvalues, we only have to handle it
4317 there and in function calls. */
4318 /* arrays can also be used although they are not lvalues */
4319 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4320 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4321 test_lvalue();
4322 mk_pointer(&vtop->type);
4323 gaddrof();
4324 break;
4325 case '!':
4326 next();
4327 unary();
4328 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4329 CType boolean;
4330 boolean.t = VT_BOOL;
4331 gen_cast(&boolean);
4332 vtop->c.i = !vtop->c.i;
4333 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4334 vtop->c.i ^= 1;
4335 else {
4336 save_regs(1);
4337 vseti(VT_JMP, gvtst(1, 0));
4339 break;
4340 case '~':
4341 next();
4342 unary();
4343 vpushi(-1);
4344 gen_op('^');
4345 break;
4346 case '+':
4347 next();
4348 unary();
4349 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4350 tcc_error("pointer not accepted for unary plus");
4351 /* In order to force a cast, we add zero, except for floating point
4352 where we really need a no-op (otherwise -0.0 would be transformed
4353 into +0.0). */
4354 if (!is_float(vtop->type.t)) {
4355 vpushi(0);
4356 gen_op('+');
4358 break;
4359 case TOK_SIZEOF:
4360 case TOK_ALIGNOF1:
4361 case TOK_ALIGNOF2:
4362 t = tok;
4363 next();
4364 in_sizeof++;
4365 unary_type(&type); // unary() inside resets in_sizeof to 0
4366 size = type_size(&type, &align);
4367 if (t == TOK_SIZEOF) {
4368 if (!(type.t & VT_VLA)) {
4369 if (size < 0)
4370 tcc_error("sizeof applied to an incomplete type");
4371 vpushs(size);
4372 } else {
4373 vla_runtime_type_size(&type, &align);
4375 } else {
4376 vpushs(align);
4378 vtop->type.t |= VT_UNSIGNED;
4379 break;
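        /* For illustration: with 'int n = 4; int a[n];' the operand of
           'sizeof a' is a VLA, so its size is computed at run time via
           vla_runtime_type_size() above; 'sizeof (int[4])' folds to a
           constant pushed with vpushs(). */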
4381 case TOK_builtin_expect:
4383 /* __builtin_expect is a no-op for now */
4384 int saved_nocode_wanted;
4385 next();
4386 skip('(');
4387 expr_eq();
4388 skip(',');
4389 saved_nocode_wanted = nocode_wanted;
4390 nocode_wanted = 1;
4391 expr_lor_const();
4392 vpop();
4393 nocode_wanted = saved_nocode_wanted;
4394 skip(')');
4396 break;
4397 case TOK_builtin_types_compatible_p:
4399 CType type1, type2;
4400 next();
4401 skip('(');
4402 parse_type(&type1);
4403 skip(',');
4404 parse_type(&type2);
4405 skip(')');
4406 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4407 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4408 vpushi(is_compatible_types(&type1, &type2));
4410 break;
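        /* For illustration: top-level qualifiers are stripped before the
           comparison, so __builtin_types_compatible_p(const int, int)
           yields 1, while __builtin_types_compatible_p(int, double)
           yields 0. */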
4411 case TOK_builtin_choose_expr:
4413 int saved_nocode_wanted;
4414 int64_t c;
4415 next();
4416 skip('(');
4417 c = expr_const64();
4418 skip(',');
4419 if (!c) {
4420 saved_nocode_wanted = nocode_wanted;
4421 nocode_wanted = 1;
4423 expr_eq();
4424 if (!c) {
4425 vpop();
4426 nocode_wanted = saved_nocode_wanted;
4428 skip(',');
4429 if (c) {
4430 saved_nocode_wanted = nocode_wanted;
4431 nocode_wanted = 1;
4433 expr_eq();
4434 if (c) {
4435 vpop();
4436 nocode_wanted = saved_nocode_wanted;
4438 skip(')');
4440 break;
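        /* For illustration: __builtin_choose_expr(sizeof(void *) == 8, a, b)
           (with 'a' and 'b' arbitrary placeholder expressions) parses both
           branches but only generates code for the selected one; the
           controlling expression must be an integer constant
           (expr_const64 above). */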
4441 case TOK_builtin_constant_p:
4443 int saved_nocode_wanted, res;
4444 next();
4445 skip('(');
4446 saved_nocode_wanted = nocode_wanted;
4447 nocode_wanted = 1;
4448 gexpr();
4449 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4450 vpop();
4451 nocode_wanted = saved_nocode_wanted;
4452 skip(')');
4453 vpushi(res);
4455 break;
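        /* For illustration: __builtin_constant_p(3 * 7) yields 1 because the
           folded argument is a plain VT_CONST, while passing a variable
           yields 0; the argument is parsed with nocode_wanted set, so it is
           never evaluated at run time. */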
4456 case TOK_builtin_frame_address:
4457 case TOK_builtin_return_address:
4459 int tok1 = tok;
4460 int level;
4461 CType type;
4462 next();
4463 skip('(');
4464 if (tok != TOK_CINT) {
4465 tcc_error("%s only takes positive integers",
4466 tok1 == TOK_builtin_return_address ?
4467 "__builtin_return_address" :
4468 "__builtin_frame_address");
4470 level = (uint32_t)tokc.i;
4471 next();
4472 skip(')');
4473 type.t = VT_VOID;
4474 mk_pointer(&type);
4475 vset(&type, VT_LOCAL, 0); /* local frame */
4476 while (level--) {
4477 mk_pointer(&vtop->type);
4478 indir(); /* -> parent frame */
4480 if (tok1 == TOK_builtin_return_address) {
4481 // assume return address is just above frame pointer on stack
4482 vpushi(PTR_SIZE);
4483 gen_op('+');
4484 mk_pointer(&vtop->type);
4485 indir();
4488 break;
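        /* For illustration: __builtin_frame_address(1) follows one saved
           frame pointer to reach the caller's frame, and
           __builtin_return_address(0) loads the word just above the current
           frame pointer; the level must be given as an integer literal
           (TOK_CINT above). */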
4489 #ifdef TCC_TARGET_X86_64
4490 #ifdef TCC_TARGET_PE
4491 case TOK_builtin_va_start:
4493 next();
4494 skip('(');
4495 expr_eq();
4496 skip(',');
4497 expr_eq();
4498 skip(')');
4499 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4500 tcc_error("__builtin_va_start expects a local variable");
4501 vtop->r &= ~(VT_LVAL | VT_REF);
4502 vtop->type = char_pointer_type;
4503 vtop->c.i += 8;
4504 vstore();
4506 break;
4507 #else
4508 case TOK_builtin_va_arg_types:
4510 CType type;
4511 next();
4512 skip('(');
4513 parse_type(&type);
4514 skip(')');
4515 vpushi(classify_x86_64_va_arg(&type));
4517 break;
4518 #endif
4519 #endif
4521 #ifdef TCC_TARGET_ARM64
4522 case TOK___va_start: {
4523 if (nocode_wanted)
4524 tcc_error("statement in global scope");
4525 next();
4526 skip('(');
4527 expr_eq();
4528 skip(',');
4529 expr_eq();
4530 skip(')');
4531 //xx check types
4532 gen_va_start();
4533 vpushi(0);
4534 vtop->type.t = VT_VOID;
4535 break;
4537 case TOK___va_arg: {
4538 CType type;
4539 if (nocode_wanted)
4540 tcc_error("statement in global scope");
4541 next();
4542 skip('(');
4543 expr_eq();
4544 skip(',');
4545 parse_type(&type);
4546 skip(')');
4547 //xx check types
4548 gen_va_arg(&type);
4549 vtop->type = type;
4550 break;
4552 case TOK___arm64_clear_cache: {
4553 next();
4554 skip('(');
4555 expr_eq();
4556 skip(',');
4557 expr_eq();
4558 skip(')');
4559 gen_clear_cache();
4560 vpushi(0);
4561 vtop->type.t = VT_VOID;
4562 break;
4564 #endif
4565 /* pre operations */
4566 case TOK_INC:
4567 case TOK_DEC:
4568 t = tok;
4569 next();
4570 unary();
4571 inc(0, t);
4572 break;
4573 case '-':
4574 next();
4575 unary();
4576 t = vtop->type.t & VT_BTYPE;
4577 if (is_float(t)) {
4578 /* In IEEE negate(x) isn't subtract(0,x), but rather
4579 subtract(-0, x). */
4580 vpush(&vtop->type);
4581 if (t == VT_FLOAT)
4582 vtop->c.f = -0.0f;
4583 else if (t == VT_DOUBLE)
4584 vtop->c.d = -0.0;
4585 else
4586 vtop->c.ld = -0.0;
4587 } else
4588 vpushi(0);
4589 vswap();
4590 gen_op('-');
4591 break;
4592 case TOK_LAND:
4593 if (!gnu_ext)
4594 goto tok_identifier;
4595 next();
4596 /* allow to take the address of a label */
4597 if (tok < TOK_UIDENT)
4598 expect("label identifier");
4599 s = label_find(tok);
4600 if (!s) {
4601 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4602 } else {
4603 if (s->r == LABEL_DECLARED)
4604 s->r = LABEL_FORWARD;
4606 if (!s->type.t) {
4607 s->type.t = VT_VOID;
4608 mk_pointer(&s->type);
4609 s->type.t |= VT_STATIC;
4611 vpushsym(&s->type, s);
4612 next();
4613 break;
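        /* For illustration, the GNU "labels as values" extension parsed here
           (identifiers are placeholders):
               void *p = &&again;
               ...
           again:
               goto *p;
           '&&again' yields a static 'void *'; the matching computed
           'goto *expr' is handled in block(). */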
4615     // special qnan, snan and infinity values
4616 case TOK___NAN__:
4617 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4618 next();
4619 break;
4620 case TOK___SNAN__:
4621 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4622 next();
4623 break;
4624 case TOK___INF__:
4625 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4626 next();
4627 break;
4629 default:
4630 tok_identifier:
4631 t = tok;
4632 next();
4633 if (t < TOK_UIDENT)
4634 expect("identifier");
4635 s = sym_find(t);
4636 if (!s) {
4637 const char *name = get_tok_str(t, NULL);
4638 if (tok != '(')
4639 tcc_error("'%s' undeclared", name);
4640             /* for simple function calls, we tolerate an undeclared
4641                external reference to an int() function */
4642 if (tcc_state->warn_implicit_function_declaration
4643 #ifdef TCC_TARGET_PE
4644 /* people must be warned about using undeclared WINAPI functions
4645                    (which usually start with an uppercase letter) */
4646 || (name[0] >= 'A' && name[0] <= 'Z')
4647 #endif
4649 tcc_warning("implicit declaration of function '%s'", name);
4650 s = external_global_sym(t, &func_old_type, 0);
4652 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4653 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4654 /* if referencing an inline function, then we generate a
4655 symbol to it if not already done. It will have the
4656 effect to generate code for it at the end of the
4657                compilation unit. Inline functions are always
4658 generated in the text section. */
4659 if (!s->c && !nocode_wanted)
4660 put_extern_sym(s, text_section, 0, 0);
4661 r = VT_SYM | VT_CONST;
4662 } else {
4663 r = s->r;
4664 /* A symbol that has a register is a local register variable,
4665 which starts out as VT_LOCAL value. */
4666 if ((r & VT_VALMASK) < VT_CONST)
4667 r = (r & ~VT_VALMASK) | VT_LOCAL;
4669 vset(&s->type, r, s->c);
4670 /* Point to s as backpointer (even without r&VT_SYM).
4671 Will be used by at least the x86 inline asm parser for
4672 regvars. */
4673 vtop->sym = s;
4674 if (vtop->r & VT_SYM) {
4675 vtop->c.i = 0;
4677 break;
4680 /* post operations */
4681 while (1) {
4682 if (tok == TOK_INC || tok == TOK_DEC) {
4683 inc(1, tok);
4684 next();
4685 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4686 int qualifiers;
4687 /* field */
4688 if (tok == TOK_ARROW)
4689 indir();
4690 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4691 test_lvalue();
4692 gaddrof();
4693 /* expect pointer on structure */
4694 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4695 expect("struct or union");
4696 if (tok == TOK_CDOUBLE)
4697 expect("field name");
4698 next();
4699 if (tok == TOK_CINT || tok == TOK_CUINT)
4700 expect("field name");
4701 s = find_field(&vtop->type, tok);
4702 if (!s)
4703 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4704 /* add field offset to pointer */
4705 vtop->type = char_pointer_type; /* change type to 'char *' */
4706 vpushi(s->c);
4707 gen_op('+');
4708 /* change type to field type, and set to lvalue */
4709 vtop->type = s->type;
4710 vtop->type.t |= qualifiers;
4711 /* an array is never an lvalue */
4712 if (!(vtop->type.t & VT_ARRAY)) {
4713 vtop->r |= lvalue_type(vtop->type.t);
4714 #ifdef CONFIG_TCC_BCHECK
4715 /* if bound checking, the referenced pointer must be checked */
4716 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4717 vtop->r |= VT_MUSTBOUND;
4718 #endif
4720 next();
4721 } else if (tok == '[') {
4722 next();
4723 gexpr();
4724 gen_op('+');
4725 indir();
4726 skip(']');
4727 } else if (tok == '(') {
4728 SValue ret;
4729 Sym *sa;
4730 int nb_args, ret_nregs, ret_align, regsize, variadic;
4732 /* function call */
4733 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4734 /* pointer test (no array accepted) */
4735 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4736 vtop->type = *pointed_type(&vtop->type);
4737 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4738 goto error_func;
4739 } else {
4740 error_func:
4741 expect("function pointer");
4743 } else {
4744 vtop->r &= ~VT_LVAL; /* no lvalue */
4746 /* get return type */
4747 s = vtop->type.ref;
4748 next();
4749 sa = s->next; /* first parameter */
4750 nb_args = 0;
4751 ret.r2 = VT_CONST;
4752 /* compute first implicit argument if a structure is returned */
4753 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4754 variadic = (s->c == FUNC_ELLIPSIS);
4755 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4756 &ret_align, &regsize);
4757 if (!ret_nregs) {
4758 /* get some space for the returned structure */
4759 size = type_size(&s->type, &align);
4760 #ifdef TCC_TARGET_ARM64
4761                     /* On arm64, a small struct is returned in registers.
4762 It is much easier to write it to memory if we know
4763 that we are allowed to write some extra bytes, so
4764 round the allocated space up to a power of 2: */
4765 if (size < 16)
4766 while (size & (size - 1))
4767 size = (size | (size - 1)) + 1;
4768 #endif
4769 loc = (loc - size) & -align;
4770 ret.type = s->type;
4771 ret.r = VT_LOCAL | VT_LVAL;
4772 /* pass it as 'int' to avoid structure arg passing
4773 problems */
4774 vseti(VT_LOCAL, loc);
4775 ret.c = vtop->c;
4776 nb_args++;
4778 } else {
4779 ret_nregs = 1;
4780 ret.type = s->type;
4783 if (ret_nregs) {
4784 /* return in register */
4785 if (is_float(ret.type.t)) {
4786 ret.r = reg_fret(ret.type.t);
4787 #ifdef TCC_TARGET_X86_64
4788 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4789 ret.r2 = REG_QRET;
4790 #endif
4791 } else {
4792 #ifndef TCC_TARGET_ARM64
4793 #ifdef TCC_TARGET_X86_64
4794 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4795 #else
4796 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4797 #endif
4798 ret.r2 = REG_LRET;
4799 #endif
4800 ret.r = REG_IRET;
4802 ret.c.i = 0;
4804 if (tok != ')') {
4805 for(;;) {
4806 expr_eq();
4807 gfunc_param_typed(s, sa);
4808 nb_args++;
4809 if (sa)
4810 sa = sa->next;
4811 if (tok == ')')
4812 break;
4813 skip(',');
4816 if (sa)
4817 tcc_error("too few arguments to function");
4818 skip(')');
4819 if (!nocode_wanted) {
4820 gfunc_call(nb_args);
4821 } else {
4822 vtop -= (nb_args + 1);
4825 /* return value */
4826 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4827 vsetc(&ret.type, r, &ret.c);
4828 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4831 /* handle packed struct return */
4832 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4833 int addr, offset;
4835 size = type_size(&s->type, &align);
4836                 /* We're writing whole regs often, so make sure there's enough
4837 space. Assume register size is power of 2. */
4838 if (regsize > align)
4839 align = regsize;
4840 loc = (loc - size) & -align;
4841 addr = loc;
4842 offset = 0;
4843 for (;;) {
4844 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4845 vswap();
4846 vstore();
4847 vtop--;
4848 if (--ret_nregs == 0)
4849 break;
4850 offset += regsize;
4852 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4854 } else {
4855 break;
4860 ST_FUNC void expr_prod(void)
4862 int t;
4864 unary();
4865 while (tok == '*' || tok == '/' || tok == '%') {
4866 t = tok;
4867 next();
4868 unary();
4869 gen_op(t);
4873 ST_FUNC void expr_sum(void)
4875 int t;
4877 expr_prod();
4878 while (tok == '+' || tok == '-') {
4879 t = tok;
4880 next();
4881 expr_prod();
4882 gen_op(t);
4886 static void expr_shift(void)
4888 int t;
4890 expr_sum();
4891 while (tok == TOK_SHL || tok == TOK_SAR) {
4892 t = tok;
4893 next();
4894 expr_sum();
4895 gen_op(t);
4899 static void expr_cmp(void)
4901 int t;
4903 expr_shift();
4904 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4905 tok == TOK_ULT || tok == TOK_UGE) {
4906 t = tok;
4907 next();
4908 expr_shift();
4909 gen_op(t);
4913 static void expr_cmpeq(void)
4915 int t;
4917 expr_cmp();
4918 while (tok == TOK_EQ || tok == TOK_NE) {
4919 t = tok;
4920 next();
4921 expr_cmp();
4922 gen_op(t);
4926 static void expr_and(void)
4928 expr_cmpeq();
4929 while (tok == '&') {
4930 next();
4931 expr_cmpeq();
4932 gen_op('&');
4936 static void expr_xor(void)
4938 expr_and();
4939 while (tok == '^') {
4940 next();
4941 expr_and();
4942 gen_op('^');
4946 static void expr_or(void)
4948 expr_xor();
4949 while (tok == '|') {
4950 next();
4951 expr_xor();
4952 gen_op('|');
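/* For illustration of how this chain of routines encodes precedence:
   'a | b & c + d' is parsed as 'a | (b & (c + d))', because expr_or()
   obtains each operand from expr_xor(), which defers to expr_and(), and so
   on down to unary(); every level loops only on its own operators. */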
4956 /* XXX: fix this mess */
4957 static void expr_land_const(void)
4959 expr_or();
4960 while (tok == TOK_LAND) {
4961 next();
4962 expr_or();
4963 gen_op(TOK_LAND);
4966 static void expr_lor_const(void)
4968 expr_land_const();
4969 while (tok == TOK_LOR) {
4970 next();
4971 expr_land_const();
4972 gen_op(TOK_LOR);
4976 static void expr_land(void)
4978 expr_or();
4979 if (tok == TOK_LAND) {
4980 int t = 0;
4981 for(;;) {
4982 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4983 CType ctb;
4984 ctb.t = VT_BOOL;
4985 gen_cast(&ctb);
4986 if (vtop->c.i) {
4987 vpop();
4988 } else {
4989 int saved_nocode_wanted = nocode_wanted;
4990 nocode_wanted = 1;
4991 while (tok == TOK_LAND) {
4992 next();
4993 expr_or();
4994 vpop();
4996 if (t)
4997 gsym(t);
4998 nocode_wanted = saved_nocode_wanted;
4999 gen_cast(&int_type);
5000 break;
5002 } else {
5003 if (!t)
5004 save_regs(1);
5005 t = gvtst(1, t);
5007 if (tok != TOK_LAND) {
5008 if (t)
5009 vseti(VT_JMPI, t);
5010 else
5011 vpushi(1);
5012 break;
5014 next();
5015 expr_or();
5020 static void expr_lor(void)
5022 expr_land();
5023 if (tok == TOK_LOR) {
5024 int t = 0;
5025 for(;;) {
5026 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5027 CType ctb;
5028 ctb.t = VT_BOOL;
5029 gen_cast(&ctb);
5030 if (!vtop->c.i) {
5031 vpop();
5032 } else {
5033 int saved_nocode_wanted = nocode_wanted;
5034 nocode_wanted = 1;
5035 while (tok == TOK_LOR) {
5036 next();
5037 expr_land();
5038 vpop();
5040 if (t)
5041 gsym(t);
5042 nocode_wanted = saved_nocode_wanted;
5043 gen_cast(&int_type);
5044 break;
5046 } else {
5047 if (!t)
5048 save_regs(1);
5049 t = gvtst(0, t);
5051 if (tok != TOK_LOR) {
5052 if (t)
5053 vseti(VT_JMP, t);
5054 else
5055 vpushi(0);
5056 break;
5058 next();
5059 expr_land();
5064 /* Assuming vtop is a value used in a conditional context
5065 (i.e. compared with zero) return 0 if it's false, 1 if
5066 true and -1 if it can't be statically determined. */
5067 static int condition_3way(void)
5069 int c = -1;
5070 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5071 (!(vtop->r & VT_SYM) ||
5072 !(vtop->sym->type.t & VT_WEAK))) {
5073 CType boolean;
5074 boolean.t = VT_BOOL;
5075 vdup();
5076 gen_cast(&boolean);
5077 c = vtop->c.i;
5078 vpop();
5080 return c;
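/* For illustration: condition_3way() returns 1 for 'if (2 > 1)', 0 for
   'if (0)', and -1 for 'if (x)' with a run-time value, which lets the
   callers below skip code generation for statically dead branches. */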
5083 static void expr_cond(void)
5085 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv;
5086 int c;
5087 SValue sv;
5088 CType type, type1, type2;
5090 expr_lor();
5091 if (tok == '?') {
5092 next();
5093 c = condition_3way();
5094 if (c >= 0) {
5095 int saved_nocode_wanted = nocode_wanted;
5096 if (c) {
5097 if (tok != ':' || !gnu_ext) {
5098 vpop();
5099 gexpr();
5101 skip(':');
5102 nocode_wanted = 1;
5103 expr_cond();
5104 vpop();
5105 nocode_wanted = saved_nocode_wanted;
5106 } else {
5107 vpop();
5108 if (tok != ':' || !gnu_ext) {
5109 nocode_wanted = 1;
5110 gexpr();
5111 vpop();
5112 nocode_wanted = saved_nocode_wanted;
5114 skip(':');
5115 expr_cond();
5118 else {
5119 /* XXX This doesn't handle nocode_wanted correctly at all.
5120 It unconditionally calls gv/gvtst and friends. That's
5121 the case for many of the expr_ routines. Currently
5122 that should generate only useless code, but depending
5123 on other operand handling this might also generate
5124 pointer derefs for lvalue conversions whose result
5125 is useless, but nevertheless can lead to segfault.
5127            At some point we need to overhaul the whole nocode_wanted
5128 handling. */
5129 if (vtop != vstack) {
5130 /* needed to avoid having different registers saved in
5131 each branch */
5132 if (is_float(vtop->type.t)) {
5133 rc = RC_FLOAT;
5134 #ifdef TCC_TARGET_X86_64
5135 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5136 rc = RC_ST0;
5138 #endif
5140 else
5141 rc = RC_INT;
5142 gv(rc);
5143 save_regs(1);
5145 if (tok == ':' && gnu_ext) {
5146 gv_dup();
5147 tt = gvtst(1, 0);
5148 } else {
5149 tt = gvtst(1, 0);
5150 gexpr();
5152 type1 = vtop->type;
5153 sv = *vtop; /* save value to handle it later */
5154 vtop--; /* no vpop so that FP stack is not flushed */
5155 skip(':');
5156 u = gjmp(0);
5157 gsym(tt);
5158 expr_cond();
5159 type2 = vtop->type;
5161 t1 = type1.t;
5162 bt1 = t1 & VT_BTYPE;
5163 t2 = type2.t;
5164 bt2 = t2 & VT_BTYPE;
5165 /* cast operands to correct type according to ISOC rules */
5166 if (is_float(bt1) || is_float(bt2)) {
5167 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5168 type.t = VT_LDOUBLE;
5169 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5170 type.t = VT_DOUBLE;
5171 } else {
5172 type.t = VT_FLOAT;
5174 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5175 /* cast to biggest op */
5176 type.t = VT_LLONG;
5177 /* convert to unsigned if it does not fit in a long long */
5178 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5179 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5180 type.t |= VT_UNSIGNED;
5181 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5182 /* If one is a null ptr constant the result type
5183 is the other. */
5184 if (is_null_pointer (vtop))
5185 type = type1;
5186 else if (is_null_pointer (&sv))
5187 type = type2;
5188 /* XXX: test pointer compatibility, C99 has more elaborate
5189 rules here. */
5190 else
5191 type = type1;
5192 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5193 /* XXX: test function pointer compatibility */
5194 type = bt1 == VT_FUNC ? type1 : type2;
5195 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5196 /* XXX: test structure compatibility */
5197 type = bt1 == VT_STRUCT ? type1 : type2;
5198 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5199 /* NOTE: as an extension, we accept void on only one side */
5200 type.t = VT_VOID;
5201 } else {
5202 /* integer operations */
5203 type.t = VT_INT;
5204 /* convert to unsigned if it does not fit in an integer */
5205 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5206 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5207 type.t |= VT_UNSIGNED;
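            /* For illustration (with 'c' any scalar condition):
                 c ? 1 : 2.5       has type double
                 c ? 1LL : 2       has type long long
                 c ? 1u : -1       has type unsigned int (VT_UNSIGNED case)
                 c ? (void)0 : 1   has type void (one-sided void extension) */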
5209 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5210 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5211 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5213 /* now we convert second operand */
5214 gen_cast(&type);
5215 if (islv) {
5216 mk_pointer(&vtop->type);
5217 gaddrof();
5219 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5220 gaddrof();
5221 rc = RC_INT;
5222 if (is_float(type.t)) {
5223 rc = RC_FLOAT;
5224 #ifdef TCC_TARGET_X86_64
5225 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5226 rc = RC_ST0;
5228 #endif
5229 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5230 /* for long longs, we use fixed registers to avoid having
5231 to handle a complicated move */
5232 rc = RC_IRET;
5235 r2 = gv(rc);
5236 /* this is horrible, but we must also convert first
5237 operand */
5238 tt = gjmp(0);
5239 gsym(u);
5240 /* put again first value and cast it */
5241 *vtop = sv;
5242 gen_cast(&type);
5243 if (islv) {
5244 mk_pointer(&vtop->type);
5245 gaddrof();
5247 else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5248 gaddrof();
5249 r1 = gv(rc);
5250 move_reg(r2, r1, type.t);
5251 vtop->r = r2;
5252 gsym(tt);
5253 if (islv)
5254 indir();
5259 static void expr_eq(void)
5261 int t;
5263 expr_cond();
5264 if (tok == '=' ||
5265 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5266 tok == TOK_A_XOR || tok == TOK_A_OR ||
5267 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5268 test_lvalue();
5269 t = tok;
5270 next();
5271 if (t == '=') {
5272 expr_eq();
5273 } else {
5274 vdup();
5275 expr_eq();
5276 gen_op(t & 0x7f);
5278 vstore();
5282 ST_FUNC void gexpr(void)
5284 while (1) {
5285 expr_eq();
5286 if (tok != ',')
5287 break;
5288 vpop();
5289 next();
5293 /* parse an expression and return its type without any side effect. */
5294 static void expr_type(CType *type)
5296 int saved_nocode_wanted;
5298 saved_nocode_wanted = nocode_wanted;
5299 nocode_wanted = 1;
5300 gexpr();
5301 *type = vtop->type;
5302 vpop();
5303 nocode_wanted = saved_nocode_wanted;
5306 /* parse a unary expression and return its type without any side
5307 effect. */
5308 static void unary_type(CType *type)
5310 int a;
5312 a = nocode_wanted;
5313 nocode_wanted = 1;
5314 unary();
5315 *type = vtop->type;
5316 vpop();
5317 nocode_wanted = a;
5320 /* parse a constant expression and return value in vtop. */
5321 static void expr_const1(void)
5323 int a;
5324 a = const_wanted;
5325 const_wanted = 1;
5326 expr_cond();
5327 const_wanted = a;
5330 /* parse an integer constant and return its value. */
5331 static inline int64_t expr_const64(void)
5333 int64_t c;
5334 expr_const1();
5335 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5336 expect("constant expression");
5337 c = vtop->c.i;
5338 vpop();
5339 return c;
5342 /* parse an integer constant and return its value.
5343 Complain if it doesn't fit 32bit (signed or unsigned). */
5344 ST_FUNC int expr_const(void)
5346 int c;
5347 int64_t wc = expr_const64();
5348 c = wc;
5349 if (c != wc && (unsigned)c != wc)
5350 tcc_error("constant exceeds 32 bit");
5351 return c;
5354 /* return the label token if current token is a label, otherwise
5355 return zero */
5356 static int is_label(void)
5358 int last_tok;
5360 /* fast test first */
5361 if (tok < TOK_UIDENT)
5362 return 0;
5363 /* no need to save tokc because tok is an identifier */
5364 last_tok = tok;
5365 next();
5366 if (tok == ':') {
5367 next();
5368 return last_tok;
5369 } else {
5370 unget_tok(last_tok);
5371 return 0;
5375 static void label_or_decl(int l)
5377 int last_tok;
5379 /* fast test first */
5380 if (tok >= TOK_UIDENT)
5382 /* no need to save tokc because tok is an identifier */
5383 last_tok = tok;
5384 next();
5385 if (tok == ':') {
5386 unget_tok(last_tok);
5387 return;
5389 unget_tok(last_tok);
5391 decl(l);
5394 static int case_cmp(const void *pa, const void *pb)
5396 int64_t a = (*(struct case_t**) pa)->v1;
5397 int64_t b = (*(struct case_t**) pb)->v1;
5398 return a < b ? -1 : a > b;
5401 static void gcase(struct case_t **base, int len, int *bsym)
5403 struct case_t *p;
5404 int e;
5405 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5406 gv(RC_INT);
5407 while (len > 4) {
5408 /* binary search */
5409 p = base[len/2];
5410 vdup();
5411 if (ll)
5412 vpushll(p->v2);
5413 else
5414 vpushi(p->v2);
5415 gen_op(TOK_LE);
5416 e = gtst(1, 0);
5417 vdup();
5418 if (ll)
5419 vpushll(p->v1);
5420 else
5421 vpushi(p->v1);
5422 gen_op(TOK_GE);
5423 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5424 /* x < v1 */
5425 gcase(base, len/2, bsym);
5426 if (cur_switch->def_sym)
5427 gjmp_addr(cur_switch->def_sym);
5428 else
5429 *bsym = gjmp(*bsym);
5430 /* x > v2 */
5431 gsym(e);
5432 e = len/2 + 1;
5433 base += e; len -= e;
5435 /* linear scan */
5436 while (len--) {
5437 p = *base++;
5438 vdup();
5439 if (ll)
5440 vpushll(p->v2);
5441 else
5442 vpushi(p->v2);
5443 if (p->v1 == p->v2) {
5444 gen_op(TOK_EQ);
5445 gtst_addr(0, p->sym);
5446 } else {
5447 gen_op(TOK_LE);
5448 e = gtst(1, 0);
5449 vdup();
5450 if (ll)
5451 vpushll(p->v1);
5452 else
5453 vpushi(p->v1);
5454 gen_op(TOK_GE);
5455 gtst_addr(0, p->sym);
5456 gsym(e);
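/* For illustration: a GNU case range 'case 10 ... 12:' arrives here with
   v1 == 10 and v2 == 12 and is lowered to an 'x <= 12' test followed by an
   'x >= 10' test that jumps to the case body when both hold; a plain
   'case 10:' (v1 == v2) collapses to a single equality test. */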
5461 static void block(int *bsym, int *csym, int is_expr)
5463 int a, b, c, d, cond;
5464 Sym *s;
5466 /* generate line number info */
5467 if (tcc_state->do_debug &&
5468 (last_line_num != file->line_num || last_ind != ind)) {
5469 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5470 last_ind = ind;
5471 last_line_num = file->line_num;
5474 if (is_expr) {
5475 /* default return value is (void) */
5476 vpushi(0);
5477 vtop->type.t = VT_VOID;
5480 if (tok == TOK_IF) {
5481 /* if test */
5482 int saved_nocode_wanted = nocode_wanted;
5483 next();
5484 skip('(');
5485 gexpr();
5486 skip(')');
5487 cond = condition_3way();
5488 if (cond == 0)
5489 nocode_wanted |= 2;
5490 a = gvtst(1, 0);
5491 block(bsym, csym, 0);
5492 if (cond != 1)
5493 nocode_wanted = saved_nocode_wanted;
5494 c = tok;
5495 if (c == TOK_ELSE) {
5496 next();
5497 if (cond == 1)
5498 nocode_wanted |= 2;
5499 d = gjmp(0);
5500 gsym(a);
5501 block(bsym, csym, 0);
5502 gsym(d); /* patch else jmp */
5503 if (cond != 0)
5504 nocode_wanted = saved_nocode_wanted;
5505 } else
5506 gsym(a);
5507 } else if (tok == TOK_WHILE) {
5508 int saved_nocode_wanted;
5509 nocode_wanted &= ~2;
5510 next();
5511 d = ind;
5512 vla_sp_restore();
5513 skip('(');
5514 gexpr();
5515 skip(')');
5516 a = gvtst(1, 0);
5517 b = 0;
5518 ++local_scope;
5519 saved_nocode_wanted = nocode_wanted;
5520 block(&a, &b, 0);
5521 nocode_wanted = saved_nocode_wanted;
5522 --local_scope;
5523 if(!nocode_wanted)
5524 gjmp_addr(d);
5525 gsym(a);
5526 gsym_addr(b, d);
5527 } else if (tok == '{') {
5528 Sym *llabel;
5529 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5531 next();
5532 /* record local declaration stack position */
5533 s = local_stack;
5534 llabel = local_label_stack;
5535 ++local_scope;
5537 /* handle local labels declarations */
5538 if (tok == TOK_LABEL) {
5539 next();
5540 for(;;) {
5541 if (tok < TOK_UIDENT)
5542 expect("label identifier");
5543 label_push(&local_label_stack, tok, LABEL_DECLARED);
5544 next();
5545 if (tok == ',') {
5546 next();
5547 } else {
5548 skip(';');
5549 break;
5553 while (tok != '}') {
5554 label_or_decl(VT_LOCAL);
5555 if (tok != '}') {
5556 if (is_expr)
5557 vpop();
5558 block(bsym, csym, is_expr);
5561 /* pop locally defined labels */
5562 label_pop(&local_label_stack, llabel);
5563 /* pop locally defined symbols */
5564 --local_scope;
5565 /* In the is_expr case (a statement expression is finished here),
5566 vtop might refer to symbols on the local_stack. Either via the
5567 type or via vtop->sym. We can't pop those nor any that in turn
5568 might be referred to. To make it easier we don't roll back
5569 any symbols in that case; some upper level call to block() will
5570 do that. We do have to remove such symbols from the lookup
5571 tables, though. sym_pop will do that. */
5572 sym_pop(&local_stack, s, is_expr);
5574 /* Pop VLA frames and restore stack pointer if required */
5575 if (vlas_in_scope > saved_vlas_in_scope) {
5576 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5577 vla_sp_restore();
5579 vlas_in_scope = saved_vlas_in_scope;
5581 next();
5582 } else if (tok == TOK_RETURN) {
5583 next();
5584 if (tok != ';') {
5585 gexpr();
5586 gen_assign_cast(&func_vt);
5587 #ifdef TCC_TARGET_ARM64
5588 // Perhaps it would be better to use this for all backends:
5589 greturn();
5590 #else
5591 if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
5592 CType type, ret_type;
5593 int ret_align, ret_nregs, regsize;
5594 ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
5595 &ret_align, &regsize);
5596 if (0 == ret_nregs) {
5597 /* if returning structure, must copy it to implicit
5598 first pointer arg location */
5599 type = func_vt;
5600 mk_pointer(&type);
5601 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5602 indir();
5603 vswap();
5604 /* copy structure value to pointer */
5605 vstore();
5606 } else {
5607 /* returning structure packed into registers */
5608 int r, size, addr, align;
5609 size = type_size(&func_vt,&align);
5610 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5611 (vtop->c.i & (ret_align-1)))
5612 && (align & (ret_align-1))) {
5613 loc = (loc - size) & -ret_align;
5614 addr = loc;
5615 type = func_vt;
5616 vset(&type, VT_LOCAL | VT_LVAL, addr);
5617 vswap();
5618 vstore();
5619 vpop();
5620 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5622 vtop->type = ret_type;
5623 if (is_float(ret_type.t))
5624 r = rc_fret(ret_type.t);
5625 else
5626 r = RC_IRET;
5628 if (ret_nregs == 1)
5629 gv(r);
5630 else {
5631 for (;;) {
5632 vdup();
5633 gv(r);
5634 vpop();
5635 if (--ret_nregs == 0)
5636 break;
5637 /* We assume that when a structure is returned in multiple
5638 registers, their classes are consecutive values of the
4639                       sequence s(n) = 2^n */
5640 r <<= 1;
5641 vtop->c.i += regsize;
5645 } else if (is_float(func_vt.t)) {
5646 gv(rc_fret(func_vt.t));
5647 } else {
5648 gv(RC_IRET);
5650 #endif
5651 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5653 skip(';');
5654 /* jump unless last stmt in top-level block */
5655 if (tok != '}' || local_scope != 1)
5656 rsym = gjmp(rsym);
5657 nocode_wanted |= 2;
5658 } else if (tok == TOK_BREAK) {
5659 /* compute jump */
5660 if (!bsym)
5661 tcc_error("cannot break");
5662 *bsym = gjmp(*bsym);
5663 next();
5664 skip(';');
5665 nocode_wanted |= 2;
5666 } else if (tok == TOK_CONTINUE) {
5667 /* compute jump */
5668 if (!csym)
5669 tcc_error("cannot continue");
5670 vla_sp_restore_root();
5671 *csym = gjmp(*csym);
5672 next();
5673 skip(';');
5674 } else if (tok == TOK_FOR) {
5675 int e;
5676 int saved_nocode_wanted;
5677 nocode_wanted &= ~2;
5678 next();
5679 skip('(');
5680 s = local_stack;
5681 ++local_scope;
5682 if (tok != ';') {
5683 /* c99 for-loop init decl? */
5684 if (!decl0(VT_LOCAL, 1)) {
5685 /* no, regular for-loop init expr */
5686 gexpr();
5687 vpop();
5690 skip(';');
5691 d = ind;
5692 c = ind;
5693 vla_sp_restore();
5694 a = 0;
5695 b = 0;
5696 if (tok != ';') {
5697 gexpr();
5698 a = gvtst(1, 0);
5700 skip(';');
5701 if (tok != ')') {
5702 e = gjmp(0);
5703 c = ind;
5704 vla_sp_restore();
5705 gexpr();
5706 vpop();
5707 gjmp_addr(d);
5708 gsym(e);
5710 skip(')');
5711 saved_nocode_wanted = nocode_wanted;
5712 block(&a, &b, 0);
5713 nocode_wanted = saved_nocode_wanted;
5714 if(!nocode_wanted)
5715 gjmp_addr(c);
5716 gsym(a);
5717 gsym_addr(b, c);
5718 --local_scope;
5719 sym_pop(&local_stack, s, 0);
5721 } else
5722 if (tok == TOK_DO) {
5723 int saved_nocode_wanted;
5724 nocode_wanted &= ~2;
5725 next();
5726 a = 0;
5727 b = 0;
5728 d = ind;
5729 vla_sp_restore();
5730 saved_nocode_wanted = nocode_wanted;
5731 block(&a, &b, 0);
5732 nocode_wanted = saved_nocode_wanted;
5733 skip(TOK_WHILE);
5734 skip('(');
5735 gsym(b);
5736 gexpr();
5737 c = gvtst(0, 0);
5738 if (!nocode_wanted)
5739 gsym_addr(c, d);
5740 skip(')');
5741 gsym(a);
5742 skip(';');
5743 } else
5744 if (tok == TOK_SWITCH) {
5745 struct switch_t *saved, sw;
5746 int saved_nocode_wanted = nocode_wanted;
5747 SValue switchval;
5748 next();
5749 skip('(');
5750 gexpr();
5751 skip(')');
5752 switchval = *vtop--;
5753 a = 0;
5754 b = gjmp(0); /* jump to first case */
5755 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5756 saved = cur_switch;
5757 cur_switch = &sw;
5758 block(&a, csym, 0);
5759 nocode_wanted = saved_nocode_wanted;
5760 a = gjmp(a); /* add implicit break */
5761 /* case lookup */
5762 gsym(b);
5763 if (!nocode_wanted) {
5764 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5765 for (b = 1; b < sw.n; b++)
5766 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5767 tcc_error("duplicate case value");
5768 /* Our switch table sorting is signed, so the compared
5769 value needs to be as well when it's 64bit. */
5770 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5771 switchval.type.t &= ~VT_UNSIGNED;
5772 vpushv(&switchval);
5773 gcase(sw.p, sw.n, &a);
5774 vpop();
5775 if (sw.def_sym)
5776 gjmp_addr(sw.def_sym);
5778 dynarray_reset(&sw.p, &sw.n);
5779 cur_switch = saved;
5780 /* break label */
5781 gsym(a);
5782 } else
5783 if (tok == TOK_CASE) {
5784 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5785 if (!cur_switch)
5786 expect("switch");
5787 nocode_wanted &= ~2;
5788 next();
5789 cr->v1 = cr->v2 = expr_const64();
5790 if (gnu_ext && tok == TOK_DOTS) {
5791 next();
5792 cr->v2 = expr_const64();
5793 if (cr->v2 < cr->v1)
5794 tcc_warning("empty case range");
5796 cr->sym = ind;
5797 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5798 skip(':');
5799 is_expr = 0;
5800 goto block_after_label;
5801 } else
5802 if (tok == TOK_DEFAULT) {
5803 next();
5804 skip(':');
5805 if (!cur_switch)
5806 expect("switch");
5807 if (cur_switch->def_sym)
5808 tcc_error("too many 'default'");
5809 cur_switch->def_sym = ind;
5810 is_expr = 0;
5811 goto block_after_label;
5812 } else
5813 if (tok == TOK_GOTO) {
5814 next();
5815 if (tok == '*' && gnu_ext) {
5816 /* computed goto */
5817 next();
5818 gexpr();
5819 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5820 expect("pointer");
5821 if (!nocode_wanted)
5822 ggoto();
5823 else
5824 vtop--;
5825 } else if (tok >= TOK_UIDENT) {
5826 s = label_find(tok);
5827 /* put forward definition if needed */
5828 if (!s) {
5829 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5830 } else {
5831 if (s->r == LABEL_DECLARED)
5832 s->r = LABEL_FORWARD;
5834 vla_sp_restore_root();
5835 if (nocode_wanted)
5837 else if (s->r & LABEL_FORWARD)
5838 s->jnext = gjmp(s->jnext);
5839 else
5840 gjmp_addr(s->jnext);
5841 next();
5842 } else {
5843 expect("label identifier");
5845 skip(';');
5846 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5847 asm_instr();
5848 } else {
5849 b = is_label();
5850 if (b) {
5851 /* label case */
5852 s = label_find(b);
5853 if (s) {
5854 if (s->r == LABEL_DEFINED)
5855 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5856 gsym(s->jnext);
5857 s->r = LABEL_DEFINED;
5858 } else {
5859 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5861 s->jnext = ind;
5862 vla_sp_restore();
5863 /* we accept this, but it is a mistake */
5864 block_after_label:
5865 nocode_wanted &= ~2;
5866 if (tok == '}') {
5867 tcc_warning("deprecated use of label at end of compound statement");
5868 } else {
5869 if (is_expr)
5870 vpop();
5871 block(bsym, csym, is_expr);
5873 } else {
5874 /* expression case */
5875 if (tok != ';') {
5876 if (is_expr) {
5877 vpop();
5878 gexpr();
5879 } else {
5880 gexpr();
5881 vpop();
5884 skip(';');
5889 #define EXPR_CONST 1
5890 #define EXPR_ANY 2
5892 static void parse_init_elem(int expr_type)
5894 int saved_global_expr;
5895 switch(expr_type) {
5896 case EXPR_CONST:
5897 /* compound literals must be allocated globally in this case */
5898 saved_global_expr = global_expr;
5899 global_expr = 1;
5900 expr_const1();
5901 global_expr = saved_global_expr;
5902 /* NOTE: symbols are accepted */
5903 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5904 tcc_error("initializer element is not constant");
5905 break;
5906 case EXPR_ANY:
5907 expr_eq();
5908 break;
5912 /* t is the array or struct type. c is the array or struct
5913 address. cur_field is the pointer to the current
5914 value, for arrays the 'c' member contains the current start
5915 index and the 'r' contains the end index (in case of range init).
5916 'size_only' is true if only size info is needed (only used
5917 in arrays) */
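/* For illustration, the designator forms handled here (with
   'struct point { int x, y; };' as a placeholder type):
       struct point p = { .y = 2, .x = 1 };      C99 member designator
       int a[8] = { [3] = 1, [5 ... 7] = 2 };    index and GNU range designators
       struct point q = { x: 1 };                old GNU 'field:' form (is_label) */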
5918 static void decl_designator(CType *type, Section *sec, unsigned long c,
5919 Sym **cur_field, int size_only)
5921 Sym *s, *f;
5922 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5923 CType type1;
5925 notfirst = 0;
5926 elem_size = 0;
5927 nb_elems = 1;
5928 if (gnu_ext && (l = is_label()) != 0)
5929 goto struct_field;
5930 while (tok == '[' || tok == '.') {
5931 if (tok == '[') {
5932 if (!(type->t & VT_ARRAY))
5933 expect("array type");
5934 s = type->ref;
5935 next();
5936 index = expr_const();
5937 if (index < 0 || (s->c >= 0 && index >= s->c))
5938 tcc_error("invalid index");
5939 if (tok == TOK_DOTS && gnu_ext) {
5940 next();
5941 index_last = expr_const();
5942 if (index_last < 0 ||
5943 (s->c >= 0 && index_last >= s->c) ||
5944 index_last < index)
5945 tcc_error("invalid index");
5946 } else {
5947 index_last = index;
5949 skip(']');
5950 if (!notfirst) {
5951 (*cur_field)->c = index;
5952 (*cur_field)->r = index_last;
5954 type = pointed_type(type);
5955 elem_size = type_size(type, &align);
5956 c += index * elem_size;
5957 /* NOTE: we only support ranges for last designator */
5958 nb_elems = index_last - index + 1;
5959 if (nb_elems != 1) {
5960 notfirst = 1;
5961 break;
5963 } else {
5964 next();
5965 l = tok;
5966 next();
5967 struct_field:
5968 if ((type->t & VT_BTYPE) != VT_STRUCT)
5969 expect("struct/union type");
5970 f = find_field(type, l);
5971 if (!f)
5972 expect("field");
5973 if (!notfirst)
5974 *cur_field = f;
5975 /* XXX: fix this mess by using explicit storage field */
5976 type1 = f->type;
5977 type1.t |= (type->t & ~VT_TYPE);
5978 type = &type1;
5979 c += f->c;
5981 notfirst = 1;
5983 if (notfirst) {
5984 if (tok == '=') {
5985 next();
5986 } else {
5987 if (!gnu_ext)
5988 expect("=");
5990 } else {
5991 if (type->t & VT_ARRAY) {
5992 index = (*cur_field)->c;
5993 if (type->ref->c >= 0 && index >= type->ref->c)
5994 tcc_error("index too large");
5995 type = pointed_type(type);
5996 c += index * type_size(type, &align);
5997 } else {
5998 f = *cur_field;
5999 if (!f)
6000 tcc_error("too many field init");
6001 /* XXX: fix this mess by using explicit storage field */
6002 type1 = f->type;
6003 type1.t |= (type->t & ~VT_TYPE);
6004 type = &type1;
6005 c += f->c;
6008 decl_initializer(type, sec, c, 0, size_only);
6010 /* XXX: make it more general */
6011 if (!size_only && nb_elems > 1) {
6012 unsigned long c_end;
6013 uint8_t *src, *dst;
6014 int i;
6016 if (!sec) {
6017 vset(type, VT_LOCAL|VT_LVAL, c);
6018 for (i = 1; i < nb_elems; i++) {
6019 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6020 vswap();
6021 vstore();
6023 vpop();
6024 } else {
6025 c_end = c + nb_elems * elem_size;
6026 if (c_end > sec->data_allocated)
6027 section_realloc(sec, c_end);
6028 src = sec->data + c;
6029 dst = src;
6030 for(i = 1; i < nb_elems; i++) {
6031 dst += elem_size;
6032 memcpy(dst, src, elem_size);
6038 /* store a value or an expression directly in global data or in local array */
6039 static void init_putv(CType *type, Section *sec, unsigned long c)
6041 int bt, bit_pos, bit_size;
6042 void *ptr;
6043 unsigned long long bit_mask;
6044 CType dtype;
6046 dtype = *type;
6047 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6049 if (sec) {
6050 int size, align;
6051 /* XXX: not portable */
6052 /* XXX: generate error if incorrect relocation */
6053 gen_assign_cast(&dtype);
6054 bt = type->t & VT_BTYPE;
6055 size = type_size(type, &align);
6056 if (c + size > sec->data_allocated) {
6057 section_realloc(sec, c + size);
6059 ptr = sec->data + c;
6060 /* XXX: make code faster ? */
6061 if (!(type->t & VT_BITFIELD)) {
6062 bit_pos = 0;
6063 bit_size = PTR_SIZE * 8;
6064 bit_mask = -1LL;
6065 } else {
6066 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6067 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6068 bit_mask = (1LL << bit_size) - 1;
6070 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6071 vtop->sym->v >= SYM_FIRST_ANOM &&
6072            /* XXX This rejects compound literals like
6073 '(void *){ptr}'. The problem is that '&sym' is
6074 represented the same way, which would be ruled out
6075 by the SYM_FIRST_ANOM check above, but also '"string"'
6076 in 'char *p = "string"' is represented the same
6077 with the type being VT_PTR and the symbol being an
6078 anonymous one. That is, there's no difference in vtop
6079 between '(void *){x}' and '&(void *){x}'. Ignore
6080 pointer typed entities here. Hopefully no real code
6081               will ever use compound literals with scalar type. */
6082 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6083 /* These come from compound literals, memcpy stuff over. */
6084 Section *ssec;
6085 ElfW(Sym) *esym;
6086 ElfW_Rel *rel;
6087 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6088 ssec = tcc_state->sections[esym->st_shndx];
6089 memmove (ptr, ssec->data + esym->st_value, size);
6090 if (ssec->reloc) {
6091 /* We need to copy over all memory contents, and that
6092 includes relocations. Use the fact that relocs are
6093                   created in order, so look from the end of relocs
6094 until we hit one before the copied region. */
6095 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6096 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6097 while (num_relocs--) {
6098 rel--;
6099 if (rel->r_offset >= esym->st_value + size)
6100 continue;
6101 if (rel->r_offset < esym->st_value)
6102 break;
6103 /* Note: if the same fields are initialized multiple
6104 times (possible with designators) then we possibly
6105 add multiple relocations for the same offset here.
6106                       That would lead to wrong code; the last reloc needs
6107 to win. We clean this up later after the whole
6108 initializer is parsed. */
6109 put_elf_reloca(symtab_section, sec,
6110 c + rel->r_offset - esym->st_value,
6111 ELFW(R_TYPE)(rel->r_info),
6112 ELFW(R_SYM)(rel->r_info),
6113 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6114 rel->r_addend
6115 #else
6117 #endif
6121 } else {
6122 if ((vtop->r & VT_SYM) &&
6123 (bt == VT_BYTE ||
6124 bt == VT_SHORT ||
6125 bt == VT_DOUBLE ||
6126 bt == VT_LDOUBLE ||
6127 #if PTR_SIZE == 8
6128 (bt == VT_LLONG && bit_size != 64) ||
6129 bt == VT_INT
6130 #else
6131 bt == VT_LLONG ||
6132 (bt == VT_INT && bit_size != 32)
6133 #endif
6135 tcc_error("initializer element is not computable at load time");
6136 switch(bt) {
6137 /* XXX: when cross-compiling we assume that each type has the
6138 same representation on host and target, which is likely to
6139 be wrong in the case of long double */
6140 case VT_BOOL:
6141 vtop->c.i = (vtop->c.i != 0);
6142 case VT_BYTE:
6143 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6144 break;
6145 case VT_SHORT:
6146 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6147 break;
6148 case VT_DOUBLE:
6149 *(double *)ptr = vtop->c.d;
6150 break;
6151 case VT_LDOUBLE:
6152 if (sizeof(long double) == LDOUBLE_SIZE)
6153 *(long double *)ptr = vtop->c.ld;
6154 else if (sizeof(double) == LDOUBLE_SIZE)
6155 *(double *)ptr = vtop->c.ld;
6156 else
6157 tcc_error("can't cross compile long double constants");
6158 break;
6159 #if PTR_SIZE != 8
6160 case VT_LLONG:
6161 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6162 break;
6163 #else
6164 case VT_LLONG:
6165 #endif
6166 case VT_PTR:
6168 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6169 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6170 if (vtop->r & VT_SYM)
6171 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6172 else
6173 *(addr_t *)ptr |= val;
6174 #else
6175 if (vtop->r & VT_SYM)
6176 greloc(sec, vtop->sym, c, R_DATA_PTR);
6177 *(addr_t *)ptr |= val;
6178 #endif
6179 break;
6181 default:
6183 int val = (vtop->c.i & bit_mask) << bit_pos;
6184 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6185 if (vtop->r & VT_SYM)
6186 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6187 else
6188 *(int *)ptr |= val;
6189 #else
6190 if (vtop->r & VT_SYM)
6191 greloc(sec, vtop->sym, c, R_DATA_PTR);
6192 *(int *)ptr |= val;
6193 #endif
6194 break;
6198 vtop--;
6199 } else {
6200 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6201 vswap();
6202 vstore();
6203 vpop();
6207 /* put zeros for variable based init */
6208 static void init_putz(Section *sec, unsigned long c, int size)
6210 if (sec) {
6211 /* nothing to do because globals are already set to zero */
6212 } else {
6213 vpush_global_sym(&func_old_type, TOK_memset);
6214 vseti(VT_LOCAL, c);
6215 #ifdef TCC_TARGET_ARM
6216 vpushs(size);
6217 vpushi(0);
6218 #else
6219 vpushi(0);
6220 vpushs(size);
6221 #endif
6222 gfunc_call(3);
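/* For illustration: in 'void f(void) { int a[100] = { 1 }; }' the elements
   not covered by the initializer are zero-filled through the memset call
   generated above, whereas a global object needs no code because its
   section is already zero-initialized. */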
6226 /* 't' contains the type and storage info. 'c' is the offset of the
6227 object in section 'sec'. If 'sec' is NULL, it means stack based
6228 allocation. 'first' is true if array '{' must be read (multi
6229 dimension implicit array init handling). 'size_only' is true if
6230 size only evaluation is wanted (only for arrays). */
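/* For illustration: for 'int a[][2] = { {1, 2}, {3, 4} };' the caller first
   replays the initializer with size_only set to discover the missing
   dimension (the array size is patched below when n < 0), then parses it
   again to store the values; 'first' marks the outermost call, where the
   braces are mandatory unless the initializer is a string literal. */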
6231 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6232 int first, int size_only)
6234 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6235 int size1, align1;
6236 int have_elem;
6237 Sym *s, *f;
6238 Sym indexsym;
6239 CType *t1;
6241 /* If we currently are at an '}' or ',' we have read an initializer
6242 element in one of our callers, and not yet consumed it. */
6243 have_elem = tok == '}' || tok == ',';
6244 if (!have_elem && tok != '{' &&
6245 /* In case of strings we have special handling for arrays, so
6246 don't consume them as initializer value (which would commit them
6247 to some anonymous symbol). */
6248 tok != TOK_LSTR && tok != TOK_STR &&
6249 !size_only) {
6250 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6251 have_elem = 1;
6254 if (have_elem &&
6255 !(type->t & VT_ARRAY) &&
6256 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6257 The source type might have VT_CONSTANT set, which is
6258 of course assignable to non-const elements. */
6259 is_compatible_parameter_types(type, &vtop->type)) {
6260 init_putv(type, sec, c);
6261 } else if (type->t & VT_ARRAY) {
6262 s = type->ref;
6263 n = s->c;
6264 array_length = 0;
6265 t1 = pointed_type(type);
6266 size1 = type_size(t1, &align1);
6268 no_oblock = 1;
6269 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6270 tok == '{') {
6271 if (tok != '{')
6272 tcc_error("character array initializer must be a literal,"
6273 " optionally enclosed in braces");
6274 skip('{');
6275 no_oblock = 0;
6278 /* only parse strings here if correct type (otherwise: handle
6279           them as ((w)char *) expressions) */
6280 if ((tok == TOK_LSTR &&
6281 #ifdef TCC_TARGET_PE
6282 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6283 #else
6284 (t1->t & VT_BTYPE) == VT_INT
6285 #endif
6286 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6287 while (tok == TOK_STR || tok == TOK_LSTR) {
6288 int cstr_len, ch;
6290 /* compute maximum number of chars wanted */
6291 if (tok == TOK_STR)
6292 cstr_len = tokc.str.size;
6293 else
6294 cstr_len = tokc.str.size / sizeof(nwchar_t);
6295 cstr_len--;
6296 nb = cstr_len;
6297 if (n >= 0 && nb > (n - array_length))
6298 nb = n - array_length;
6299 if (!size_only) {
6300 if (cstr_len > nb)
6301 tcc_warning("initializer-string for array is too long");
6302                     /* in order to go faster for the common case (char
6303                        string in a global variable), we handle it
6304                        specifically */
6305 if (sec && tok == TOK_STR && size1 == 1) {
6306 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6307 } else {
6308 for(i=0;i<nb;i++) {
6309 if (tok == TOK_STR)
6310 ch = ((unsigned char *)tokc.str.data)[i];
6311 else
6312 ch = ((nwchar_t *)tokc.str.data)[i];
6313 vpushi(ch);
6314 init_putv(t1, sec, c + (array_length + i) * size1);
6318 array_length += nb;
6319 next();
6321 /* only add trailing zero if enough storage (no
6322 warning in this case since it is standard) */
6323 if (n < 0 || array_length < n) {
6324 if (!size_only) {
6325 vpushi(0);
6326 init_putv(t1, sec, c + (array_length * size1));
6328 array_length++;
6330 } else {
6331 indexsym.c = 0;
6332 indexsym.r = 0;
6333 f = &indexsym;
6335 do_init_list:
6336 while (tok != '}' || have_elem) {
6337 decl_designator(type, sec, c, &f, size_only);
6338 have_elem = 0;
6339 index = f->c;
6340 /* must put zero in holes (note that doing it that way
6341 ensures that it even works with designators) */
6342 if (!size_only && array_length < index) {
6343 init_putz(sec, c + array_length * size1,
6344 (index - array_length) * size1);
6346 if (type->t & VT_ARRAY) {
6347 index = indexsym.c = ++indexsym.r;
6348 } else {
6349 index = index + type_size(&f->type, &align1);
6350 if (s->type.t == TOK_UNION)
6351 f = NULL;
6352 else
6353 f = f->next;
6355 if (index > array_length)
6356 array_length = index;
6358 if (type->t & VT_ARRAY) {
6359 /* special test for multi dimensional arrays (may not
6360 be strictly correct if designators are used at the
6361 same time) */
6362 if (no_oblock && index >= n)
6363 break;
6364 } else {
6365 if (no_oblock && f == NULL)
6366 break;
6368 if (tok == '}')
6369 break;
6370 skip(',');
6373 /* put zeros at the end */
6374 if (!size_only && array_length < n) {
6375 init_putz(sec, c + array_length * size1,
6376 (n - array_length) * size1);
6378 if (!no_oblock)
6379 skip('}');
6380 /* patch type size if needed, which happens only for array types */
6381 if (n < 0)
6382 s->c = array_length;
6383 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6384 size1 = 1;
6385 no_oblock = 1;
6386 if (first || tok == '{') {
6387 skip('{');
6388 no_oblock = 0;
6390 s = type->ref;
6391 f = s->next;
6392 array_length = 0;
6393 n = s->c;
6394 goto do_init_list;
6395 } else if (tok == '{') {
6396 next();
6397 decl_initializer(type, sec, c, first, size_only);
6398 skip('}');
6399 } else if (size_only) {
6400 /* If we supported only ISO C we wouldn't have to accept calling
6401           this on anything other than an array with size_only==1 (and even then
6402 only on the outermost level, so no recursion would be needed),
6403 because initializing a flex array member isn't supported.
6404 But GNU C supports it, so we need to recurse even into
6405 subfields of structs and arrays when size_only is set. */
6406 /* just skip expression */
6407 parlevel = parlevel1 = 0;
6408 while ((parlevel > 0 || parlevel1 > 0 ||
6409 (tok != '}' && tok != ',')) && tok != -1) {
6410 if (tok == '(')
6411 parlevel++;
6412 else if (tok == ')') {
6413 if (parlevel == 0 && parlevel1 == 0)
6414 break;
6415 parlevel--;
6417 else if (tok == '{')
6418 parlevel1++;
6419 else if (tok == '}') {
6420 if (parlevel == 0 && parlevel1 == 0)
6421 break;
6422 parlevel1--;
6424 next();
6426 } else {
6427 if (!have_elem) {
6428 /* This should happen only when we haven't parsed
6429 the init element above for fear of committing a
6430 string constant to memory too early. */
6431 if (tok != TOK_STR && tok != TOK_LSTR)
6432 expect("string constant");
6433 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6435 init_putv(type, sec, c);
6439 /* parse an initializer for type 't' if 'has_init' is non zero, and
6440 allocate space in local or global data space ('r' is either
6441 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6442 variable 'v' of scope 'scope' is declared before initializers
6443 are parsed. If 'v' is zero, then a reference to the new object
6444 is put in the value stack. If 'has_init' is 2, a special parsing
6445 is done to handle string constants. */
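/* For illustration: a string literal used as an expression, as in
   'char *p = "hi" " there";', arrives from unary() with has_init == 2 and
   v == 0, so only the adjacent string tokens are collected to size the
   anonymous char array and a reference to it is left on the value stack;
   an ordinary declaration such as 'int a[] = { 1, 2 };' arrives from the
   declaration parser with has_init == 1. */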
6446 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6447 int has_init, int v, int scope)
6449 int size, align, addr, data_offset;
6450 int level;
6451 ParseState saved_parse_state = {0};
6452 TokenString *init_str = NULL;
6453 Section *sec;
6454 Sym *flexible_array;
6456 flexible_array = NULL;
6457 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6458 Sym *field = type->ref->next;
6459 if (field) {
6460 while (field->next)
6461 field = field->next;
6462 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6463 flexible_array = field;
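    /* For illustration: given 'struct S { int n; char d[]; };' the member
       'd' is recorded here as the flexible array member, and an initializer
       such as 'struct S s = { 1, "abc" };' later enlarges 'size' by the
       number of elements actually written (see the adjustment below). */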
6467 size = type_size(type, &align);
6468 /* If unknown size, we must evaluate it before
6469 evaluating initializers because
6470 initializers can generate global data too
6471 (e.g. string pointers or ISOC99 compound
6472 literals). It also simplifies local
6473 initializers handling */
6474 if (size < 0 || (flexible_array && has_init)) {
6475 if (!has_init)
6476 tcc_error("unknown type size");
6477         /* collect the whole initializer as a token string */
6478 init_str = tok_str_alloc();
6479 if (has_init == 2) {
6480 /* only get strings */
6481 while (tok == TOK_STR || tok == TOK_LSTR) {
6482 tok_str_add_tok(init_str);
6483 next();
6485 } else {
6486 level = 0;
6487 while (level > 0 || (tok != ',' && tok != ';')) {
6488 if (tok < 0)
6489 tcc_error("unexpected end of file in initializer");
6490 tok_str_add_tok(init_str);
6491 if (tok == '{')
6492 level++;
6493 else if (tok == '}') {
6494 level--;
6495 if (level <= 0) {
6496 next();
6497 break;
6500 next();
6503 tok_str_add(init_str, -1);
6504 tok_str_add(init_str, 0);
6506 /* compute size */
6507 save_parse_state(&saved_parse_state);
6509 begin_macro(init_str, 1);
6510 next();
6511 decl_initializer(type, NULL, 0, 1, 1);
6512 /* prepare second initializer parsing */
6513 macro_ptr = init_str->str;
6514 next();
6516 /* if still unknown size, error */
6517 size = type_size(type, &align);
6518 if (size < 0)
6519 tcc_error("unknown type size");
6521 /* If there's a flex member and it was used in the initializer
6522 adjust size. */
6523 if (flexible_array &&
6524 flexible_array->type.ref->c > 0)
6525 size += flexible_array->type.ref->c
6526 * pointed_size(&flexible_array->type);
6527 /* take into account specified alignment if bigger */
6528 if (ad->a.aligned) {
6529 if (ad->a.aligned > align)
6530 align = ad->a.aligned;
6531 } else if (ad->a.packed) {
6532 align = 1;
6534 if ((r & VT_VALMASK) == VT_LOCAL) {
6535 sec = NULL;
6536 #ifdef CONFIG_TCC_BCHECK
6537 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6538 loc--;
6540 #endif
6541 loc = (loc - size) & -align;
6542 addr = loc;
6543 #ifdef CONFIG_TCC_BCHECK
6544 /* handles bounds */
6545 /* XXX: currently, since we do only one pass, we cannot track
6546 '&' operators, so we add only arrays */
6547 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6548 addr_t *bounds_ptr;
6549 /* add padding between regions */
6550 loc--;
6551 /* then add local bound info */
6552 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6553 bounds_ptr[0] = addr;
6554 bounds_ptr[1] = size;
6556 #endif
6557 if (v) {
6558 /* local variable */
6559 #ifdef CONFIG_TCC_ASM
6560 if (ad->asm_label) {
6561 int reg = asm_parse_regvar(ad->asm_label);
6562 if (reg >= 0)
6563 r = (r & ~VT_VALMASK) | reg;
6565 #endif
6566 sym_push(v, type, r, addr);
6567 } else {
6568 /* push local reference */
6569 vset(type, r, addr);
6571 } else {
6572 Sym *sym;
6574 sym = NULL;
6575 if (v && scope == VT_CONST) {
6576 /* see if the symbol was already defined */
6577 sym = sym_find(v);
6578 if (sym) {
6579 if (!is_compatible_types(&sym->type, type))
6580 tcc_error("incompatible types for redefinition of '%s'",
6581 get_tok_str(v, NULL));
6582 if (sym->type.t & VT_EXTERN) {
6583 /* if the variable is extern, it was not allocated */
6584 sym->type.t &= ~VT_EXTERN;
6585 /* set array size if it was omitted in extern
6586 declaration */
6587 if ((sym->type.t & VT_ARRAY) &&
6588 sym->type.ref->c < 0 &&
6589 type->ref->c >= 0)
6590 sym->type.ref->c = type->ref->c;
6591 } else {
6592 /* we accept several definitions of the same
6593 global variable. this is tricky, because we
6594 must play with the SHN_COMMON type of the symbol */
6595 /* XXX: should check if the variable was already
6596                       initialized. It is incorrect to initialize it
6597 twice */
6598 /* no init data, we won't add more to the symbol */
6599 if (!has_init)
6600 goto no_alloc;
6605 /* allocate symbol in corresponding section */
6606 sec = ad->section;
6607 if (!sec) {
6608 if (has_init)
6609 sec = data_section;
6610 else if (tcc_state->nocommon)
6611 sec = bss_section;
6613 if (sec) {
6614 data_offset = sec->data_offset;
6615 data_offset = (data_offset + align - 1) & -align;
6616 addr = data_offset;
6617 /* very important to increment global pointer at this time
6618 because initializers themselves can create new initializers */
6619 data_offset += size;
6620 #ifdef CONFIG_TCC_BCHECK
6621 /* add padding if bound check */
6622 if (tcc_state->do_bounds_check)
6623 data_offset++;
6624 #endif
6625 sec->data_offset = data_offset;
6626 /* allocate section space to put the data */
6627 if (sec->sh_type != SHT_NOBITS &&
6628 data_offset > sec->data_allocated)
6629 section_realloc(sec, data_offset);
6630 /* align section if needed */
6631 if (align > sec->sh_addralign)
6632 sec->sh_addralign = align;
6633 } else {
6634 addr = 0; /* avoid warning */
6637 if (v) {
6638 if (scope != VT_CONST || !sym) {
6639 sym = sym_push(v, type, r | VT_SYM, 0);
6640 sym->asm_label = ad->asm_label;
6642 /* update symbol definition */
6643 if (sec) {
6644 put_extern_sym(sym, sec, addr, size);
6645 } else {
6646 ElfW(Sym) *esym;
6647 /* put a common area */
6648 put_extern_sym(sym, NULL, align, size);
6649 /* XXX: find a nicer way */
6650 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6651 esym->st_shndx = SHN_COMMON;
6653 } else {
6654 /* push global reference */
6655 sym = get_sym_ref(type, sec, addr, size);
6656 vpushsym(type, sym);
6658 /* patch symbol weakness */
6659 if (type->t & VT_WEAK)
6660 weaken_symbol(sym);
6661 apply_visibility(sym, type);
6662 #ifdef CONFIG_TCC_BCHECK
6663 /* handle bounds now, because the symbol must be defined
6664 before the relocation against it can be emitted */
6665 if (tcc_state->do_bounds_check) {
6666 addr_t *bounds_ptr;
6668 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6669 /* then add global bound info */
6670 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6671 bounds_ptr[0] = 0; /* relocated */
6672 bounds_ptr[1] = size;
6674 #endif
6676 if (type->t & VT_VLA) {
6677 int a;
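/* e.g. for 'char buf[n];': the stack pointer is saved once per scope
(vla_sp_root_loc) before the first VLA; after gen_vla_alloc() grows the
stack, the new stack pointer, which addresses the array, is stored in
the variable's own slot at 'addr' */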
6679 /* save current stack pointer */
6680 if (vlas_in_scope == 0) {
6681 if (vla_sp_root_loc == -1)
6682 vla_sp_root_loc = (loc -= PTR_SIZE);
6683 gen_vla_sp_save(vla_sp_root_loc);
6686 vla_runtime_type_size(type, &a);
6687 gen_vla_alloc(type, a);
6688 gen_vla_sp_save(addr);
6689 vla_sp_loc = addr;
6690 vlas_in_scope++;
6691 } else if (has_init) {
6692 size_t oldreloc_offset = 0;
6693 if (sec && sec->reloc)
6694 oldreloc_offset = sec->reloc->data_offset;
6695 decl_initializer(type, sec, addr, 1, 0);
6696 if (sec && sec->reloc)
6697 squeeze_multi_relocs(sec, oldreloc_offset);
6698 /* patch flexible array member size back to -1, */
6699 /* for possible subsequent similar declarations */
6700 if (flexible_array)
6701 flexible_array->type.ref->c = -1;
6703 no_alloc: ;
6704 /* restore parse state if needed */
6705 if (init_str) {
6706 end_macro();
6707 restore_parse_state(&saved_parse_state);
6711 static void put_func_debug(Sym *sym)
6713 char buf[512];
6715 /* stabs info */
6716 /* XXX: we put a dummy type here */
6717 snprintf(buf, sizeof(buf), "%s:%c1",
6718 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
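/* e.g. this yields "main:F1" for a global function and "helper:f1"
for a static one; '1' is the dummy type index */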
6719 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6720 cur_text_section, sym->c);
6721 /* gdb wants a line entry at the start of the function */
6722 put_stabn(N_SLINE, 0, file->line_num, 0);
6723 last_ind = 0;
6724 last_line_num = 0;
6727 /* parse an old style function declaration list */
6728 /* XXX: check multiple parameter declarations */
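/* e.g. 'int f(a, b) int a; char *b; { ... }': the declarations between
')' and '{' supply the parameter types */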
6729 static void func_decl_list(Sym *func_sym)
6731 AttributeDef ad;
6732 int v;
6733 Sym *s;
6734 CType btype, type;
6736 /* parse each declaration */
6737 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6738 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6739 if (!parse_btype(&btype, &ad))
6740 expect("declaration list");
6741 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6742 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6743 tok == ';') {
6744 /* no variables are accepted after such a declaration */
6745 } else {
6746 for(;;) {
6747 type = btype;
6748 type_decl(&type, &ad, &v, TYPE_DIRECT);
6749 /* find parameter in function parameter list */
6750 s = func_sym->next;
6751 while (s != NULL) {
6752 if ((s->v & ~SYM_FIELD) == v)
6753 goto found;
6754 s = s->next;
6756 tcc_error("declaration for parameter '%s' but no such parameter",
6757 get_tok_str(v, NULL));
6758 found:
6759 /* check that no storage specifier except 'register' was given */
6760 if (type.t & VT_STORAGE)
6761 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6762 convert_parameter_type(&type);
6763 /* we can add the type (NOTE: it could be local to the function) */
6764 s->type = type;
6765 /* accept other parameters */
6766 if (tok == ',')
6767 next();
6768 else
6769 break;
6772 skip(';');
6776 /* parse a function defined by symbol 'sym' and generate its code in
6777 'cur_text_section' */
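/* the sequence is: emit the function symbol, generate the prolog,
compile the body with block(), generate the epilog, then patch the
symbol size once the final code size is known */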
6778 static void gen_function(Sym *sym)
6780 int saved_nocode_wanted = nocode_wanted;
6782 nocode_wanted = 0;
6783 ind = cur_text_section->data_offset;
6784 /* NOTE: we patch the symbol size later */
6785 put_extern_sym(sym, cur_text_section, ind, 0);
6786 funcname = get_tok_str(sym->v, NULL);
6787 func_ind = ind;
6788 /* Initialize VLA state */
6789 vla_sp_loc = -1;
6790 vla_sp_root_loc = -1;
6791 /* put debug symbol */
6792 if (tcc_state->do_debug)
6793 put_func_debug(sym);
6795 /* push a dummy symbol to enable local sym storage */
6796 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6797 local_scope = 1; /* for function parameters */
6798 gfunc_prolog(&sym->type);
6799 local_scope = 0;
6801 rsym = 0;
6802 block(NULL, NULL, 0);
6803 gsym(rsym);
6804 gfunc_epilog();
6805 cur_text_section->data_offset = ind;
6806 label_pop(&global_label_stack, NULL);
6807 /* reset local stack */
6808 local_scope = 0;
6809 sym_pop(&local_stack, NULL, 0);
6810 /* end of function */
6811 /* patch symbol size */
6812 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6813 ind - func_ind;
6814 /* patch symbol weakness (this definition overrules any prototype) */
6815 if (sym->type.t & VT_WEAK)
6816 weaken_symbol(sym);
6817 apply_visibility(sym, &sym->type);
6818 if (tcc_state->do_debug) {
6819 put_stabn(N_FUN, 0, 0, ind - func_ind);
6821 /* It's better to crash than to generate wrong code */
6822 cur_text_section = NULL;
6823 funcname = ""; /* for safety */
6824 func_vt.t = VT_VOID; /* for safety */
6825 func_var = 0; /* for safety */
6826 ind = 0; /* for safety */
6827 nocode_wanted = saved_nocode_wanted;
6828 check_vstack();
6831 static void gen_inline_functions(TCCState *s)
6833 Sym *sym;
6834 int inline_generated, i, ln;
6835 struct InlineFunc *fn;
6837 ln = file->line_num;
6838 /* iterate while inline functions are referenced */
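/* another pass may be needed because generating one inline function
can itself reference another inline function not yet generated */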
6839 for(;;) {
6840 inline_generated = 0;
6841 for (i = 0; i < s->nb_inline_fns; ++i) {
6842 fn = s->inline_fns[i];
6843 sym = fn->sym;
6844 if (sym && sym->c) {
6845 /* the function was used: generate its code and
6846 convert it to a normal function */
6847 fn->sym = NULL;
6848 if (file)
6849 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6850 sym->r = VT_SYM | VT_CONST;
6851 sym->type.t &= ~VT_INLINE;
6853 begin_macro(fn->func_str, 1);
6854 next();
6855 cur_text_section = text_section;
6856 gen_function(sym);
6857 end_macro();
6859 inline_generated = 1;
6862 if (!inline_generated)
6863 break;
6865 file->line_num = ln;
6868 ST_FUNC void free_inline_functions(TCCState *s)
6870 int i;
6871 /* free tokens of unused inline functions */
6872 for (i = 0; i < s->nb_inline_fns; ++i) {
6873 struct InlineFunc *fn = s->inline_fns[i];
6874 if (fn->sym)
6875 tok_str_free(fn->func_str);
6877 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6880 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6881 static int decl0(int l, int is_for_loop_init)
6883 int v, has_init, r;
6884 CType type, btype;
6885 Sym *sym;
6886 AttributeDef ad;
6888 while (1) {
6889 if (!parse_btype(&btype, &ad)) {
6890 if (is_for_loop_init)
6891 return 0;
6892 /* skip redundant ';' */
6893 /* XXX: find a more elegant solution */
6894 if (tok == ';') {
6895 next();
6896 continue;
6898 if (l == VT_CONST &&
6899 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6900 /* global asm block */
6901 asm_global_instr();
6902 continue;
6904 /* special test for old K&R protos without explicit int
6905 type. Only accepted when defining global data */
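/* e.g. an old style definition like 'main(argc, argv) char **argv; { ... }'
or a bare 'count;' declaration gets an implicit int type */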
6906 if (l == VT_LOCAL || tok < TOK_UIDENT)
6907 break;
6908 btype.t = VT_INT;
6910 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6911 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6912 tok == ';') {
6913 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6914 int v = btype.ref->v;
6915 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6916 tcc_warning("unnamed struct/union that defines no instances");
6918 next();
6919 continue;
6921 while (1) { /* iterate thru each declaration */
6922 type = btype;
6923 /* If the base type itself was an array type of unspecified
6924 size (like in 'typedef int arr[]; arr x = {1};') then
6925 we will overwrite the unknown size by the real one for
6926 this decl. We need to unshare the ref symbol holding
6927 that size. */
6928 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6929 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6931 type_decl(&type, &ad, &v, TYPE_DIRECT);
6932 #if 0
6934 char buf[500];
6935 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
6936 printf("type = '%s'\n", buf);
6938 #endif
6939 if ((type.t & VT_BTYPE) == VT_FUNC) {
6940 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6941 tcc_error("function without file scope cannot be static");
6943 /* if old style function prototype, we accept a
6944 declaration list */
6945 sym = type.ref;
6946 if (sym->c == FUNC_OLD)
6947 func_decl_list(sym);
6950 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6951 ad.asm_label = asm_label_instr();
6952 /* parse one last attribute list, after asm label */
6953 parse_attribute(&ad);
6954 if (tok == '{')
6955 expect(";");
6958 if (ad.a.weak)
6959 type.t |= VT_WEAK;
6960 #ifdef TCC_TARGET_PE
6961 if (ad.a.func_import)
6962 type.t |= VT_IMPORT;
6963 if (ad.a.func_export)
6964 type.t |= VT_EXPORT;
6965 #endif
6966 type.t |= ad.a.visibility << VT_VIS_SHIFT;
6968 if (tok == '{') {
6969 if (l == VT_LOCAL)
6970 tcc_error("cannot use local functions");
6971 if ((type.t & VT_BTYPE) != VT_FUNC)
6972 expect("function definition");
6974 /* reject abstract declarators in function definition */
6975 sym = type.ref;
6976 while ((sym = sym->next) != NULL)
6977 if (!(sym->v & ~SYM_FIELD))
6978 expect("identifier");
6980 /* XXX: cannot do better now: convert extern inline to static inline */
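/* e.g. 'extern inline int f(void) { ... }' is compiled here as if it
had been declared 'static inline' (a simplification of the C99
semantics, see the XXX above) */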
6981 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
6982 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6984 sym = sym_find(v);
6985 if (sym) {
6986 Sym *ref;
6987 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
6988 goto func_error1;
6990 ref = sym->type.ref;
6991 if (0 == ref->a.func_proto)
6992 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6994 /* use func_call from prototype if not defined */
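/* e.g. if the prototype was declared '__stdcall' and the definition
omits it, the definition keeps the stdcall calling convention */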
6995 if (ref->a.func_call != FUNC_CDECL
6996 && type.ref->a.func_call == FUNC_CDECL)
6997 type.ref->a.func_call = ref->a.func_call;
6999 /* use export from prototype */
7000 if (ref->a.func_export)
7001 type.ref->a.func_export = 1;
7003 /* use static from prototype */
7004 if (sym->type.t & VT_STATIC)
7005 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7007 /* If the definition has no visibility use the
7008 one from prototype. */
7009 if (! (type.t & VT_VIS_MASK))
7010 type.t |= sym->type.t & VT_VIS_MASK;
7012 if (!is_compatible_types(&sym->type, &type)) {
7013 func_error1:
7014 tcc_error("incompatible types for redefinition of '%s'",
7015 get_tok_str(v, NULL));
7017 type.ref->a.func_proto = 0;
7018 /* if symbol is already defined, then put complete type */
7019 sym->type = type;
7020 } else {
7021 /* put function symbol */
7022 sym = global_identifier_push(v, type.t, 0);
7023 sym->type.ref = type.ref;
7026 /* static inline functions are just recorded as a kind
7027 of macro. Their code will be emitted at the end of
7028 the compilation unit only if they are used */
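/* e.g. 'static inline int twice(int x) { return x + x; }' is only
saved as a token string here; gen_inline_functions() emits its code
later, and only if 'twice' is actually referenced */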
7029 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7030 (VT_INLINE | VT_STATIC)) {
7031 int block_level;
7032 struct InlineFunc *fn;
7033 const char *filename;
7035 filename = file ? file->filename : "";
7036 fn = tcc_malloc(sizeof *fn + strlen(filename));
7037 strcpy(fn->filename, filename);
7038 fn->sym = sym;
7039 fn->func_str = tok_str_alloc();
7041 block_level = 0;
7042 for(;;) {
7043 int t;
7044 if (tok == TOK_EOF)
7045 tcc_error("unexpected end of file");
7046 tok_str_add_tok(fn->func_str);
7047 t = tok;
7048 next();
7049 if (t == '{') {
7050 block_level++;
7051 } else if (t == '}') {
7052 block_level--;
7053 if (block_level == 0)
7054 break;
7057 tok_str_add(fn->func_str, -1);
7058 tok_str_add(fn->func_str, 0);
7059 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7061 } else {
7062 /* compute text section */
7063 cur_text_section = ad.section;
7064 if (!cur_text_section)
7065 cur_text_section = text_section;
7066 sym->r = VT_SYM | VT_CONST;
7067 gen_function(sym);
7069 break;
7070 } else {
7071 if (btype.t & VT_TYPEDEF) {
7072 /* save typedefed type */
7073 /* XXX: test storage specifiers ? */
7074 sym = sym_find(v);
7075 if (sym && sym->scope == local_scope) {
7076 if (!is_compatible_types(&sym->type, &type)
7077 || !(sym->type.t & VT_TYPEDEF))
7078 tcc_error("incompatible redefinition of '%s'",
7079 get_tok_str(v, NULL));
7080 sym->type = type;
7081 } else {
7082 sym = sym_push(v, &type, 0, 0);
7084 sym->a = ad.a;
7085 sym->type.t |= VT_TYPEDEF;
7086 } else {
7087 r = 0;
7088 if ((type.t & VT_BTYPE) == VT_FUNC) {
7089 /* external function declaration */
7090 /* specific case for func_call attribute */
7091 ad.a.func_proto = 1;
7092 type.ref->a = ad.a;
7093 } else if (!(type.t & VT_ARRAY)) {
7094 /* not lvalue if array */
7095 r |= lvalue_type(type.t);
7097 has_init = (tok == '=');
7098 if (has_init && (type.t & VT_VLA))
7099 tcc_error("variable length array cannot be initialized");
7100 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7101 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7102 !has_init && l == VT_CONST && type.ref->c < 0)) {
7103 /* external variable or function */
7104 /* NOTE: as in GCC, uninitialized static global
7105 arrays of unspecified size are considered
7106 extern */
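/* e.g. 'static char tab[];' with no initializer is treated like an
extern declaration here */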
7107 sym = external_sym(v, &type, r);
7108 sym->asm_label = ad.asm_label;
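/* e.g. 'int f(void) __attribute__((alias("g")));' makes 'f' reuse the
section, value and size of the already defined symbol 'g' */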
7110 if (ad.alias_target) {
7111 Section tsec;
7112 ElfW(Sym) *esym;
7113 Sym *alias_target;
7115 alias_target = sym_find(ad.alias_target);
7116 if (!alias_target || !alias_target->c)
7117 tcc_error("unsupported forward __alias__ attribute");
7118 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7119 tsec.sh_num = esym->st_shndx;
7120 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7122 } else {
7123 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7124 if (type.t & VT_STATIC)
7125 r |= VT_CONST;
7126 else
7127 r |= l;
7128 if (has_init)
7129 next();
7130 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7133 if (tok != ',') {
7134 if (is_for_loop_init)
7135 return 1;
7136 skip(';');
7137 break;
7139 next();
7141 ad.a.aligned = 0;
7144 return 0;
7147 ST_FUNC void decl(int l)
7149 decl0(l, 0);
7152 /* ------------------------------------------------------------------------- */