Fix fallthrough of non-entered stmt expressions
[tinycc.git] / tccgen.c
blob c170ff3fd16767ca97a07fcb312ff9c86c3c70f1
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 ST_DATA char debug_modes;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
66 static int gind(void) { int t = ind; CODE_ON(); if (debug_modes) tcc_tcov_block_begin(tcc_state); return t; }
68 /* Set 'nocode_wanted' after unconditional jumps */
69 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
70 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
74 #define gjmp gjmp_acs
75 /* <---- */
77 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
79 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
80 ST_DATA int func_vc;
81 ST_DATA int func_ind;
82 ST_DATA const char *funcname;
83 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
84 static CString initstr;
86 #if PTR_SIZE == 4
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
89 #elif LONG_SIZE == 4
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
92 #else
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
95 #endif
97 static struct switch_t {
98 struct case_t {
99 int64_t v1, v2;
100 int sym;
101 } **p; int n; /* list of case ranges */
102 int def_sym; /* default symbol */
103 int *bsym;
104 struct scope *scope;
105 struct switch_t *prev;
106 SValue sv;
107 } *cur_switch; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 static struct temp_local_variable {
112 int location; //offset on stack. Svalue.c.i
113 short size;
114 short align;
115 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
116 static int nb_temp_local_vars;
118 static struct scope {
119 struct scope *prev;
120 struct { int loc, locorig, num; } vla;
121 struct { Sym *s; int n; } cl;
122 int *bsym, *csym;
123 Sym *lstk, *llstk;
124 } *cur_scope, *loop_scope, *root_scope;
126 typedef struct {
127 Section *sec;
128 int local_offset;
129 Sym *flex_array_ref;
130 } init_params;
132 #if 1
133 #define precedence_parser
134 static void init_prec(void);
135 #endif
137 static void gen_cast(CType *type);
138 static void gen_cast_s(int t);
139 static inline CType *pointed_type(CType *type);
140 static int is_compatible_types(CType *type1, CType *type2);
141 static int parse_btype(CType *type, AttributeDef *ad);
142 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
143 static void parse_expr_type(CType *type);
144 static void init_putv(init_params *p, CType *type, unsigned long c);
145 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
146 static void block(int is_expr);
147 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
148 static void decl(int l);
149 static int decl0(int l, int is_for_loop_init, Sym *);
150 static void expr_eq(void);
151 static void vpush_type_size(CType *type, int *a);
152 static int is_compatible_unqualified_types(CType *type1, CType *type2);
153 static inline int64_t expr_const64(void);
154 static void vpush64(int ty, unsigned long long v);
155 static void vpush(CType *type);
156 static int gvtst(int inv, int t);
157 static void gen_inline_functions(TCCState *s);
158 static void free_inline_functions(TCCState *s);
159 static void skip_or_save_block(TokenString **str);
160 static void gv_dup(void);
161 static int get_temp_local_var(int size,int align);
162 static void clear_temp_local_var_list();
163 static void cast_error(CType *st, CType *dt);
165 /* ------------------------------------------------------------------------- */
167 ST_INLN int is_float(int t)
169 int bt = t & VT_BTYPE;
170 return bt == VT_LDOUBLE
171 || bt == VT_DOUBLE
172 || bt == VT_FLOAT
173 || bt == VT_QFLOAT;
176 static inline int is_integer_btype(int bt)
178 return bt == VT_BYTE
179 || bt == VT_BOOL
180 || bt == VT_SHORT
181 || bt == VT_INT
182 || bt == VT_LLONG;
185 static int btype_size(int bt)
187 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
188 bt == VT_SHORT ? 2 :
189 bt == VT_INT ? 4 :
190 bt == VT_LLONG ? 8 :
191 bt == VT_PTR ? PTR_SIZE : 0;
194 /* returns function return register from type */
195 static int R_RET(int t)
197 if (!is_float(t))
198 return REG_IRET;
199 #ifdef TCC_TARGET_X86_64
200 if ((t & VT_BTYPE) == VT_LDOUBLE)
201 return TREG_ST0;
202 #elif defined TCC_TARGET_RISCV64
203 if ((t & VT_BTYPE) == VT_LDOUBLE)
204 return REG_IRET;
205 #endif
206 return REG_FRET;
209 /* returns 2nd function return register, if any */
210 static int R2_RET(int t)
212 t &= VT_BTYPE;
213 #if PTR_SIZE == 4
214 if (t == VT_LLONG)
215 return REG_IRE2;
216 #elif defined TCC_TARGET_X86_64
217 if (t == VT_QLONG)
218 return REG_IRE2;
219 if (t == VT_QFLOAT)
220 return REG_FRE2;
221 #elif defined TCC_TARGET_RISCV64
222 if (t == VT_LDOUBLE)
223 return REG_IRE2;
224 #endif
225 return VT_CONST;
228 /* returns true for two-word types */
229 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
231 /* put function return registers to stack value */
232 static void PUT_R_RET(SValue *sv, int t)
234 sv->r = R_RET(t), sv->r2 = R2_RET(t);
237 /* returns function return register class for type t */
238 static int RC_RET(int t)
240 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
243 /* returns generic register class for type t */
244 static int RC_TYPE(int t)
246 if (!is_float(t))
247 return RC_INT;
248 #ifdef TCC_TARGET_X86_64
249 if ((t & VT_BTYPE) == VT_LDOUBLE)
250 return RC_ST0;
251 if ((t & VT_BTYPE) == VT_QFLOAT)
252 return RC_FRET;
253 #elif defined TCC_TARGET_RISCV64
254 if ((t & VT_BTYPE) == VT_LDOUBLE)
255 return RC_INT;
256 #endif
257 return RC_FLOAT;
260 /* returns 2nd register class corresponding to t and rc */
261 static int RC2_TYPE(int t, int rc)
263 if (!USING_TWO_WORDS(t))
264 return 0;
265 #ifdef RC_IRE2
266 if (rc == RC_IRET)
267 return RC_IRE2;
268 #endif
269 #ifdef RC_FRE2
270 if (rc == RC_FRET)
271 return RC_FRE2;
272 #endif
273 if (rc & RC_FLOAT)
274 return RC_FLOAT;
275 return RC_INT;
278 /* we use our own 'finite' function to avoid potential problems with
279 non standard math libs */
280 /* XXX: endianness dependent */
281 ST_FUNC int ieee_finite(double d)
283 int p[4];
284 memcpy(p, &d, sizeof(double));
285 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
288 /* compiling intel long double natively */
289 #if (defined __i386__ || defined __x86_64__) \
290 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
291 # define TCC_IS_NATIVE_387
292 #endif
294 ST_FUNC void test_lvalue(void)
296 if (!(vtop->r & VT_LVAL))
297 expect("lvalue");
300 ST_FUNC void check_vstack(void)
302 if (vtop != vstack - 1)
303 tcc_error("internal compiler error: vstack leak (%d)",
304 (int)(vtop - vstack + 1));
307 /* vstack debugging aid */
308 #if 0
309 void pv (const char *lbl, int a, int b)
311 int i;
312 for (i = a; i < a + b; ++i) {
313 SValue *p = &vtop[-i];
314 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
315 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
318 #endif
320 /* ------------------------------------------------------------------------- */
321 /* initialize vstack and types. This must be done also for tcc -E */
322 ST_FUNC void tccgen_init(TCCState *s1)
324 vtop = vstack - 1;
325 memset(vtop, 0, sizeof *vtop);
327 /* define some often used types */
328 int_type.t = VT_INT;
330 char_type.t = VT_BYTE;
331 if (s1->char_is_unsigned)
332 char_type.t |= VT_UNSIGNED;
333 char_pointer_type = char_type;
334 mk_pointer(&char_pointer_type);
336 func_old_type.t = VT_FUNC;
337 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
338 func_old_type.ref->f.func_call = FUNC_CDECL;
339 func_old_type.ref->f.func_type = FUNC_OLD;
340 #ifdef precedence_parser
341 init_prec();
342 #endif
343 cstr_new(&initstr);
346 ST_FUNC int tccgen_compile(TCCState *s1)
348 cur_text_section = NULL;
349 funcname = "";
350 func_ind = -1;
351 anon_sym = SYM_FIRST_ANOM;
352 const_wanted = 0;
353 nocode_wanted = 0x80000000;
354 local_scope = 0;
355 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
357 tcc_debug_start(s1);
358 tcc_tcov_start (s1);
359 #ifdef TCC_TARGET_ARM
360 arm_init(s1);
361 #endif
362 #ifdef INC_DEBUG
363 printf("%s: **** new file\n", file->filename);
364 #endif
365 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
366 next();
367 decl(VT_CONST);
368 gen_inline_functions(s1);
369 check_vstack();
370 /* end of translation unit info */
371 tcc_debug_end(s1);
372 tcc_tcov_end(s1);
373 return 0;
376 ST_FUNC void tccgen_finish(TCCState *s1)
378 cstr_free(&initstr);
379 free_inline_functions(s1);
380 sym_pop(&global_stack, NULL, 0);
381 sym_pop(&local_stack, NULL, 0);
382 /* free preprocessor macros */
383 free_defines(NULL);
384 /* free sym_pools */
385 dynarray_reset(&sym_pools, &nb_sym_pools);
386 sym_free_first = NULL;
389 /* ------------------------------------------------------------------------- */
390 ST_FUNC ElfSym *elfsym(Sym *s)
392 if (!s || !s->c)
393 return NULL;
394 return &((ElfSym *)symtab_section->data)[s->c];
397 /* apply storage attributes to Elf symbol */
398 ST_FUNC void update_storage(Sym *sym)
400 ElfSym *esym;
401 int sym_bind, old_sym_bind;
403 esym = elfsym(sym);
404 if (!esym)
405 return;
407 if (sym->a.visibility)
408 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
409 | sym->a.visibility;
411 if (sym->type.t & (VT_STATIC | VT_INLINE))
412 sym_bind = STB_LOCAL;
413 else if (sym->a.weak)
414 sym_bind = STB_WEAK;
415 else
416 sym_bind = STB_GLOBAL;
417 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
418 if (sym_bind != old_sym_bind) {
419 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
422 #ifdef TCC_TARGET_PE
423 if (sym->a.dllimport)
424 esym->st_other |= ST_PE_IMPORT;
425 if (sym->a.dllexport)
426 esym->st_other |= ST_PE_EXPORT;
427 #endif
429 #if 0
430 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
431 get_tok_str(sym->v, NULL),
432 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
433 sym->a.visibility,
434 sym->a.dllexport,
435 sym->a.dllimport
437 #endif
440 /* ------------------------------------------------------------------------- */
441 /* update sym->c so that it points to an external symbol in section
442 'section' with value 'value' */
444 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
445 addr_t value, unsigned long size,
446 int can_add_underscore)
448 int sym_type, sym_bind, info, other, t;
449 ElfSym *esym;
450 const char *name;
451 char buf1[256];
453 if (!sym->c) {
454 name = get_tok_str(sym->v, NULL);
455 t = sym->type.t;
456 if ((t & VT_BTYPE) == VT_FUNC) {
457 sym_type = STT_FUNC;
458 } else if ((t & VT_BTYPE) == VT_VOID) {
459 sym_type = STT_NOTYPE;
460 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
461 sym_type = STT_FUNC;
462 } else {
463 sym_type = STT_OBJECT;
465 if (t & (VT_STATIC | VT_INLINE))
466 sym_bind = STB_LOCAL;
467 else
468 sym_bind = STB_GLOBAL;
469 other = 0;
471 #ifdef TCC_TARGET_PE
472 if (sym_type == STT_FUNC && sym->type.ref) {
473 Sym *ref = sym->type.ref;
474 if (ref->a.nodecorate) {
475 can_add_underscore = 0;
477 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
478 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
479 name = buf1;
480 other |= ST_PE_STDCALL;
481 can_add_underscore = 0;
484 #endif
486 if (sym->asm_label) {
487 name = get_tok_str(sym->asm_label, NULL);
488 can_add_underscore = 0;
491 if (tcc_state->leading_underscore && can_add_underscore) {
492 buf1[0] = '_';
493 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
494 name = buf1;
497 info = ELFW(ST_INFO)(sym_bind, sym_type);
498 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
500 if (debug_modes)
501 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
503 } else {
504 esym = elfsym(sym);
505 esym->st_value = value;
506 esym->st_size = size;
507 esym->st_shndx = sh_num;
509 update_storage(sym);
512 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
513 addr_t value, unsigned long size)
515 int sh_num = section ? section->sh_num : SHN_UNDEF;
516 put_extern_sym2(sym, sh_num, value, size, 1);
519 /* add a new relocation entry to symbol 'sym' in section 's' */
520 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
521 addr_t addend)
523 int c = 0;
525 if (nocode_wanted && s == cur_text_section)
526 return;
528 if (sym) {
529 if (0 == sym->c)
530 put_extern_sym(sym, NULL, 0, 0);
531 c = sym->c;
534 /* now we can add ELF relocation info */
535 put_elf_reloca(symtab_section, s, offset, type, c, addend);
538 #if PTR_SIZE == 4
539 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
541 greloca(s, sym, offset, type, 0);
543 #endif
545 /* ------------------------------------------------------------------------- */
546 /* symbol allocator */
547 static Sym *__sym_malloc(void)
549 Sym *sym_pool, *sym, *last_sym;
550 int i;
552 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
553 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
555 last_sym = sym_free_first;
556 sym = sym_pool;
557 for(i = 0; i < SYM_POOL_NB; i++) {
558 sym->next = last_sym;
559 last_sym = sym;
560 sym++;
562 sym_free_first = last_sym;
563 return last_sym;
566 static inline Sym *sym_malloc(void)
568 Sym *sym;
569 #ifndef SYM_DEBUG
570 sym = sym_free_first;
571 if (!sym)
572 sym = __sym_malloc();
573 sym_free_first = sym->next;
574 return sym;
575 #else
576 sym = tcc_malloc(sizeof(Sym));
577 return sym;
578 #endif
581 ST_INLN void sym_free(Sym *sym)
583 #ifndef SYM_DEBUG
584 sym->next = sym_free_first;
585 sym_free_first = sym;
586 #else
587 tcc_free(sym);
588 #endif
591 /* push, without hashing */
592 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
594 Sym *s;
596 s = sym_malloc();
597 memset(s, 0, sizeof *s);
598 s->v = v;
599 s->type.t = t;
600 s->c = c;
601 /* add in stack */
602 s->prev = *ps;
603 *ps = s;
604 return s;
607 /* find a symbol and return its associated structure. 's' is the top
608 of the symbol stack */
609 ST_FUNC Sym *sym_find2(Sym *s, int v)
611 while (s) {
612 if (s->v == v)
613 return s;
614 else if (s->v == -1)
615 return NULL;
616 s = s->prev;
618 return NULL;
621 /* structure lookup */
622 ST_INLN Sym *struct_find(int v)
624 v -= TOK_IDENT;
625 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
626 return NULL;
627 return table_ident[v]->sym_struct;
630 /* find an identifier */
631 ST_INLN Sym *sym_find(int v)
633 v -= TOK_IDENT;
634 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
635 return NULL;
636 return table_ident[v]->sym_identifier;
639 static int sym_scope(Sym *s)
641 if (IS_ENUM_VAL (s->type.t))
642 return s->type.ref->sym_scope;
643 else
644 return s->sym_scope;
647 /* push a given symbol on the symbol stack */
648 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
650 Sym *s, **ps;
651 TokenSym *ts;
653 if (local_stack)
654 ps = &local_stack;
655 else
656 ps = &global_stack;
657 s = sym_push2(ps, v, type->t, c);
658 s->type.ref = type->ref;
659 s->r = r;
660 /* don't record fields or anonymous symbols */
661 /* XXX: simplify */
662 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
663 /* record symbol in token array */
664 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
665 if (v & SYM_STRUCT)
666 ps = &ts->sym_struct;
667 else
668 ps = &ts->sym_identifier;
669 s->prev_tok = *ps;
670 *ps = s;
671 s->sym_scope = local_scope;
672 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
673 tcc_error("redeclaration of '%s'",
674 get_tok_str(v & ~SYM_STRUCT, NULL));
676 return s;
679 /* push a global identifier */
680 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
682 Sym *s, **ps;
683 s = sym_push2(&global_stack, v, t, c);
684 s->r = VT_CONST | VT_SYM;
685 /* don't record anonymous symbol */
686 if (v < SYM_FIRST_ANOM) {
687 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
688 /* modify the top most local identifier, so that sym_identifier will
689 point to 's' when popped; happens when called from inline asm */
690 while (*ps != NULL && (*ps)->sym_scope)
691 ps = &(*ps)->prev_tok;
692 s->prev_tok = *ps;
693 *ps = s;
695 return s;
698 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
699 pop them yet from the list, but do remove them from the token array. */
700 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
702 Sym *s, *ss, **ps;
703 TokenSym *ts;
704 int v;
/* walk from the current top down to (but not including) 'b' */
706 s = *ptop;
707 while(s != b) {
708 ss = s->prev;
709 v = s->v;
710 /* remove symbol in token array */
711 /* XXX: simplify */
/* only named, non-field symbols were hooked into the token table
   (mirrors the condition in sym_push) */
712 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
713 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
714 if (v & SYM_STRUCT)
715 ps = &ts->sym_struct;
716 else
717 ps = &ts->sym_identifier;
/* unhook: previous declaration of the token becomes visible again */
718 *ps = s->prev_tok;
720 if (!keep)
721 sym_free(s);
722 s = ss;
/* when keeping, the stack itself is left untouched for later reuse */
724 if (!keep)
725 *ptop = b;
728 /* ------------------------------------------------------------------------- */
729 static void vcheck_cmp(void)
731 /* cannot let cpu flags if other instruction are generated. Also
732 avoid leaving VT_JMP anywhere except on the top of the stack
733 because it would complicate the code generator.
735 Don't do this when nocode_wanted. vtop might come from
736 !nocode_wanted regions (see 88_codeopt.c) and transforming
737 it to a register without actually generating code is wrong
738 as their value might still be used for real. All values
739 we push under nocode_wanted will eventually be popped
740 again, so that the VT_CMP/VT_JMP value will be in vtop
741 when code is unsuppressed again. */
743 if (vtop->r == VT_CMP && !nocode_wanted)
744 gv(RC_INT);
747 static void vsetc(CType *type, int r, CValue *vc)
749 if (vtop >= vstack + (VSTACK_SIZE - 1))
750 tcc_error("memory full (vstack)");
751 vcheck_cmp();
752 vtop++;
753 vtop->type = *type;
754 vtop->r = r;
755 vtop->r2 = VT_CONST;
756 vtop->c = *vc;
757 vtop->sym = NULL;
760 ST_FUNC void vswap(void)
762 SValue tmp;
764 vcheck_cmp();
765 tmp = vtop[0];
766 vtop[0] = vtop[-1];
767 vtop[-1] = tmp;
770 /* pop stack value */
771 ST_FUNC void vpop(void)
773 int v;
774 v = vtop->r & VT_VALMASK;
775 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
776 /* for x86, we need to pop the FP stack */
777 if (v == TREG_ST0) {
778 o(0xd8dd); /* fstp %st(0) */
779 } else
780 #endif
781 if (v == VT_CMP) {
782 /* need to put correct jump if && or || without test */
783 gsym(vtop->jtrue);
784 gsym(vtop->jfalse);
786 vtop--;
789 /* push constant of type "type" with useless value */
790 static void vpush(CType *type)
792 vset(type, VT_CONST, 0);
795 /* push arbitrary 64bit constant */
796 static void vpush64(int ty, unsigned long long v)
798 CValue cval;
799 CType ctype;
800 ctype.t = ty;
801 ctype.ref = NULL;
802 cval.i = v;
803 vsetc(&ctype, VT_CONST, &cval);
806 /* push integer constant */
/* the int is widened to the 64-bit constant slot by vpush64 */
807 ST_FUNC void vpushi(int v)
809 vpush64(VT_INT, v);
812 /* push a pointer sized constant */
813 static void vpushs(addr_t v)
815 vpush64(VT_SIZE_T, v);
818 /* push long long constant */
819 static inline void vpushll(long long v)
821 vpush64(VT_LLONG, v);
824 ST_FUNC void vset(CType *type, int r, int v)
826 CValue cval;
827 cval.i = v;
828 vsetc(type, r, &cval);
831 static void vseti(int r, int v)
833 CType type;
834 type.t = VT_INT;
835 type.ref = NULL;
836 vset(&type, r, v);
839 ST_FUNC void vpushv(SValue *v)
841 if (vtop >= vstack + (VSTACK_SIZE - 1))
842 tcc_error("memory full (vstack)");
843 vtop++;
844 *vtop = *v;
847 static void vdup(void)
849 vpushv(vtop);
852 /* rotate n first stack elements to the bottom
853 I1 ... In -> I2 ... In I1 [top is right]
855 ST_FUNC void vrotb(int n)
857 int i;
858 SValue tmp;
860 vcheck_cmp();
861 tmp = vtop[-n + 1];
862 for(i=-n+1;i!=0;i++)
863 vtop[i] = vtop[i+1];
864 vtop[0] = tmp;
867 /* rotate the n elements before entry e towards the top
868 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
870 ST_FUNC void vrote(SValue *e, int n)
872 int i;
873 SValue tmp;
875 vcheck_cmp();
876 tmp = *e;
877 for(i = 0;i < n - 1; i++)
878 e[-i] = e[-i - 1];
879 e[-n + 1] = tmp;
882 /* rotate n first stack elements to the top
883 I1 ... In -> In I1 ... I(n-1) [top is right]
/* convenience wrapper: rotate with the current stack top as pivot */
885 ST_FUNC void vrott(int n)
887 vrote(vtop, n);
890 /* ------------------------------------------------------------------------- */
891 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
893 /* called from generators to set the result from relational ops */
894 ST_FUNC void vset_VT_CMP(int op)
896 vtop->r = VT_CMP;
897 vtop->cmp_op = op;
898 vtop->jfalse = 0;
899 vtop->jtrue = 0;
902 /* called once before asking generators to load VT_CMP to a register */
903 static void vset_VT_JMP(void)
905 int op = vtop->cmp_op;
907 if (vtop->jtrue || vtop->jfalse) {
908 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
909 int inv = op & (op < 2); /* small optimization */
910 vseti(VT_JMP+inv, gvtst(inv, 0));
911 } else {
912 /* otherwise convert flags (rsp. 0/1) to register */
913 vtop->c.i = op;
914 if (op < 2) /* doesn't seem to happen */
915 vtop->r = VT_CONST;
919 /* Set CPU Flags, doesn't yet jump */
920 static void gvtst_set(int inv, int t)
922 int *p;
924 if (vtop->r != VT_CMP) {
925 vpushi(0);
926 gen_op(TOK_NE);
927 if (vtop->r != VT_CMP) /* must be VT_CONST then */
928 vset_VT_CMP(vtop->c.i != 0);
931 p = inv ? &vtop->jfalse : &vtop->jtrue;
932 *p = gjmp_append(*p, t);
935 /* Generate value test
937 * Generate a test for any value (jump, comparison and integers) */
938 static int gvtst(int inv, int t)
940 int op, x, u;
942 gvtst_set(inv, t);
943 t = vtop->jtrue, u = vtop->jfalse;
944 if (inv)
945 x = u, u = t, t = x;
946 op = vtop->cmp_op;
948 /* jump to the wanted target */
949 if (op > 1)
950 t = gjmp_cond(op ^ inv, t);
951 else if (op != inv)
952 t = gjmp(t);
953 /* resolve complementary jumps to here */
954 gsym(u);
956 vtop--;
957 return t;
960 /* generate a zero or nozero test */
961 static void gen_test_zero(int op)
963 if (vtop->r == VT_CMP) {
964 int j;
965 if (op == TOK_EQ) {
966 j = vtop->jfalse;
967 vtop->jfalse = vtop->jtrue;
968 vtop->jtrue = j;
969 vtop->cmp_op ^= 1;
971 } else {
972 vpushi(0);
973 gen_op(op);
977 /* ------------------------------------------------------------------------- */
978 /* push a symbol value of TYPE */
979 ST_FUNC void vpushsym(CType *type, Sym *sym)
981 CValue cval;
982 cval.i = 0;
983 vsetc(type, VT_CONST | VT_SYM, &cval);
984 vtop->sym = sym;
987 /* Return a static symbol pointing to a section */
988 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
990 int v;
991 Sym *sym;
993 v = anon_sym++;
994 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
995 sym->type.t |= VT_STATIC;
996 put_extern_sym(sym, sec, offset, size);
997 return sym;
1000 /* push a reference to a section offset by adding a dummy symbol */
1001 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1003 vpushsym(type, get_sym_ref(type, sec, offset, size));
1006 /* define a new external reference to a symbol 'v' of type 'u' */
1007 ST_FUNC Sym *external_global_sym(int v, CType *type)
1009 Sym *s;
1011 s = sym_find(v);
1012 if (!s) {
1013 /* push forward reference */
1014 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1015 s->type.ref = type->ref;
1016 } else if (IS_ASM_SYM(s)) {
1017 s->type.t = type->t | (s->type.t & VT_EXTERN);
1018 s->type.ref = type->ref;
1019 update_storage(s);
1021 return s;
1024 /* create an external reference with no specific type similar to asm labels.
1025 This avoids type conflicts if the symbol is used from C too */
1026 ST_FUNC Sym *external_helper_sym(int v)
1028 CType ct = { VT_ASM_FUNC, NULL };
1029 return external_global_sym(v, &ct);
1032 /* push a reference to an helper function (such as memmove) */
1033 ST_FUNC void vpush_helper_func(int v)
1035 vpushsym(&func_old_type, external_helper_sym(v));
1038 /* Merge symbol attributes. */
1039 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1041 if (sa1->aligned && !sa->aligned)
1042 sa->aligned = sa1->aligned;
1043 sa->packed |= sa1->packed;
1044 sa->weak |= sa1->weak;
1045 if (sa1->visibility != STV_DEFAULT) {
1046 int vis = sa->visibility;
1047 if (vis == STV_DEFAULT
1048 || vis > sa1->visibility)
1049 vis = sa1->visibility;
1050 sa->visibility = vis;
1052 sa->dllexport |= sa1->dllexport;
1053 sa->nodecorate |= sa1->nodecorate;
1054 sa->dllimport |= sa1->dllimport;
1057 /* Merge function attributes. */
1058 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1060 if (fa1->func_call && !fa->func_call)
1061 fa->func_call = fa1->func_call;
1062 if (fa1->func_type && !fa->func_type)
1063 fa->func_type = fa1->func_type;
1064 if (fa1->func_args && !fa->func_args)
1065 fa->func_args = fa1->func_args;
1066 if (fa1->func_noreturn)
1067 fa->func_noreturn = 1;
1068 if (fa1->func_ctor)
1069 fa->func_ctor = 1;
1070 if (fa1->func_dtor)
1071 fa->func_dtor = 1;
1074 /* Merge attributes. */
1075 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1077 merge_symattr(&ad->a, &ad1->a);
1078 merge_funcattr(&ad->f, &ad1->f);
1080 if (ad1->section)
1081 ad->section = ad1->section;
1082 if (ad1->alias_target)
1083 ad->alias_target = ad1->alias_target;
1084 if (ad1->asm_label)
1085 ad->asm_label = ad1->asm_label;
1086 if (ad1->attr_mode)
1087 ad->attr_mode = ad1->attr_mode;
1090 /* Merge some type attributes. */
1091 static void patch_type(Sym *sym, CType *type)
1093 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1094 if (!(sym->type.t & VT_EXTERN))
1095 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1096 sym->type.t &= ~VT_EXTERN;
1099 if (IS_ASM_SYM(sym)) {
1100 /* stay static if both are static */
1101 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1102 sym->type.ref = type->ref;
1105 if (!is_compatible_types(&sym->type, type)) {
1106 tcc_error("incompatible types for redefinition of '%s'",
1107 get_tok_str(sym->v, NULL));
1109 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1110 int static_proto = sym->type.t & VT_STATIC;
1111 /* warn if static follows non-static function declaration */
1112 if ((type->t & VT_STATIC) && !static_proto
1113 /* XXX this test for inline shouldn't be here. Until we
1114 implement gnu-inline mode again it silences a warning for
1115 mingw caused by our workarounds. */
1116 && !((type->t | sym->type.t) & VT_INLINE))
1117 tcc_warning("static storage ignored for redefinition of '%s'",
1118 get_tok_str(sym->v, NULL));
1120 /* set 'inline' if both agree or if one has static */
1121 if ((type->t | sym->type.t) & VT_INLINE) {
1122 if (!((type->t ^ sym->type.t) & VT_INLINE)
1123 || ((type->t | sym->type.t) & VT_STATIC))
1124 static_proto |= VT_INLINE;
1127 if (0 == (type->t & VT_EXTERN)) {
1128 struct FuncAttr f = sym->type.ref->f;
1129 /* put complete type, use static from prototype */
1130 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1131 sym->type.ref = type->ref;
1132 merge_funcattr(&sym->type.ref->f, &f);
1133 } else {
1134 sym->type.t &= ~VT_INLINE | static_proto;
1137 if (sym->type.ref->f.func_type == FUNC_OLD
1138 && type->ref->f.func_type != FUNC_OLD) {
1139 sym->type.ref = type->ref;
1142 } else {
1143 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1144 /* set array size if it was omitted in extern declaration */
1145 sym->type.ref->c = type->ref->c;
1147 if ((type->t ^ sym->type.t) & VT_STATIC)
1148 tcc_warning("storage mismatch for redefinition of '%s'",
1149 get_tok_str(sym->v, NULL));
1153 /* Merge some storage attributes. */
1154 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1156 if (type)
1157 patch_type(sym, type);
1159 #ifdef TCC_TARGET_PE
1160 if (sym->a.dllimport != ad->a.dllimport)
1161 tcc_error("incompatible dll linkage for redefinition of '%s'",
1162 get_tok_str(sym->v, NULL));
1163 #endif
1164 merge_symattr(&sym->a, &ad->a);
1165 if (ad->asm_label)
1166 sym->asm_label = ad->asm_label;
1167 update_storage(sym);
1170 /* copy sym to other stack */
1171 static Sym *sym_copy(Sym *s0, Sym **ps)
1173 Sym *s;
1174 s = sym_malloc(), *s = *s0;
1175 s->prev = *ps, *ps = s;
1176 if (s->v < SYM_FIRST_ANOM) {
1177 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1178 s->prev_tok = *ps, *ps = s;
1180 return s;
1183 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1184 static void sym_copy_ref(Sym *s, Sym **ps)
1186 int bt = s->type.t & VT_BTYPE;
1187 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1188 Sym **sp = &s->type.ref;
1189 for (s = *sp, *sp = NULL; s; s = s->next) {
1190 Sym *s2 = sym_copy(s, ps);
1191 sp = &(*sp = s2)->next;
1192 sym_copy_ref(s2, ps);
1197 /* define a new external reference to a symbol 'v' */
1198 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1200 Sym *s;
1202 /* look for global symbol */
1203 s = sym_find(v);
1204 while (s && s->sym_scope)
1205 s = s->prev_tok;
1207 if (!s) {
1208 /* push forward reference */
1209 s = global_identifier_push(v, type->t, 0);
1210 s->r |= r;
1211 s->a = ad->a;
1212 s->asm_label = ad->asm_label;
1213 s->type.ref = type->ref;
1214 /* copy type to the global stack */
1215 if (local_stack)
1216 sym_copy_ref(s, &global_stack);
1217 } else {
1218 patch_storage(s, ad, type);
1220 /* push variables on local_stack if any */
1221 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1222 s = sym_copy(s, &local_stack);
1223 return s;
1226 /* save registers up to (vtop - n) stack entry */
1227 ST_FUNC void save_regs(int n)
1229 SValue *p, *p1;
1230 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1231 save_reg(p->r);
1234 /* save r to the memory stack, and mark it as being free */
1235 ST_FUNC void save_reg(int r)
1237 save_reg_upstack(r, 0);
1240 /* save r to the memory stack, and mark it as being free,
1241 if seen up to (vtop - n) stack entry */
1242 ST_FUNC void save_reg_upstack(int r, int n)
1244 int l, size, align, bt;
1245 SValue *p, *p1, sv;
1247 if ((r &= VT_VALMASK) >= VT_CONST)
1248 return;
1249 if (nocode_wanted)
1250 return;
1251 l = 0;
1252 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1253 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1254 /* must save value on stack if not already done */
1255 if (!l) {
1256 bt = p->type.t & VT_BTYPE;
1257 if (bt == VT_VOID)
1258 continue;
1259 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1260 bt = VT_PTR;
1261 sv.type.t = bt;
1262 size = type_size(&sv.type, &align);
1263 l = get_temp_local_var(size,align);
1264 sv.r = VT_LOCAL | VT_LVAL;
1265 sv.c.i = l;
1266 store(p->r & VT_VALMASK, &sv);
1267 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1268 /* x86 specific: need to pop fp register ST0 if saved */
1269 if (r == TREG_ST0) {
1270 o(0xd8dd); /* fstp %st(0) */
1272 #endif
1273 /* special long long case */
1274 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1275 sv.c.i += PTR_SIZE;
1276 store(p->r2, &sv);
1279 /* mark that stack entry as being saved on the stack */
1280 if (p->r & VT_LVAL) {
1281 /* also clear the bounded flag because the
1282 relocation address of the function was stored in
1283 p->c.i */
1284 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1285 } else {
1286 p->r = VT_LVAL | VT_LOCAL;
1288 p->sym = NULL;
1289 p->r2 = VT_CONST;
1290 p->c.i = l;
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for (r = 0; r < NB_REGS; r++) {
        if (reg_classes[r] & rc2) {
            int refs = 0;
            for (p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r || p->r2 == r)
                    refs++;
            }
            if (refs <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
1320 /* find a free register of class 'rc'. If none, save one register */
1321 ST_FUNC int get_reg(int rc)
1323 int r;
1324 SValue *p;
1326 /* find a free register */
1327 for(r=0;r<NB_REGS;r++) {
1328 if (reg_classes[r] & rc) {
1329 if (nocode_wanted)
1330 return r;
1331 for(p=vstack;p<=vtop;p++) {
1332 if ((p->r & VT_VALMASK) == r ||
1333 p->r2 == r)
1334 goto notfound;
1336 return r;
1338 notfound: ;
1341 /* no register left : free the first one on the stack (VERY
1342 IMPORTANT to start from the bottom to ensure that we don't
1343 spill registers used in gen_opi()) */
1344 for(p=vstack;p<=vtop;p++) {
1345 /* look at second register (if long long) */
1346 r = p->r2;
1347 if (r < VT_CONST && (reg_classes[r] & rc))
1348 goto save_found;
1349 r = p->r & VT_VALMASK;
1350 if (r < VT_CONST && (reg_classes[r] & rc)) {
1351 save_found:
1352 save_reg(r);
1353 return r;
1356 /* Should never comes here */
1357 return -1;
1360 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1361 static int get_temp_local_var(int size,int align){
1362 int i;
1363 struct temp_local_variable *temp_var;
1364 int found_var;
1365 SValue *p;
1366 int r;
1367 char free;
1368 char found;
1369 found=0;
1370 for(i=0;i<nb_temp_local_vars;i++){
1371 temp_var=&arr_temp_local_vars[i];
1372 if(temp_var->size<size||align!=temp_var->align){
1373 continue;
1375 /*check if temp_var is free*/
1376 free=1;
1377 for(p=vstack;p<=vtop;p++) {
1378 r=p->r&VT_VALMASK;
1379 if(r==VT_LOCAL||r==VT_LLOCAL){
1380 if(p->c.i==temp_var->location){
1381 free=0;
1382 break;
1386 if(free){
1387 found_var=temp_var->location;
1388 found=1;
1389 break;
1392 if(!found){
1393 loc = (loc - size) & -align;
1394 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1395 temp_var=&arr_temp_local_vars[i];
1396 temp_var->location=loc;
1397 temp_var->size=size;
1398 temp_var->align=align;
1399 nb_temp_local_vars++;
1401 found_var=loc;
1403 return found_var;
1406 static void clear_temp_local_var_list(){
1407 nb_temp_local_vars=0;
1410 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1411 if needed */
1412 static void move_reg(int r, int s, int t)
1414 SValue sv;
1416 if (r != s) {
1417 save_reg(r);
1418 sv.type.t = t;
1419 sv.type.ref = NULL;
1420 sv.r = s;
1421 sv.c.i = 0;
1422 load(r, &sv);
1426 /* get address of vtop (vtop MUST BE an lvalue) */
1427 ST_FUNC void gaddrof(void)
1429 vtop->r &= ~VT_LVAL;
1430 /* tricky: if saved lvalue, then we can go back to lvalue */
1431 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1432 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1435 #ifdef CONFIG_TCC_BCHECK
1436 /* generate a bounded pointer addition */
1437 static void gen_bounded_ptr_add(void)
1439 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
1440 if (save) {
1441 vpushv(&vtop[-1]);
1442 vrott(3);
1444 vpush_helper_func(TOK___bound_ptr_add);
1445 vrott(3);
1446 gfunc_call(2);
1447 vtop -= save;
1448 vpushi(0);
1449 /* returned pointer is in REG_IRET */
1450 vtop->r = REG_IRET | VT_BOUNDED;
1451 if (nocode_wanted)
1452 return;
1453 /* relocation offset of the bounding function call point */
1454 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
1457 /* patch pointer addition in vtop so that pointer dereferencing is
1458 also tested */
1459 static void gen_bounded_ptr_deref(void)
1461 addr_t func;
1462 int size, align;
1463 ElfW_Rel *rel;
1464 Sym *sym;
1466 if (nocode_wanted)
1467 return;
1469 size = type_size(&vtop->type, &align);
1470 switch(size) {
1471 case 1: func = TOK___bound_ptr_indir1; break;
1472 case 2: func = TOK___bound_ptr_indir2; break;
1473 case 4: func = TOK___bound_ptr_indir4; break;
1474 case 8: func = TOK___bound_ptr_indir8; break;
1475 case 12: func = TOK___bound_ptr_indir12; break;
1476 case 16: func = TOK___bound_ptr_indir16; break;
1477 default:
1478 /* may happen with struct member access */
1479 return;
1481 sym = external_helper_sym(func);
1482 if (!sym->c)
1483 put_extern_sym(sym, NULL, 0, 0);
1484 /* patch relocation */
1485 /* XXX: find a better solution ? */
1486 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
1487 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
1490 /* generate lvalue bound code */
1491 static void gbound(void)
1493 CType type1;
1495 vtop->r &= ~VT_MUSTBOUND;
1496 /* if lvalue, then use checking code before dereferencing */
1497 if (vtop->r & VT_LVAL) {
1498 /* if not VT_BOUNDED value, then make one */
1499 if (!(vtop->r & VT_BOUNDED)) {
1500 /* must save type because we must set it to int to get pointer */
1501 type1 = vtop->type;
1502 vtop->type.t = VT_PTR;
1503 gaddrof();
1504 vpushi(0);
1505 gen_bounded_ptr_add();
1506 vtop->r |= VT_LVAL;
1507 vtop->type = type1;
1509 /* then check for dereferencing */
1510 gen_bounded_ptr_deref();
1514 /* we need to call __bound_ptr_add before we start to load function
1515 args into registers */
1516 ST_FUNC void gbound_args(int nb_args)
1518 int i, v;
1519 SValue *sv;
1521 for (i = 1; i <= nb_args; ++i)
1522 if (vtop[1 - i].r & VT_MUSTBOUND) {
1523 vrotb(i);
1524 gbound();
1525 vrott(i);
1528 sv = vtop - nb_args;
1529 if (sv->r & VT_SYM) {
1530 v = sv->sym->v;
1531 if (v == TOK_setjmp
1532 || v == TOK__setjmp
1533 #ifndef TCC_TARGET_PE
1534 || v == TOK_sigsetjmp
1535 || v == TOK___sigsetjmp
1536 #endif
1538 vpush_helper_func(TOK___bound_setjmp);
1539 vpushv(sv + 1);
1540 gfunc_call(1);
1541 func_bound_add_epilog = 1;
1543 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1544 if (v == TOK_alloca)
1545 func_bound_add_epilog = 1;
1546 #endif
1547 #if TARGETOS_NetBSD
1548 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
1549 sv->sym->asm_label = TOK___bound_longjmp;
1550 #endif
1554 /* Add bounds for local symbols from S to E (via ->prev) */
1555 static void add_local_bounds(Sym *s, Sym *e)
1557 for (; s != e; s = s->prev) {
1558 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1559 continue;
1560 /* Add arrays/structs/unions because we always take address */
1561 if ((s->type.t & VT_ARRAY)
1562 || (s->type.t & VT_BTYPE) == VT_STRUCT
1563 || s->a.addrtaken) {
1564 /* add local bound info */
1565 int align, size = type_size(&s->type, &align);
1566 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1567 2 * sizeof(addr_t));
1568 bounds_ptr[0] = s->c;
1569 bounds_ptr[1] = size;
1573 #endif
1575 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1576 static void pop_local_syms(Sym *b, int keep)
1578 #ifdef CONFIG_TCC_BCHECK
1579 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
1580 add_local_bounds(local_stack, b);
1581 #endif
1582 if (debug_modes)
1583 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
1584 sym_pop(&local_stack, b, keep);
1587 static void incr_bf_adr(int o)
1589 vtop->type = char_pointer_type;
1590 gaddrof();
1591 vpushs(o);
1592 gen_op('+');
1593 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1594 vtop->r |= VT_LVAL;
1597 /* single-byte load mode for packed or otherwise unaligned bitfields */
1598 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1600 int n, o, bits;
1601 save_reg_upstack(vtop->r, 1);
1602 vpush64(type->t & VT_BTYPE, 0); // B X
1603 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1604 do {
1605 vswap(); // X B
1606 incr_bf_adr(o);
1607 vdup(); // X B B
1608 n = 8 - bit_pos;
1609 if (n > bit_size)
1610 n = bit_size;
1611 if (bit_pos)
1612 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1613 if (n < 8)
1614 vpushi((1 << n) - 1), gen_op('&');
1615 gen_cast(type);
1616 if (bits)
1617 vpushi(bits), gen_op(TOK_SHL);
1618 vrotb(3); // B Y X
1619 gen_op('|'); // B X
1620 bits += n, bit_size -= n, o = 1;
1621 } while (bit_size);
1622 vswap(), vpop();
1623 if (!(type->t & VT_UNSIGNED)) {
1624 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1625 vpushi(n), gen_op(TOK_SHL);
1626 vpushi(n), gen_op(TOK_SAR);
1630 /* single-byte store mode for packed or otherwise unaligned bitfields */
1631 static void store_packed_bf(int bit_pos, int bit_size)
1633 int bits, n, o, m, c;
1634 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1635 vswap(); // X B
1636 save_reg_upstack(vtop->r, 1);
1637 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1638 do {
1639 incr_bf_adr(o); // X B
1640 vswap(); //B X
1641 c ? vdup() : gv_dup(); // B V X
1642 vrott(3); // X B V
1643 if (bits)
1644 vpushi(bits), gen_op(TOK_SHR);
1645 if (bit_pos)
1646 vpushi(bit_pos), gen_op(TOK_SHL);
1647 n = 8 - bit_pos;
1648 if (n > bit_size)
1649 n = bit_size;
1650 if (n < 8) {
1651 m = ((1 << n) - 1) << bit_pos;
1652 vpushi(m), gen_op('&'); // X B V1
1653 vpushv(vtop-1); // X B V1 B
1654 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1655 gen_op('&'); // X B V1 B1
1656 gen_op('|'); // X B V2
1658 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1659 vstore(), vpop(); // X B
1660 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1661 } while (bit_size);
1662 vpop(), vpop();
1665 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1667 int t;
1668 if (0 == sv->type.ref)
1669 return 0;
1670 t = sv->type.ref->auxtype;
1671 if (t != -1 && t != VT_STRUCT) {
1672 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1673 sv->r |= VT_LVAL;
1675 return t;
1678 /* store vtop a register belonging to class 'rc'. lvalues are
1679 converted to values. Cannot be used if cannot be converted to
1680 register value (such as structures). */
1681 ST_FUNC int gv(int rc)
1683 int r, r2, r_ok, r2_ok, rc2, bt;
1684 int bit_pos, bit_size, size, align;
1686 /* NOTE: get_reg can modify vstack[] */
1687 if (vtop->type.t & VT_BITFIELD) {
1688 CType type;
1690 bit_pos = BIT_POS(vtop->type.t);
1691 bit_size = BIT_SIZE(vtop->type.t);
1692 /* remove bit field info to avoid loops */
1693 vtop->type.t &= ~VT_STRUCT_MASK;
1695 type.ref = NULL;
1696 type.t = vtop->type.t & VT_UNSIGNED;
1697 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1698 type.t |= VT_UNSIGNED;
1700 r = adjust_bf(vtop, bit_pos, bit_size);
1702 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1703 type.t |= VT_LLONG;
1704 else
1705 type.t |= VT_INT;
1707 if (r == VT_STRUCT) {
1708 load_packed_bf(&type, bit_pos, bit_size);
1709 } else {
1710 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1711 /* cast to int to propagate signedness in following ops */
1712 gen_cast(&type);
1713 /* generate shifts */
1714 vpushi(bits - (bit_pos + bit_size));
1715 gen_op(TOK_SHL);
1716 vpushi(bits - bit_size);
1717 /* NOTE: transformed to SHR if unsigned */
1718 gen_op(TOK_SAR);
1720 r = gv(rc);
1721 } else {
1722 if (is_float(vtop->type.t) &&
1723 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1724 /* CPUs usually cannot use float constants, so we store them
1725 generically in data segment */
1726 init_params p = { rodata_section };
1727 unsigned long offset;
1728 size = type_size(&vtop->type, &align);
1729 if (NODATA_WANTED)
1730 size = 0, align = 1;
1731 offset = section_add(p.sec, size, align);
1732 vpush_ref(&vtop->type, p.sec, offset, size);
1733 vswap();
1734 init_putv(&p, &vtop->type, offset);
1735 vtop->r |= VT_LVAL;
1737 #ifdef CONFIG_TCC_BCHECK
1738 if (vtop->r & VT_MUSTBOUND)
1739 gbound();
1740 #endif
1742 bt = vtop->type.t & VT_BTYPE;
1744 #ifdef TCC_TARGET_RISCV64
1745 /* XXX mega hack */
1746 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1747 rc = RC_INT;
1748 #endif
1749 rc2 = RC2_TYPE(bt, rc);
1751 /* need to reload if:
1752 - constant
1753 - lvalue (need to dereference pointer)
1754 - already a register, but not in the right class */
1755 r = vtop->r & VT_VALMASK;
1756 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1757 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1759 if (!r_ok || !r2_ok) {
1760 if (!r_ok)
1761 r = get_reg(rc);
1762 if (rc2) {
1763 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1764 int original_type = vtop->type.t;
1766 /* two register type load :
1767 expand to two words temporarily */
1768 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1769 /* load constant */
1770 unsigned long long ll = vtop->c.i;
1771 vtop->c.i = ll; /* first word */
1772 load(r, vtop);
1773 vtop->r = r; /* save register value */
1774 vpushi(ll >> 32); /* second word */
1775 } else if (vtop->r & VT_LVAL) {
1776 /* We do not want to modifier the long long pointer here.
1777 So we save any other instances down the stack */
1778 save_reg_upstack(vtop->r, 1);
1779 /* load from memory */
1780 vtop->type.t = load_type;
1781 load(r, vtop);
1782 vdup();
1783 vtop[-1].r = r; /* save register value */
1784 /* increment pointer to get second word */
1785 vtop->type.t = VT_PTRDIFF_T;
1786 gaddrof();
1787 vpushs(PTR_SIZE);
1788 gen_op('+');
1789 vtop->r |= VT_LVAL;
1790 vtop->type.t = load_type;
1791 } else {
1792 /* move registers */
1793 if (!r_ok)
1794 load(r, vtop);
1795 if (r2_ok && vtop->r2 < VT_CONST)
1796 goto done;
1797 vdup();
1798 vtop[-1].r = r; /* save register value */
1799 vtop->r = vtop[-1].r2;
1801 /* Allocate second register. Here we rely on the fact that
1802 get_reg() tries first to free r2 of an SValue. */
1803 r2 = get_reg(rc2);
1804 load(r2, vtop);
1805 vpop();
1806 /* write second register */
1807 vtop->r2 = r2;
1808 done:
1809 vtop->type.t = original_type;
1810 } else {
1811 if (vtop->r == VT_CMP)
1812 vset_VT_JMP();
1813 /* one register type load */
1814 load(r, vtop);
1817 vtop->r = r;
1818 #ifdef TCC_TARGET_C67
1819 /* uses register pairs for doubles */
1820 if (bt == VT_DOUBLE)
1821 vtop->r2 = r+1;
1822 #endif
1824 return r;
1827 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1828 ST_FUNC void gv2(int rc1, int rc2)
1830 /* generate more generic register first. But VT_JMP or VT_CMP
1831 values must be generated first in all cases to avoid possible
1832 reload errors */
1833 if (vtop->r != VT_CMP && rc1 <= rc2) {
1834 vswap();
1835 gv(rc1);
1836 vswap();
1837 gv(rc2);
1838 /* test if reload is needed for first register */
1839 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1840 vswap();
1841 gv(rc1);
1842 vswap();
1844 } else {
1845 gv(rc2);
1846 vswap();
1847 gv(rc1);
1848 vswap();
1849 /* test if reload is needed for first register */
1850 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1851 gv(rc2);
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
ST_FUNC void lexpand(void)
{
    int sign_flags, loc_kind;

    sign_flags = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    loc_kind = vtop->r & (VT_VALMASK | VT_LVAL);
    if (loc_kind == VT_CONST) {
        /* constant: duplicate and keep the high word on top */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (loc_kind == (VT_LVAL|VT_CONST) || loc_kind == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: second word is 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* in registers: split the register pair */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | sign_flags;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
1890 /* convert stack entry to register and duplicate its value in another
1891 register */
1892 static void gv_dup(void)
1894 int t, rc, r;
1896 t = vtop->type.t;
1897 #if PTR_SIZE == 4
1898 if ((t & VT_BTYPE) == VT_LLONG) {
1899 if (t & VT_BITFIELD) {
1900 gv(RC_INT);
1901 t = vtop->type.t;
1903 lexpand();
1904 gv_dup();
1905 vswap();
1906 vrotb(3);
1907 gv_dup();
1908 vrotb(4);
1909 /* stack: H L L1 H1 */
1910 lbuild(t);
1911 vrotb(3);
1912 vrotb(3);
1913 vswap();
1914 lbuild(t);
1915 vswap();
1916 return;
1918 #endif
1919 /* duplicate value */
1920 rc = RC_TYPE(t);
1921 gv(rc);
1922 r = get_reg(rc);
1923 vdup();
1924 load(r, vtop);
1925 vtop->r = r;
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* Signed 64-bit division on values carried in uint64_t: divide the
   magnitudes, then restore the sign of the quotient. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t am = (a >> 63) ? -a : a;
    uint64_t bm = (b >> 63) ? -b : b;
    uint64_t q = am / bm;
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed 64-bit '<' on uint64_t-carried values: flipping the sign bit
   of both operands lets an unsigned compare yield the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2172 /* handle integer constant optimizations and various machine
2173 independent opt */
2174 static void gen_opic(int op)
2176 SValue *v1 = vtop - 1;
2177 SValue *v2 = vtop;
2178 int t1 = v1->type.t & VT_BTYPE;
2179 int t2 = v2->type.t & VT_BTYPE;
2180 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2181 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2182 uint64_t l1 = c1 ? v1->c.i : 0;
2183 uint64_t l2 = c2 ? v2->c.i : 0;
2184 int shm = (t1 == VT_LLONG) ? 63 : 31;
2186 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2187 l1 = ((uint32_t)l1 |
2188 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2189 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2190 l2 = ((uint32_t)l2 |
2191 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2193 if (c1 && c2) {
2194 switch(op) {
2195 case '+': l1 += l2; break;
2196 case '-': l1 -= l2; break;
2197 case '&': l1 &= l2; break;
2198 case '^': l1 ^= l2; break;
2199 case '|': l1 |= l2; break;
2200 case '*': l1 *= l2; break;
2202 case TOK_PDIV:
2203 case '/':
2204 case '%':
2205 case TOK_UDIV:
2206 case TOK_UMOD:
2207 /* if division by zero, generate explicit division */
2208 if (l2 == 0) {
2209 if (const_wanted && !(nocode_wanted & unevalmask))
2210 tcc_error("division by zero in constant");
2211 goto general_case;
2213 switch(op) {
2214 default: l1 = gen_opic_sdiv(l1, l2); break;
2215 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2216 case TOK_UDIV: l1 = l1 / l2; break;
2217 case TOK_UMOD: l1 = l1 % l2; break;
2219 break;
2220 case TOK_SHL: l1 <<= (l2 & shm); break;
2221 case TOK_SHR: l1 >>= (l2 & shm); break;
2222 case TOK_SAR:
2223 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2224 break;
2225 /* tests */
2226 case TOK_ULT: l1 = l1 < l2; break;
2227 case TOK_UGE: l1 = l1 >= l2; break;
2228 case TOK_EQ: l1 = l1 == l2; break;
2229 case TOK_NE: l1 = l1 != l2; break;
2230 case TOK_ULE: l1 = l1 <= l2; break;
2231 case TOK_UGT: l1 = l1 > l2; break;
2232 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2233 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2234 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2235 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2236 /* logical */
2237 case TOK_LAND: l1 = l1 && l2; break;
2238 case TOK_LOR: l1 = l1 || l2; break;
2239 default:
2240 goto general_case;
2242 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2243 l1 = ((uint32_t)l1 |
2244 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2245 v1->c.i = l1;
2246 vtop--;
2247 } else {
2248 /* if commutative ops, put c2 as constant */
2249 if (c1 && (op == '+' || op == '&' || op == '^' ||
2250 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2251 vswap();
2252 c2 = c1; //c = c1, c1 = c2, c2 = c;
2253 l2 = l1; //l = l1, l1 = l2, l2 = l;
2255 if (!const_wanted &&
2256 c1 && ((l1 == 0 &&
2257 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2258 (l1 == -1 && op == TOK_SAR))) {
2259 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2260 vtop--;
2261 } else if (!const_wanted &&
2262 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2263 (op == '|' &&
2264 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2265 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2266 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2267 if (l2 == 1)
2268 vtop->c.i = 0;
2269 vswap();
2270 vtop--;
2271 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2272 op == TOK_PDIV) &&
2273 l2 == 1) ||
2274 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2275 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2276 l2 == 0) ||
2277 (op == '&' &&
2278 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2279 /* filter out NOP operations like x*1, x-0, x&-1... */
2280 vtop--;
2281 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2282 /* try to use shifts instead of muls or divs */
2283 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2284 int n = -1;
2285 while (l2) {
2286 l2 >>= 1;
2287 n++;
2289 vtop->c.i = n;
2290 if (op == '*')
2291 op = TOK_SHL;
2292 else if (op == TOK_PDIV)
2293 op = TOK_SAR;
2294 else
2295 op = TOK_SHR;
2297 goto general_case;
2298 } else if (c2 && (op == '+' || op == '-') &&
2299 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2300 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2301 /* symbol + constant case */
2302 if (op == '-')
2303 l2 = -l2;
2304 l2 += vtop[-1].c.i;
2305 /* The backends can't always deal with addends to symbols
2306 larger than +-1<<31. Don't construct such. */
2307 if ((int)l2 != l2)
2308 goto general_case;
2309 vtop--;
2310 vtop->c.i = l2;
2311 } else {
2312 general_case:
2313 /* call low level op generator */
2314 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2315 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2316 gen_opl(op);
2317 else
2318 gen_opi(op);
2323 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2324 # define gen_negf gen_opf
2325 #elif defined TCC_TARGET_ARM
2326 void gen_negf(int op)
2328 /* arm will detect 0-x and replace by vneg */
2329 vpushi(0), vswap(), gen_op('-');
2331 #else
2332 /* XXX: implement in gen_opf() for other backends too */
2333 void gen_negf(int op)
2335 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2336 subtract(-0, x), but with them it's really a sign flip
2337 operation. We implement this with bit manipulation and have
2338 to do some type reinterpretation for this, which TCC can do
2339 only via memory. */
2341 int align, size, bt;
2343 size = type_size(&vtop->type, &align);
2344 bt = vtop->type.t & VT_BTYPE;
2345 save_reg(gv(RC_TYPE(bt)));
2346 vdup();
2347 incr_bf_adr(size - 1);
2348 vdup();
2349 vpushi(0x80); /* flip sign */
2350 gen_op('^');
2351 vstore();
2352 vpop();
2354 #endif
2356 /* generate a floating point operation with constant propagation */
2357 static void gen_opif(int op)
2359 int c1, c2;
2360 SValue *v1, *v2;
2361 #if defined _MSC_VER && defined __x86_64__
2362 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2363 volatile
2364 #endif
2365 long double f1, f2;
2367 v1 = vtop - 1;
2368 v2 = vtop;
2369 if (op == TOK_NEG)
2370 v1 = v2;
2372 /* currently, we cannot do computations with forward symbols */
2373 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2374 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2375 if (c1 && c2) {
2376 if (v1->type.t == VT_FLOAT) {
2377 f1 = v1->c.f;
2378 f2 = v2->c.f;
2379 } else if (v1->type.t == VT_DOUBLE) {
2380 f1 = v1->c.d;
2381 f2 = v2->c.d;
2382 } else {
2383 f1 = v1->c.ld;
2384 f2 = v2->c.ld;
2386 /* NOTE: we only do constant propagation if finite number (not
2387 NaN or infinity) (ANSI spec) */
2388 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
2389 goto general_case;
2390 switch(op) {
2391 case '+': f1 += f2; break;
2392 case '-': f1 -= f2; break;
2393 case '*': f1 *= f2; break;
2394 case '/':
2395 if (f2 == 0.0) {
2396 union { float f; unsigned u; } x1, x2, y;
2397 /* If not in initializer we need to potentially generate
2398 FP exceptions at runtime, otherwise we want to fold. */
2399 if (!const_wanted)
2400 goto general_case;
2401 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2402 when used to compile the f1 /= f2 below, would be -nan */
2403 x1.f = f1, x2.f = f2;
2404 if (f1 == 0.0)
2405 y.u = 0x7fc00000; /* nan */
2406 else
2407 y.u = 0x7f800000; /* infinity */
2408 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2409 f1 = y.f;
2410 break;
2412 f1 /= f2;
2413 break;
2414 case TOK_NEG:
2415 f1 = -f1;
2416 goto unary_result;
2417 /* XXX: also handles tests ? */
2418 default:
2419 goto general_case;
2421 vtop--;
2422 unary_result:
2423 /* XXX: overflow test ? */
2424 if (v1->type.t == VT_FLOAT) {
2425 v1->c.f = f1;
2426 } else if (v1->type.t == VT_DOUBLE) {
2427 v1->c.d = f1;
2428 } else {
2429 v1->c.ld = f1;
2431 } else {
2432 general_case:
2433 if (op == TOK_NEG) {
2434 gen_negf(op);
2435 } else {
2436 gen_opf(op);
2441 /* print a type. If 'varstr' is not NULL, then the variable is also
2442 printed in the type */
2443 /* XXX: union */
2444 /* XXX: add array and function pointers */
2445 static void type_to_str(char *buf, int buf_size,
2446 CType *type, const char *varstr)
2448 int bt, v, t;
2449 Sym *s, *sa;
2450 char buf1[256];
2451 const char *tstr;
2453 t = type->t;
2454 bt = t & VT_BTYPE;
2455 buf[0] = '\0';
2457 if (t & VT_EXTERN)
2458 pstrcat(buf, buf_size, "extern ");
2459 if (t & VT_STATIC)
2460 pstrcat(buf, buf_size, "static ");
2461 if (t & VT_TYPEDEF)
2462 pstrcat(buf, buf_size, "typedef ");
2463 if (t & VT_INLINE)
2464 pstrcat(buf, buf_size, "inline ");
2465 if (bt != VT_PTR) {
2466 if (t & VT_VOLATILE)
2467 pstrcat(buf, buf_size, "volatile ");
2468 if (t & VT_CONSTANT)
2469 pstrcat(buf, buf_size, "const ");
2471 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2472 || ((t & VT_UNSIGNED)
2473 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2474 && !IS_ENUM(t)
2476 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2478 buf_size -= strlen(buf);
2479 buf += strlen(buf);
2481 switch(bt) {
2482 case VT_VOID:
2483 tstr = "void";
2484 goto add_tstr;
2485 case VT_BOOL:
2486 tstr = "_Bool";
2487 goto add_tstr;
2488 case VT_BYTE:
2489 tstr = "char";
2490 goto add_tstr;
2491 case VT_SHORT:
2492 tstr = "short";
2493 goto add_tstr;
2494 case VT_INT:
2495 tstr = "int";
2496 goto maybe_long;
2497 case VT_LLONG:
2498 tstr = "long long";
2499 maybe_long:
2500 if (t & VT_LONG)
2501 tstr = "long";
2502 if (!IS_ENUM(t))
2503 goto add_tstr;
2504 tstr = "enum ";
2505 goto tstruct;
2506 case VT_FLOAT:
2507 tstr = "float";
2508 goto add_tstr;
2509 case VT_DOUBLE:
2510 tstr = "double";
2511 if (!(t & VT_LONG))
2512 goto add_tstr;
2513 case VT_LDOUBLE:
2514 tstr = "long double";
2515 add_tstr:
2516 pstrcat(buf, buf_size, tstr);
2517 break;
2518 case VT_STRUCT:
2519 tstr = "struct ";
2520 if (IS_UNION(t))
2521 tstr = "union ";
2522 tstruct:
2523 pstrcat(buf, buf_size, tstr);
2524 v = type->ref->v & ~SYM_STRUCT;
2525 if (v >= SYM_FIRST_ANOM)
2526 pstrcat(buf, buf_size, "<anonymous>");
2527 else
2528 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2529 break;
2530 case VT_FUNC:
2531 s = type->ref;
2532 buf1[0]=0;
2533 if (varstr && '*' == *varstr) {
2534 pstrcat(buf1, sizeof(buf1), "(");
2535 pstrcat(buf1, sizeof(buf1), varstr);
2536 pstrcat(buf1, sizeof(buf1), ")");
2538 pstrcat(buf1, buf_size, "(");
2539 sa = s->next;
2540 while (sa != NULL) {
2541 char buf2[256];
2542 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2543 pstrcat(buf1, sizeof(buf1), buf2);
2544 sa = sa->next;
2545 if (sa)
2546 pstrcat(buf1, sizeof(buf1), ", ");
2548 if (s->f.func_type == FUNC_ELLIPSIS)
2549 pstrcat(buf1, sizeof(buf1), ", ...");
2550 pstrcat(buf1, sizeof(buf1), ")");
2551 type_to_str(buf, buf_size, &s->type, buf1);
2552 goto no_var;
2553 case VT_PTR:
2554 s = type->ref;
2555 if (t & (VT_ARRAY|VT_VLA)) {
2556 if (varstr && '*' == *varstr)
2557 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2558 else
2559 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2560 type_to_str(buf, buf_size, &s->type, buf1);
2561 goto no_var;
2563 pstrcpy(buf1, sizeof(buf1), "*");
2564 if (t & VT_CONSTANT)
2565 pstrcat(buf1, buf_size, "const ");
2566 if (t & VT_VOLATILE)
2567 pstrcat(buf1, buf_size, "volatile ");
2568 if (varstr)
2569 pstrcat(buf1, sizeof(buf1), varstr);
2570 type_to_str(buf, buf_size, &s->type, buf1);
2571 goto no_var;
2573 if (varstr) {
2574 pstrcat(buf, buf_size, " ");
2575 pstrcat(buf, buf_size, varstr);
2577 no_var: ;
2580 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2582 char buf1[256], buf2[256];
2583 type_to_str(buf1, sizeof(buf1), st, NULL);
2584 type_to_str(buf2, sizeof(buf2), dt, NULL);
2585 tcc_error(fmt, buf1, buf2);
2588 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2590 char buf1[256], buf2[256];
2591 type_to_str(buf1, sizeof(buf1), st, NULL);
2592 type_to_str(buf2, sizeof(buf2), dt, NULL);
2593 tcc_warning(fmt, buf1, buf2);
2596 static int pointed_size(CType *type)
2598 int align;
2599 return type_size(pointed_type(type), &align);
2602 static inline int is_null_pointer(SValue *p)
2604 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2605 return 0;
2606 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2607 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2608 ((p->type.t & VT_BTYPE) == VT_PTR &&
2609 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2610 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2611 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2615 /* compare function types. OLD functions match any new functions */
2616 static int is_compatible_func(CType *type1, CType *type2)
2618 Sym *s1, *s2;
2620 s1 = type1->ref;
2621 s2 = type2->ref;
2622 if (s1->f.func_call != s2->f.func_call)
2623 return 0;
2624 if (s1->f.func_type != s2->f.func_type
2625 && s1->f.func_type != FUNC_OLD
2626 && s2->f.func_type != FUNC_OLD)
2627 return 0;
2628 for (;;) {
2629 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2630 return 0;
2631 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2632 return 1;
2633 s1 = s1->next;
2634 s2 = s2->next;
2635 if (!s1)
2636 return !s2;
2637 if (!s2)
2638 return 0;
2642 /* return true if type1 and type2 are the same. If unqualified is
2643 true, qualifiers on the types are ignored.
2645 static int compare_types(CType *type1, CType *type2, int unqualified)
2647 int bt1, t1, t2;
2649 t1 = type1->t & VT_TYPE;
2650 t2 = type2->t & VT_TYPE;
2651 if (unqualified) {
2652 /* strip qualifiers before comparing */
2653 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2654 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2657 /* Default Vs explicit signedness only matters for char */
2658 if ((t1 & VT_BTYPE) != VT_BYTE) {
2659 t1 &= ~VT_DEFSIGN;
2660 t2 &= ~VT_DEFSIGN;
2662 /* XXX: bitfields ? */
2663 if (t1 != t2)
2664 return 0;
2666 if ((t1 & VT_ARRAY)
2667 && !(type1->ref->c < 0
2668 || type2->ref->c < 0
2669 || type1->ref->c == type2->ref->c))
2670 return 0;
2672 /* test more complicated cases */
2673 bt1 = t1 & VT_BTYPE;
2674 if (bt1 == VT_PTR) {
2675 type1 = pointed_type(type1);
2676 type2 = pointed_type(type2);
2677 return is_compatible_types(type1, type2);
2678 } else if (bt1 == VT_STRUCT) {
2679 return (type1->ref == type2->ref);
2680 } else if (bt1 == VT_FUNC) {
2681 return is_compatible_func(type1, type2);
2682 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2683 /* If both are enums then they must be the same, if only one is then
2684 t1 and t2 must be equal, which was checked above already. */
2685 return type1->ref == type2->ref;
2686 } else {
2687 return 1;
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
/* Returns 0 when the operand types are invalid for OP, 1 otherwise.
   Implements the usual arithmetic conversions and the C99 rules for
   the conditional operator on pointers (6.5.15). */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1;

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            /* the pointer side determines the result type */
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* pointer comparisons yield an integer of pointer width */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversions: widest floating type wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
/* Apply binary operation 'op' to vtop[-1] and vtop[0]: performs the
   usual arithmetic conversions, pointer arithmetic scaling and pointer
   difference division, then dispatches to gen_opif/gen_opic.  The
   result replaces both operands on the value stack. */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* functions decay to pointers to function, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* (p1 - p2) / sizeof(*p1) */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the pointed-to size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts: result type follows the left operand only */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case */
/* Convert the integer at vtop to floating type 't'.  Unsigned 64-bit
   values are not handled natively by these backends, so a libtcc1
   helper (__floatundi{s,d,x}f) is called instead. */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        /* push a dummy value and mark the function return register as
           holding a value of type 't' */
        vpushi(0);
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
#define gen_cvt_ftoi1 gen_cvt_ftoi
#else
/* generic ftoi for unsigned long long case */
/* Convert the float at vtop to integer type 't'.  Conversion to
   unsigned 64-bit is not handled natively by these backends, so a
   libtcc1 helper (__fixuns{s,d,x}fdi) is called instead. */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        /* push a dummy value and mark the function return register as
           holding a value of type 't' */
        vpushi(0);
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
#endif
3000 /* special delayed cast for char/short */
3001 static void force_charshort_cast(void)
3003 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3004 int dbt = vtop->type.t;
3005 vtop->r &= ~VT_MUSTCAST;
3006 vtop->type.t = sbt;
3007 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3008 vtop->type.t = dbt;
3011 static void gen_cast_s(int t)
3013 CType type;
3014 type.t = t;
3015 type.ref = NULL;
3016 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
/* Constant operands are folded at compile time; otherwise conversion
   code is generated.  Integer narrowing is done with shift pairs when
   the value is in a register. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
        error:
            cast_error(&vtop->type, type);
        }

        /* is the source a compile-time constant? */
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't try to convert to long double when cross-compiling and
           the host long double differs from the target's */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if (sbt_bt == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer to integer: first widen/normalize to 64 bits */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                /* then truncate/sign-extend to the destination width */
                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (STATIC_DATA_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        /* these targets have hardware byte/word extension */
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* narrow via shift left then arithmetic/logical shift right */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
/* Returns -1 (with *a = 0) for an incomplete enum; for arrays with
   unknown element count the product may be negative as well. */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;   /* alignment was stored in the symbol's 'r' field */
        return s->c; /* size was stored in 'c' */
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* both element size and count unknown: normalize sign so the
               product still signals "unknown" as a negative value */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is target/ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
3317 /* push type size as known at runtime time on top of value stack. Put
3318 alignment at 'a' */
3319 static void vpush_type_size(CType *type, int *a)
3321 if (type->t & VT_VLA) {
3322 type_size(&type->ref->type, a);
3323 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3324 } else {
3325 int size = type_size(type, a);
3326 if (size < 0)
3327 tcc_error("unknown type size");
3328 #if PTR_SIZE == 8
3329 vpushll(size);
3330 #else
3331 vpushi(size);
3332 #endif
3336 /* return the pointed type of t */
3337 static inline CType *pointed_type(CType *type)
3339 return &type->ref->type;
3342 /* modify type so that its it is a pointer to type. */
3343 ST_FUNC void mk_pointer(CType *type)
3345 Sym *s;
3346 s = sym_push(SYM_FIELD, type, 0, -1);
3347 type->t = VT_PTR | (type->t & VT_STORAGE);
3348 type->ref = s;
3351 /* return true if type1 and type2 are exactly the same (including
3352 qualifiers).
3354 static int is_compatible_types(CType *type1, CType *type2)
3356 return compare_types(type1,type2,0);
3359 /* return true if type1 and type2 are the same (ignoring qualifiers).
3361 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3363 return compare_types(type1,type2,1);
3366 static void cast_error(CType *st, CType *dt)
3368 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
/* verify type compatibility to store vtop in 'dt' type */
/* Emits warnings for the permitted-but-dubious cases (pointer/integer
   mixes, incompatible pointer targets, discarded qualifiers) and a hard
   error when the assignment is invalid. */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down matching levels of indirection, remembering whether
           any level would drop const/volatile */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
        error:
            cast_error(st, dt);
        }
        break;
    }
}
3457 static void gen_assign_cast(CType *dt)
3459 verify_assign_cast(dt);
3460 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
/* On entry: vtop[-1] is the destination lvalue, vtop[0] the value.
   On exit the stored value remains on the stack as the expression
   result.  Handles struct copies (memmove call), bitfield stores and
   plain scalar stores (including two-register values). */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;

    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);

        /* destination */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

        /* address of memcpy() */
#ifdef TCC_ARM_EABI
        if(!(align & 7))
            vpush_helper_func(TOK_memmove8);
        else if(!(align & 3))
            vpush_helper_func(TOK_memmove4);
        else
#endif
        /* Use memmove, rather than memcpy, as dest and src may be same: */
        vpush_helper_func(TOK_memmove);

        vswap();
        /* source */
        vpushv(vtop - 2);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* type size */
        vpushi(size);
        gfunc_call(3);
        /* leave source on stack */

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* _Bool bitfield: normalize to 0/1 first, then store as
               an unsigned byte */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* bitfield straddles a load/store unit: byte-wise path */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
3636 /* post defines POST/PRE add. c is the token ++ or -- */
3637 ST_FUNC void inc(int post, int c)
3639 test_lvalue();
3640 vdup(); /* save lvalue */
3641 if (post) {
3642 gv_dup(); /* duplicate value */
3643 vrotb(3);
3644 vrotb(3);
3646 /* add constant */
3647 vpushi(c - TOK_MID);
3648 gen_op('+');
3649 vstore(); /* store value */
3650 if (post)
3651 vpop(); /* if post op, return saved value */
/* Concatenate one or more adjacent string-literal tokens into *astr,
   NUL-terminated; 'msg' is the expect() message emitted when the current
   token is not a string.  The caller owns and frees the CString. */
3654 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3656 /* read the string */
3657 if (tok != TOK_STR)
3658 expect(msg);
3659 cstr_new(astr);
3660 while (tok == TOK_STR) {
3661 /* XXX: add \0 handling too ? */
3662 cstr_cat(astr, tokc.str.data, -1);
3663 next();
3665 cstr_ccat(astr, '\0');
3668 /* If I is >= 1 and a power of two, returns log2(i)+1.
3669 If I is 0 returns 0. */
3670 ST_FUNC int exact_log2p1(int i)
3672 int ret;
3673 if (!i)
3674 return 0;
3675 for (ret = 1; i >= 1 << 8; ret += 8)
3676 i >>= 8;
3677 if (i >= 1 << 4)
3678 ret += 4, i >>= 4;
3679 if (i >= 1 << 2)
3680 ret += 2, i >>= 2;
3681 if (i >= 1 << 1)
3682 ret++;
3683 return ret;
3686 /* Parse __attribute__((...)) GNUC extension. */
/* Accumulates each recognized attribute into *ad; unknown attributes
   produce a warning and have their parenthesized argument list skipped.
   Loops via 'redo' so several consecutive __attribute__((...)) groups
   are all consumed.
   NOTE(review): brace-only/blank lines were lost in this extract; the
   remaining token text is kept verbatim. */
3687 static void parse_attribute(AttributeDef *ad)
3689 int t, n;
3690 CString astr;
3692 redo:
3693 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3694 return;
3695 next();
3696 skip('(');
3697 skip('(');
3698 while (tok != ')') {
3699 if (tok < TOK_IDENT)
3700 expect("attribute name");
3701 t = tok;
3702 next();
3703 switch(t) {
/* cleanup(fn): record the function to call when the variable goes out
   of scope; an unknown name is implicitly declared old-style */
3704 case TOK_CLEANUP1:
3705 case TOK_CLEANUP2:
3707 Sym *s;
3709 skip('(');
3710 s = sym_find(tok);
3711 if (!s) {
3712 tcc_warning_c(warn_implicit_function_declaration)(
3713 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
3714 s = external_global_sym(tok, &func_old_type);
3715 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
3716 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
3717 ad->cleanup_func = s;
3718 next();
3719 skip(')');
3720 break;
3722 case TOK_CONSTRUCTOR1:
3723 case TOK_CONSTRUCTOR2:
3724 ad->f.func_ctor = 1;
3725 break;
3726 case TOK_DESTRUCTOR1:
3727 case TOK_DESTRUCTOR2:
3728 ad->f.func_dtor = 1;
3729 break;
3730 case TOK_ALWAYS_INLINE1:
3731 case TOK_ALWAYS_INLINE2:
3732 ad->f.func_alwinl = 1;
3733 break;
3734 case TOK_SECTION1:
3735 case TOK_SECTION2:
3736 skip('(');
3737 parse_mult_str(&astr, "section name");
3738 ad->section = find_section(tcc_state, (char *)astr.data);
3739 skip(')');
3740 cstr_free(&astr);
3741 break;
3742 case TOK_ALIAS1:
3743 case TOK_ALIAS2:
3744 skip('(');
3745 parse_mult_str(&astr, "alias(\"target\")");
3746 ad->alias_target = /* save string as token, for later */
3747 tok_alloc((char*)astr.data, astr.size-1)->tok;
3748 skip(')');
3749 cstr_free(&astr);
3750 break;
3751 case TOK_VISIBILITY1:
3752 case TOK_VISIBILITY2:
3753 skip('(');
3754 parse_mult_str(&astr,
3755 "visibility(\"default|hidden|internal|protected\")");
3756 if (!strcmp (astr.data, "default"))
3757 ad->a.visibility = STV_DEFAULT;
3758 else if (!strcmp (astr.data, "hidden"))
3759 ad->a.visibility = STV_HIDDEN;
3760 else if (!strcmp (astr.data, "internal"))
3761 ad->a.visibility = STV_INTERNAL;
3762 else if (!strcmp (astr.data, "protected"))
3763 ad->a.visibility = STV_PROTECTED;
3764 else
3765 expect("visibility(\"default|hidden|internal|protected\")");
3766 skip(')');
3767 cstr_free(&astr);
3768 break;
/* aligned or aligned(n): n defaults to MAX_ALIGN and is stored as
   log2(n)+1 via exact_log2p1 */
3769 case TOK_ALIGNED1:
3770 case TOK_ALIGNED2:
3771 if (tok == '(') {
3772 next();
3773 n = expr_const();
3774 if (n <= 0 || (n & (n - 1)) != 0)
3775 tcc_error("alignment must be a positive power of two");
3776 skip(')');
3777 } else {
3778 n = MAX_ALIGN;
3780 ad->a.aligned = exact_log2p1(n);
3781 if (n != 1 << (ad->a.aligned - 1))
3782 tcc_error("alignment of %d is larger than implemented", n);
3783 break;
3784 case TOK_PACKED1:
3785 case TOK_PACKED2:
3786 ad->a.packed = 1;
3787 break;
3788 case TOK_WEAK1:
3789 case TOK_WEAK2:
3790 ad->a.weak = 1;
3791 break;
3792 case TOK_UNUSED1:
3793 case TOK_UNUSED2:
3794 /* currently, no need to handle it because tcc does not
3795 track unused objects */
3796 break;
3797 case TOK_NORETURN1:
3798 case TOK_NORETURN2:
3799 ad->f.func_noreturn = 1;
3800 break;
3801 case TOK_CDECL1:
3802 case TOK_CDECL2:
3803 case TOK_CDECL3:
3804 ad->f.func_call = FUNC_CDECL;
3805 break;
3806 case TOK_STDCALL1:
3807 case TOK_STDCALL2:
3808 case TOK_STDCALL3:
3809 ad->f.func_call = FUNC_STDCALL;
3810 break;
3811 #ifdef TCC_TARGET_I386
/* regparm(n): clamp n to [0,3]; nonzero selects a fastcall variant */
3812 case TOK_REGPARM1:
3813 case TOK_REGPARM2:
3814 skip('(');
3815 n = expr_const();
3816 if (n > 3)
3817 n = 3;
3818 else if (n < 0)
3819 n = 0;
3820 if (n > 0)
3821 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3822 skip(')');
3823 break;
3824 case TOK_FASTCALL1:
3825 case TOK_FASTCALL2:
3826 case TOK_FASTCALL3:
3827 ad->f.func_call = FUNC_FASTCALLW;
3828 break;
3829 #endif
/* mode(DI/QI/HI/SI/word): override the base integer type; stored +1 so
   0 means "no mode attribute" */
3830 case TOK_MODE:
3831 skip('(');
3832 switch(tok) {
3833 case TOK_MODE_DI:
3834 ad->attr_mode = VT_LLONG + 1;
3835 break;
3836 case TOK_MODE_QI:
3837 ad->attr_mode = VT_BYTE + 1;
3838 break;
3839 case TOK_MODE_HI:
3840 ad->attr_mode = VT_SHORT + 1;
3841 break;
3842 case TOK_MODE_SI:
3843 case TOK_MODE_word:
3844 ad->attr_mode = VT_INT + 1;
3845 break;
3846 default:
3847 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3848 break;
3850 next();
3851 skip(')');
3852 break;
3853 case TOK_DLLEXPORT:
3854 ad->a.dllexport = 1;
3855 break;
3856 case TOK_NODECORATE:
3857 ad->a.nodecorate = 1;
3858 break;
3859 case TOK_DLLIMPORT:
3860 ad->a.dllimport = 1;
3861 break;
3862 default:
3863 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
3864 /* skip parameters */
3865 if (tok == '(') {
3866 int parenthesis = 0;
3867 do {
3868 if (tok == '(')
3869 parenthesis++;
3870 else if (tok == ')')
3871 parenthesis--;
3872 next();
3873 } while (parenthesis && tok != -1);
3875 break;
3877 if (tok != ',')
3878 break;
3879 next();
3881 skip(')');
3882 skip(')');
3883 goto redo;
3886 static Sym * find_field (CType *type, int v, int *cumofs)
3888 Sym *s = type->ref;
3889 v |= SYM_FIELD;
3890 while ((s = s->next) != NULL) {
3891 if ((s->v & SYM_FIELD) &&
3892 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3893 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3894 Sym *ret = find_field (&s->type, v, cumofs);
3895 if (ret) {
3896 *cumofs += s->c;
3897 return ret;
3900 if (s->v == v)
3901 break;
3903 return s;
/* Duplicate-member detection for struct/union declarations.  Called in a
   pair (see struct_decl): with check != 0 it marks every named member by
   toggling the SYM_FIELD bit in its TokenSym and errors when the bit was
   already set (duplicate name); the follow-up call with check == 0
   toggles the bits back off.  Recurses into struct-typed members so
   anonymous sub-struct names are checked at the outer level too. */
3906 static void check_fields (CType *type, int check)
3908 Sym *s = type->ref;
3910 while ((s = s->next) != NULL) {
3911 int v = s->v & ~SYM_FIELD;
3912 if (v < SYM_FIRST_ANOM) {
3913 TokenSym *ts = table_ident[v - TOK_IDENT];
3914 if (check && (ts->tok & SYM_FIELD))
3915 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
/* toggle, not set: the second (check == 0) pass restores the bit */
3916 ts->tok ^= SYM_FIELD;
3917 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
3918 check_fields (&s->type, check);
/* Compute offset, size and alignment for every member of a struct/union
   and store the results: f->c = byte offset of each field, type->ref->c
   = total size, type->ref->r = alignment.  Supports both PCC/GCC and MS
   bit-field layout modes, #pragma pack and the aligned/packed
   attributes, then runs a second pass so each bit-field can be accessed
   through a (possibly different) type that stays inside the struct
   (recorded in f->auxtype).
   NOTE(review): brace-only/blank lines were lost in this extract; the
   remaining token text is kept verbatim. */
3922 static void struct_layout(CType *type, AttributeDef *ad)
3924 int size, align, maxalign, offset, c, bit_pos, bit_size;
3925 int packed, a, bt, prevbt, prev_bit_size;
3926 int pcc = !tcc_state->ms_bitfields;
3927 int pragma_pack = *tcc_state->pack_stack_ptr;
3928 Sym *f;
3930 maxalign = 1;
3931 offset = 0;
3932 c = 0;
3933 bit_pos = 0;
3934 prevbt = VT_STRUCT; /* make it never match */
3935 prev_bit_size = 0;
3937 //#define BF_DEBUG
3939 for (f = type->ref->next; f; f = f->next) {
3940 if (f->type.t & VT_BITFIELD)
3941 bit_size = BIT_SIZE(f->type.t);
3942 else
3943 bit_size = -1;
3944 size = type_size(&f->type, &align);
3945 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3946 packed = 0;
3948 if (pcc && bit_size == 0) {
3949 /* in pcc mode, packing does not affect zero-width bitfields */
3951 } else {
3952 /* in pcc mode, attribute packed overrides if set. */
3953 if (pcc && (f->a.packed || ad->a.packed))
3954 align = packed = 1;
3956 /* pragma pack overrides align if lesser and packs bitfields always */
3957 if (pragma_pack) {
3958 packed = 1;
3959 if (pragma_pack < align)
3960 align = pragma_pack;
3961 /* in pcc mode pragma pack also overrides individual align */
3962 if (pcc && pragma_pack < a)
3963 a = 0;
3966 /* some individual align was specified */
3967 if (a)
3968 align = a;
/* unions: every member starts at offset 0; only the max size counts */
3970 if (type->ref->type.t == VT_UNION) {
3971 if (pcc && bit_size >= 0)
3972 size = (bit_size + 7) >> 3;
3973 offset = 0;
3974 if (size > c)
3975 c = size;
3977 } else if (bit_size < 0) {
/* plain (non-bit-field) struct member: align then advance */
3978 if (pcc)
3979 c += (bit_pos + 7) >> 3;
3980 c = (c + align - 1) & -align;
3981 offset = c;
3982 if (size > 0)
3983 c += size;
3984 bit_pos = 0;
3985 prevbt = VT_STRUCT;
3986 prev_bit_size = 0;
3988 } else {
3989 /* A bit-field. Layout is more complicated. There are two
3990 options: PCC (GCC) compatible and MS compatible */
3991 if (pcc) {
3992 /* In PCC layout a bit-field is placed adjacent to the
3993 preceding bit-fields, except if:
3994 - it has zero-width
3995 - an individual alignment was given
3996 - it would overflow its base type container and
3997 there is no packing */
3998 if (bit_size == 0) {
3999 new_field:
4000 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4001 bit_pos = 0;
4002 } else if (f->a.aligned) {
4003 goto new_field;
4004 } else if (!packed) {
4005 int a8 = align * 8;
4006 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4007 if (ofs > size / align)
4008 goto new_field;
4011 /* in pcc mode, long long bitfields have type int if they fit */
4012 if (size == 8 && bit_size <= 32)
4013 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4015 while (bit_pos >= align * 8)
4016 c += align, bit_pos -= align * 8;
4017 offset = c;
4019 /* In PCC layout named bit-fields influence the alignment
4020 of the containing struct using the base types alignment,
4021 except for packed fields (which here have correct align). */
4022 if (f->v & SYM_FIRST_ANOM
4023 // && bit_size // ??? gcc on ARM/rpi does that
4025 align = 1;
4027 } else {
4028 bt = f->type.t & VT_BTYPE;
4029 if ((bit_pos + bit_size > size * 8)
4030 || (bit_size > 0) == (bt != prevbt)
4032 c = (c + align - 1) & -align;
4033 offset = c;
4034 bit_pos = 0;
4035 /* In MS bitfield mode a bit-field run always uses
4036 at least as many bits as the underlying type.
4037 To start a new run it's also required that this
4038 or the last bit-field had non-zero width. */
4039 if (bit_size || prev_bit_size)
4040 c += size;
4042 /* In MS layout the records alignment is normally
4043 influenced by the field, except for a zero-width
4044 field at the start of a run (but by further zero-width
4045 fields it is again). */
4046 if (bit_size == 0 && prevbt != bt)
4047 align = 1;
4048 prevbt = bt;
4049 prev_bit_size = bit_size;
/* record the bit position of this bit-field in its type word */
4052 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4053 | (bit_pos << VT_STRUCT_SHIFT);
4054 bit_pos += bit_size;
4056 if (align > maxalign)
4057 maxalign = align;
4059 #ifdef BF_DEBUG
4060 printf("set field %s offset %-2d size %-2d align %-2d",
4061 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4062 if (f->type.t & VT_BITFIELD) {
4063 printf(" pos %-2d bits %-2d",
4064 BIT_POS(f->type.t),
4065 BIT_SIZE(f->type.t)
4068 printf("\n");
4069 #endif
4071 f->c = offset;
4072 f->r = 0;
4075 if (pcc)
4076 c += (bit_pos + 7) >> 3;
4078 /* store size and alignment */
4079 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4080 if (a < maxalign)
4081 a = maxalign;
4082 type->ref->r = a;
4083 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4084 /* can happen if individual align for some member was given. In
4085 this case MSVC ignores maxalign when aligning the size */
4086 a = pragma_pack;
4087 if (a < bt)
4088 a = bt;
4090 c = (c + a - 1) & -a;
4091 type->ref->c = c;
4093 #ifdef BF_DEBUG
4094 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4095 #endif
4097 /* check whether we can access bitfields by their type */
4098 for (f = type->ref->next; f; f = f->next) {
4099 int s, px, cx, c0;
4100 CType t;
4102 if (0 == (f->type.t & VT_BITFIELD))
4103 continue;
4104 f->type.ref = f;
4105 f->auxtype = -1;
4106 bit_size = BIT_SIZE(f->type.t);
4107 if (bit_size == 0)
4108 continue;
4109 bit_pos = BIT_POS(f->type.t);
4110 size = type_size(&f->type, &align);
4112 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4113 #ifdef TCC_TARGET_ARM
4114 && !(f->c & (align - 1))
4115 #endif
4117 continue;
4119 /* try to access the field using a different type */
4120 c0 = -1, s = align = 1;
4121 t.t = VT_BYTE;
4122 for (;;) {
4123 px = f->c * 8 + bit_pos;
4124 cx = (px >> 3) & -align;
4125 px = px - (cx << 3);
4126 if (c0 == cx)
4127 break;
4128 s = (px + bit_size + 7) >> 3;
4129 if (s > 4) {
4130 t.t = VT_LLONG;
4131 } else if (s > 2) {
4132 t.t = VT_INT;
4133 } else if (s > 1) {
4134 t.t = VT_SHORT;
4135 } else {
4136 t.t = VT_BYTE;
4138 s = type_size(&t, &align);
4139 c0 = cx;
4142 if (px + bit_size <= s * 8 && cx + s <= c
4143 #ifdef TCC_TARGET_ARM
4144 && !(cx & (align - 1))
4145 #endif
4147 /* update offset and bit position */
4148 f->c = cx;
4149 bit_pos = px;
4150 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4151 | (bit_pos << VT_STRUCT_SHIFT);
4152 if (s != size)
4153 f->auxtype = t.t;
4154 #ifdef BF_DEBUG
4155 printf("FIX field %s offset %-2d size %-2d align %-2d "
4156 "pos %-2d bits %-2d\n",
4157 get_tok_str(f->v & ~SYM_FIELD, NULL),
4158 cx, s, align, px, bit_size);
4159 #endif
4160 } else {
4161 /* fall back to load/store single-byte wise */
4162 f->auxtype = VT_STRUCT;
4163 #ifdef BF_DEBUG
4164 printf("FIX field %s : load byte-wise\n",
4165 get_tok_str(f->v & ~SYM_FIELD, NULL));
4166 #endif
4171 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses an optional tag (reusing or pushing a SYM_STRUCT symbol), then,
   when a '{' body follows: for enums, collects enumerators while
   tracking min/max values to select the integral base type; for
   structs/unions, parses member declarators including bit-fields and
   flexible array members, then runs check_fields and struct_layout.
   NOTE(review): brace-only/blank lines were lost in this extract; the
   remaining token text is kept verbatim. */
4172 static void struct_decl(CType *type, int u)
4174 int v, c, size, align, flexible;
4175 int bit_size, bsize, bt;
4176 Sym *s, *ss, **ps;
4177 AttributeDef ad, ad1;
4178 CType type1, btype;
4180 memset(&ad, 0, sizeof ad);
4181 next();
4182 parse_attribute(&ad);
4183 if (tok != '{') {
4184 v = tok;
4185 next();
4186 /* struct already defined ? return it */
4187 if (v < TOK_IDENT)
4188 expect("struct/union/enum name");
4189 s = struct_find(v);
4190 if (s && (s->sym_scope == local_scope || tok != '{')) {
4191 if (u == s->type.t)
4192 goto do_decl;
4193 if (u == VT_ENUM && IS_ENUM(s->type.t))
4194 goto do_decl;
4195 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4197 } else {
/* untagged declaration: give it a fresh anonymous symbol */
4198 v = anon_sym++;
4200 /* Record the original enum/struct/union token. */
4201 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4202 type1.ref = NULL;
4203 /* we put an undefined size for struct/union */
4204 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4205 s->r = 0; /* default alignment is zero as gcc */
4206 do_decl:
4207 type->t = s->type.t;
4208 type->ref = s;
4210 if (tok == '{') {
4211 next();
4212 if (s->c != -1)
4213 tcc_error("struct/union/enum already defined");
4214 s->c = -2;
4215 /* cannot be empty */
4216 /* non empty enums are not allowed */
4217 ps = &s->next;
4218 if (u == VT_ENUM) {
/* pl/nl track the largest positive / smallest negative enumerator */
4219 long long ll = 0, pl = 0, nl = 0;
4220 CType t;
4221 t.ref = s;
4222 /* enum symbols have static storage */
4223 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4224 for(;;) {
4225 v = tok;
4226 if (v < TOK_UIDENT)
4227 expect("identifier");
4228 ss = sym_find(v);
4229 if (ss && !local_stack)
4230 tcc_error("redefinition of enumerator '%s'",
4231 get_tok_str(v, NULL));
4232 next();
4233 if (tok == '=') {
4234 next();
4235 ll = expr_const64();
4237 ss = sym_push(v, &t, VT_CONST, 0);
4238 ss->enum_val = ll;
4239 *ps = ss, ps = &ss->next;
4240 if (ll < nl)
4241 nl = ll;
4242 if (ll > pl)
4243 pl = ll;
4244 if (tok != ',')
4245 break;
4246 next();
4247 ll++;
4248 /* NOTE: we accept a trailing comma */
4249 if (tok == '}')
4250 break;
4252 skip('}');
4253 /* set integral type of the enum */
4254 t.t = VT_INT;
4255 if (nl >= 0) {
4256 if (pl != (unsigned)pl)
4257 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4258 t.t |= VT_UNSIGNED;
4259 } else if (pl != (int)pl || nl != (int)nl)
4260 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4261 s->type.t = type->t = t.t | VT_ENUM;
4262 s->c = 0;
4263 /* set type for enum members */
4264 for (ss = s->next; ss; ss = ss->next) {
4265 ll = ss->enum_val;
4266 if (ll == (int)ll) /* default is int if it fits */
4267 continue;
4268 if (t.t & VT_UNSIGNED) {
4269 ss->type.t |= VT_UNSIGNED;
4270 if (ll == (unsigned)ll)
4271 continue;
4273 ss->type.t = (ss->type.t & ~VT_BTYPE)
4274 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4276 } else {
/* struct/union body: c flags "a real member was seen" (used to allow a
   flexible array member only at the end) */
4277 c = 0;
4278 flexible = 0;
4279 while (tok != '}') {
4280 if (!parse_btype(&btype, &ad1)) {
4281 skip(';');
4282 continue;
4284 while (1) {
4285 if (flexible)
4286 tcc_error("flexible array member '%s' not at the end of struct",
4287 get_tok_str(v, NULL));
4288 bit_size = -1;
4289 v = 0;
4290 type1 = btype;
4291 if (tok != ':') {
4292 if (tok != ';')
4293 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4294 if (v == 0) {
4295 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4296 expect("identifier");
4297 else {
4298 int v = btype.ref->v;
4299 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4300 if (tcc_state->ms_extensions == 0)
4301 expect("identifier");
4305 if (type_size(&type1, &align) < 0) {
4306 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4307 flexible = 1;
4308 else
4309 tcc_error("field '%s' has incomplete type",
4310 get_tok_str(v, NULL));
4312 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4313 (type1.t & VT_BTYPE) == VT_VOID ||
4314 (type1.t & VT_STORAGE))
4315 tcc_error("invalid type for '%s'",
4316 get_tok_str(v, NULL));
4318 if (tok == ':') {
4319 next();
4320 bit_size = expr_const();
4321 /* XXX: handle v = 0 case for messages */
4322 if (bit_size < 0)
4323 tcc_error("negative width in bit-field '%s'",
4324 get_tok_str(v, NULL));
4325 if (v && bit_size == 0)
4326 tcc_error("zero width for bit-field '%s'",
4327 get_tok_str(v, NULL));
4328 parse_attribute(&ad1);
4330 size = type_size(&type1, &align);
4331 if (bit_size >= 0) {
4332 bt = type1.t & VT_BTYPE;
4333 if (bt != VT_INT &&
4334 bt != VT_BYTE &&
4335 bt != VT_SHORT &&
4336 bt != VT_BOOL &&
4337 bt != VT_LLONG)
4338 tcc_error("bitfields must have scalar type");
4339 bsize = size * 8;
4340 if (bit_size > bsize) {
4341 tcc_error("width of '%s' exceeds its type",
4342 get_tok_str(v, NULL));
4343 } else if (bit_size == bsize
4344 && !ad.a.packed && !ad1.a.packed) {
4345 /* no need for bit fields */
4347 } else if (bit_size == 64) {
4348 tcc_error("field width 64 not implemented");
4349 } else {
4350 type1.t = (type1.t & ~VT_STRUCT_MASK)
4351 | VT_BITFIELD
4352 | (bit_size << (VT_STRUCT_SHIFT + 6));
4355 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4356 /* Remember we've seen a real field to check
4357 for placement of flexible array member. */
4358 c = 1;
4360 /* If member is a struct or bit-field, enforce
4361 placing into the struct (as anonymous). */
4362 if (v == 0 &&
4363 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4364 bit_size >= 0)) {
4365 v = anon_sym++;
4367 if (v) {
4368 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4369 ss->a = ad1.a;
4370 *ps = ss;
4371 ps = &ss->next;
4373 if (tok == ';' || tok == TOK_EOF)
4374 break;
4375 skip(',');
4377 skip(';');
4379 skip('}');
4380 parse_attribute(&ad);
4381 if (ad.cleanup_func) {
4382 tcc_warning("attribute '__cleanup__' ignored on type");
/* paired calls: pass 1 detects duplicates, pass 0 clears the marks */
4384 check_fields(type, 1);
4385 check_fields(type, 0);
4386 struct_layout(type, &ad);
4387 if (debug_modes)
4388 tcc_debug_fix_anon(tcc_state, type);
/* Fold the attributes stored on symbol s (both the symbol-attribute and
   the function-attribute halves) into *ad. */
4393 static void sym_to_attr(AttributeDef *ad, Sym *s)
4395 merge_symattr(&ad->a, &s->a);
4396 merge_funcattr(&ad->f, &s->f);
4399 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4400 are added to the element type, copied because it could be a typedef. */
4401 static void parse_btype_qualify(CType *type, int qualifiers)
4403 while (type->t & VT_ARRAY) {
4404 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4405 type = &type->ref->type;
4407 type->t |= qualifiers;
4410 /* return 0 if no type declaration. otherwise, return the basic type
4411 and skip it.
/* Accumulates basic type keywords (char/int/long/...), qualifiers,
   storage classes, __attribute__ groups, _Alignas, typeof and typedef
   names into type/ad.  bt tracks the base type seen so far and st the
   short/long size modifier; both start at -1 ("none yet") to diagnose
   "too many basic types".
   NOTE(review): brace-only/blank lines were lost in this extract; the
   remaining token text is kept verbatim. */
4413 static int parse_btype(CType *type, AttributeDef *ad)
4415 int t, u, bt, st, type_found, typespec_found, g, n;
4416 Sym *s;
4417 CType type1;
4419 memset(ad, 0, sizeof(AttributeDef));
4420 type_found = 0;
4421 typespec_found = 0;
4422 t = VT_INT;
4423 bt = st = -1;
4424 type->ref = NULL;
4426 while(1) {
4427 switch(tok) {
4428 case TOK_EXTENSION:
4429 /* currently, we really ignore extension */
4430 next();
4431 continue;
4433 /* basic types */
4434 case TOK_CHAR:
4435 u = VT_BYTE;
4436 basic_type:
4437 next();
4438 basic_type1:
4439 if (u == VT_SHORT || u == VT_LONG) {
4440 if (st != -1 || (bt != -1 && bt != VT_INT))
4441 tmbt: tcc_error("too many basic types");
4442 st = u;
4443 } else {
4444 if (bt != -1 || (st != -1 && u != VT_INT))
4445 goto tmbt;
4446 bt = u;
4448 if (u != VT_INT)
4449 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4450 typespec_found = 1;
4451 break;
4452 case TOK_VOID:
4453 u = VT_VOID;
4454 goto basic_type;
4455 case TOK_SHORT:
4456 u = VT_SHORT;
4457 goto basic_type;
4458 case TOK_INT:
4459 u = VT_INT;
4460 goto basic_type;
/* _Alignas(type) or _Alignas(const-expr): record as a.aligned */
4461 case TOK_ALIGNAS:
4462 { int n;
4463 AttributeDef ad1;
4464 next();
4465 skip('(');
4466 memset(&ad1, 0, sizeof(AttributeDef));
4467 if (parse_btype(&type1, &ad1)) {
4468 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4469 if (ad1.a.aligned)
4470 n = 1 << (ad1.a.aligned - 1);
4471 else
4472 type_size(&type1, &n);
4473 } else {
4474 n = expr_const();
4475 if (n <= 0 || (n & (n - 1)) != 0)
4476 tcc_error("alignment must be a positive power of two");
4478 skip(')');
4479 ad->a.aligned = exact_log2p1(n);
4481 continue;
4482 case TOK_LONG:
4483 if ((t & VT_BTYPE) == VT_DOUBLE) {
4484 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4485 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4486 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4487 } else {
4488 u = VT_LONG;
4489 goto basic_type;
4491 next();
4492 break;
4493 #ifdef TCC_TARGET_ARM64
4494 case TOK_UINT128:
4495 /* GCC's __uint128_t appears in some Linux header files. Make it a
4496 synonym for long double to get the size and alignment right. */
4497 u = VT_LDOUBLE;
4498 goto basic_type;
4499 #endif
4500 case TOK_BOOL:
4501 u = VT_BOOL;
4502 goto basic_type;
4503 case TOK_FLOAT:
4504 u = VT_FLOAT;
4505 goto basic_type;
4506 case TOK_DOUBLE:
4507 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4508 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4509 } else {
4510 u = VT_DOUBLE;
4511 goto basic_type;
4513 next();
4514 break;
4515 case TOK_ENUM:
4516 struct_decl(&type1, VT_ENUM);
4517 basic_type2:
4518 u = type1.t;
4519 type->ref = type1.ref;
4520 goto basic_type1;
4521 case TOK_STRUCT:
4522 struct_decl(&type1, VT_STRUCT);
4523 goto basic_type2;
4524 case TOK_UNION:
4525 struct_decl(&type1, VT_UNION);
4526 goto basic_type2;
4528 /* type modifiers */
4529 case TOK__Atomic:
4530 next();
4531 type->t = t;
4532 parse_btype_qualify(type, VT_ATOMIC);
4533 t = type->t;
4534 if (tok == '(') {
4535 parse_expr_type(&type1);
4536 /* remove all storage modifiers except typedef */
4537 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4538 if (type1.ref)
4539 sym_to_attr(ad, type1.ref);
4540 goto basic_type2;
4542 break;
4543 case TOK_CONST1:
4544 case TOK_CONST2:
4545 case TOK_CONST3:
4546 type->t = t;
4547 parse_btype_qualify(type, VT_CONSTANT);
4548 t = type->t;
4549 next();
4550 break;
4551 case TOK_VOLATILE1:
4552 case TOK_VOLATILE2:
4553 case TOK_VOLATILE3:
4554 type->t = t;
4555 parse_btype_qualify(type, VT_VOLATILE);
4556 t = type->t;
4557 next();
4558 break;
4559 case TOK_SIGNED1:
4560 case TOK_SIGNED2:
4561 case TOK_SIGNED3:
4562 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4563 tcc_error("signed and unsigned modifier");
4564 t |= VT_DEFSIGN;
4565 next();
4566 typespec_found = 1;
4567 break;
4568 case TOK_REGISTER:
4569 case TOK_AUTO:
4570 case TOK_RESTRICT1:
4571 case TOK_RESTRICT2:
4572 case TOK_RESTRICT3:
4573 next();
4574 break;
4575 case TOK_UNSIGNED:
4576 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4577 tcc_error("signed and unsigned modifier");
4578 t |= VT_DEFSIGN | VT_UNSIGNED;
4579 next();
4580 typespec_found = 1;
4581 break;
4583 /* storage */
4584 case TOK_EXTERN:
4585 g = VT_EXTERN;
4586 goto storage;
4587 case TOK_STATIC:
4588 g = VT_STATIC;
4589 goto storage;
4590 case TOK_TYPEDEF:
4591 g = VT_TYPEDEF;
4592 goto storage;
4593 storage:
4594 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4595 tcc_error("multiple storage classes");
4596 t |= g;
4597 next();
4598 break;
4599 case TOK_INLINE1:
4600 case TOK_INLINE2:
4601 case TOK_INLINE3:
4602 t |= VT_INLINE;
4603 next();
4604 break;
4605 case TOK_NORETURN3:
4606 next();
4607 ad->f.func_noreturn = 1;
4608 break;
4609 /* GNUC attribute */
4610 case TOK_ATTRIBUTE1:
4611 case TOK_ATTRIBUTE2:
4612 parse_attribute(ad);
4613 if (ad->attr_mode) {
4614 u = ad->attr_mode -1;
4615 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4617 continue;
4618 /* GNUC typeof */
4619 case TOK_TYPEOF1:
4620 case TOK_TYPEOF2:
4621 case TOK_TYPEOF3:
4622 next();
4623 parse_expr_type(&type1);
4624 /* remove all storage modifiers except typedef */
4625 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4626 if (type1.ref)
4627 sym_to_attr(ad, type1.ref);
4628 goto basic_type2;
4629 default:
/* an identifier: only a typedef name continues the type */
4630 if (typespec_found)
4631 goto the_end;
4632 s = sym_find(tok);
4633 if (!s || !(s->type.t & VT_TYPEDEF))
4634 goto the_end;
4636 n = tok, next();
4637 if (tok == ':' && !in_generic) {
4638 /* ignore if it's a label */
4639 unget_tok(n);
4640 goto the_end;
4643 t &= ~(VT_BTYPE|VT_LONG);
4644 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4645 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4646 type->ref = s->type.ref;
4647 if (t)
4648 parse_btype_qualify(type, t);
4649 t = type->t;
4650 /* get attributes from typedef */
4651 sym_to_attr(ad, s);
4652 typespec_found = 1;
4653 st = bt = -2;
4654 break;
4656 type_found = 1;
4658 the_end:
4659 if (tcc_state->char_is_unsigned) {
4660 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4661 t |= VT_UNSIGNED;
4663 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4664 bt = t & (VT_BTYPE|VT_LONG);
4665 if (bt == VT_LONG)
4666 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4667 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4668 if (bt == VT_LDOUBLE)
4669 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4670 #endif
4671 type->t = t;
4672 return type_found;
4675 /* convert a function parameter type (array to pointer and function to
4676 function pointer) */
4677 static inline void convert_parameter_type(CType *pt)
4679 /* remove const and volatile qualifiers (XXX: const could be used
4680 to indicate a const function parameter */
4681 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4682 /* array must be transformed to pointer according to ANSI C */
4683 pt->t &= ~VT_ARRAY;
4684 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4685 mk_pointer(pt);
/* Parse '(' followed by one or more string literals into *astr; the
   closing ')' is left for the caller to skip. */
4689 ST_FUNC void parse_asm_str(CString *astr)
4691 skip('(');
4692 parse_mult_str(astr, "string constant");
4695 /* Parse an asm label and return the token */
/* Consumes the token after the asm keyword, reads the parenthesized
   string, interns it as a token and returns that token id. */
4696 static int asm_label_instr(void)
4698 int v;
4699 CString astr;
4701 next();
4702 parse_asm_str(&astr);
4703 skip(')');
4704 #ifdef ASM_DEBUG
4705 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4706 #endif
/* astr.size - 1: drop the trailing NUL added by parse_mult_str */
4707 v = tok_alloc(astr.data, astr.size - 1)->tok;
4708 cstr_free(&astr);
4709 return v;
/* Parses declarator suffixes: '(' parameter lists (building the
   anonymous function-prototype symbol stored in type->ref) and '['
   array/VLA sizes (recursing via post_type for the element type).
   Returns 1 when a suffix was handled; returns 0 when the '(' turns out
   to open a recursive declarator instead of a parameter list.
   NOTE(review): brace-only/blank lines were lost in this extract; the
   remaining token text is kept verbatim. */
4712 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4714 int n, l, t1, arg_size, align, unused_align;
4715 Sym **plast, *s, *first;
4716 AttributeDef ad1;
4717 CType pt;
4718 TokenString *vla_array_tok = NULL;
4719 int *vla_array_str = NULL;
4721 if (tok == '(') {
4722 /* function type, or recursive declarator (return if so) */
4723 next();
4724 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4725 return 0;
4726 if (tok == ')')
4727 l = 0;
4728 else if (parse_btype(&pt, &ad1))
4729 l = FUNC_NEW;
4730 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4731 merge_attr (ad, &ad1);
4732 return 0;
4733 } else
4734 l = FUNC_OLD;
4736 first = NULL;
4737 plast = &first;
4738 arg_size = 0;
/* parameters live in their own scope until the ')' */
4739 ++local_scope;
4740 if (l) {
4741 for(;;) {
4742 /* read param name and compute offset */
4743 if (l != FUNC_OLD) {
4744 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4745 break;
4746 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4747 if ((pt.t & VT_BTYPE) == VT_VOID)
4748 tcc_error("parameter declared as void");
4749 if (n == 0)
4750 n = SYM_FIELD;
4751 } else {
4752 n = tok;
4753 pt.t = VT_VOID; /* invalid type */
4754 pt.ref = NULL;
4755 next();
4757 if (n < TOK_UIDENT)
4758 expect("identifier");
4759 convert_parameter_type(&pt);
4760 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4761 s = sym_push(n, &pt, 0, 0);
4762 *plast = s;
4763 plast = &s->next;
4764 if (tok == ')')
4765 break;
4766 skip(',');
4767 if (l == FUNC_NEW && tok == TOK_DOTS) {
4768 l = FUNC_ELLIPSIS;
4769 next();
4770 break;
4772 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4773 tcc_error("invalid type");
4775 } else
4776 /* if no parameters, then old type prototype */
4777 l = FUNC_OLD;
4778 skip(')');
4779 /* remove parameter symbols from token table, keep on stack */
4780 if (first) {
4781 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4782 for (s = first; s; s = s->next)
4783 s->v |= SYM_FIELD;
4785 --local_scope;
4786 /* NOTE: const is ignored in returned type as it has a special
4787 meaning in gcc / C++ */
4788 type->t &= ~VT_CONSTANT;
4789 /* some ancient pre-K&R C allows a function to return an array
4790 and the array brackets to be put after the arguments, such
4791 that "int c()[]" means something like "int[] c()" */
4792 if (tok == '[') {
4793 next();
4794 skip(']'); /* only handle simple "[]" */
4795 mk_pointer(type);
4797 /* we push a anonymous symbol which will contain the function prototype */
4798 ad->f.func_args = arg_size;
4799 ad->f.func_type = l;
4800 s = sym_push(SYM_FIELD, type, 0, 0);
4801 s->a = ad->a;
4802 s->f = ad->f;
4803 s->next = first;
4804 type->t = VT_FUNC;
4805 type->ref = s;
4806 } else if (tok == '[') {
4807 int saved_nocode_wanted = nocode_wanted;
4808 /* array definition */
4809 next();
4810 n = -1;
4811 t1 = 0;
/* parameter arrays: accept (and ignore) type-quals/static/'*'; a VLA
   size expression is saved as a token string for later replay */
4812 if (td & TYPE_PARAM) while (1) {
4813 /* XXX The optional type-quals and static should only be accepted
4814 in parameter decls. The '*' as well, and then even only
4815 in prototypes (not function defs). */
4816 switch (tok) {
4817 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4818 case TOK_CONST1:
4819 case TOK_VOLATILE1:
4820 case TOK_STATIC:
4821 case '*':
4822 next();
4823 continue;
4824 default:
4825 break;
4827 if (tok != ']') {
4828 int nest = 1;
4830 /* Code generation is not done now but has to be done
4831 at start of function. Save code here for later use. */
4832 nocode_wanted = 1;
4833 vla_array_tok = tok_str_alloc();
4834 for (;;) {
4835 if (tok == ']') {
4836 nest--;
4837 if (nest == 0)
4838 break;
4840 if (tok == '[')
4841 nest++;
4842 tok_str_add_tok(vla_array_tok);
4843 next();
4845 unget_tok(0);
4846 tok_str_add(vla_array_tok, -1);
4847 tok_str_add(vla_array_tok, 0);
4848 vla_array_str = vla_array_tok->str;
4849 begin_macro(vla_array_tok, 2);
4850 next();
4851 gexpr();
4852 end_macro();
4853 next();
4854 goto check;
4856 break;
4858 } else if (tok != ']') {
4859 if (!local_stack || (storage & VT_STATIC))
4860 vpushi(expr_const());
4861 else {
4862 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4863 length must always be evaluated, even under nocode_wanted,
4864 so that its size slot is initialized (e.g. under sizeof
4865 or typeof). */
4866 nocode_wanted = 0;
4867 gexpr();
4869 check:
4870 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4871 n = vtop->c.i;
4872 if (n < 0)
4873 tcc_error("invalid array size");
4874 } else {
4875 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4876 tcc_error("size of variable length array should be an integer");
4877 n = 0;
4878 t1 = VT_VLA;
4881 skip(']');
4882 /* parse next post type */
4883 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
4885 if ((type->t & VT_BTYPE) == VT_FUNC)
4886 tcc_error("declaration of an array of functions");
4887 if ((type->t & VT_BTYPE) == VT_VOID
4888 || type_size(type, &unused_align) < 0)
4889 tcc_error("declaration of an array of incomplete type elements");
4891 t1 |= type->t & VT_VLA;
4893 if (t1 & VT_VLA) {
4894 if (n < 0) {
4895 if (td & TYPE_NEST)
4896 tcc_error("need explicit inner array size in VLAs");
4898 else {
/* reserve a stack slot for the computed VLA size and store it */
4899 loc -= type_size(&int_type, &align);
4900 loc &= -align;
4901 n = loc;
4903 vpush_type_size(type, &align);
4904 gen_op('*');
4905 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4906 vswap();
4907 vstore();
4910 if (n != -1)
4911 vpop();
4912 nocode_wanted = saved_nocode_wanted;
4914 /* we push an anonymous symbol which will contain the array
4915 element type */
4916 s = sym_push(SYM_FIELD, type, 0, n);
4917 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4918 type->ref = s;
4919 if (vla_array_str) {
4920 if (t1 & VT_VLA)
4921 s->vla_array_str = vla_array_str;
4922 else
4923 tok_str_free_str(vla_array_str);
4926 return 1;
4929 /* Parse a type declarator (except basic type), and return the type
4930 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4931 expected. 'type' should contain the basic type. 'ad' is the
4932 attribute definition of the basic type. It can be modified by
4933 type_decl(). If this (possibly abstract) declarator is a pointer chain
4934 it returns the innermost pointed to type (equals *type, but is a different
4935 pointer), otherwise returns type itself, that's used for recursive calls. */
4936 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4938 CType *post, *ret;
4939 int qualifiers, storage;
4941 /* recursive type, remove storage bits first, apply them later again */
4942 storage = type->t & VT_STORAGE;
4943 type->t &= ~VT_STORAGE;
4944 post = ret = type;
4946 while (tok == '*') {
4947 qualifiers = 0;
4948 redo:
4949 next();
4950 switch(tok) {
4951 case TOK__Atomic:
4952 qualifiers |= VT_ATOMIC;
4953 goto redo;
4954 case TOK_CONST1:
4955 case TOK_CONST2:
4956 case TOK_CONST3:
4957 qualifiers |= VT_CONSTANT;
4958 goto redo;
4959 case TOK_VOLATILE1:
4960 case TOK_VOLATILE2:
4961 case TOK_VOLATILE3:
4962 qualifiers |= VT_VOLATILE;
4963 goto redo;
4964 case TOK_RESTRICT1:
4965 case TOK_RESTRICT2:
4966 case TOK_RESTRICT3:
4967 goto redo;
4968 /* XXX: clarify attribute handling */
4969 case TOK_ATTRIBUTE1:
4970 case TOK_ATTRIBUTE2:
4971 parse_attribute(ad);
4972 break;
4974 mk_pointer(type);
4975 type->t |= qualifiers;
4976 if (ret == type)
4977 /* innermost pointed to type is the one for the first derivation */
4978 ret = pointed_type(type);
4981 if (tok == '(') {
4982 /* This is possibly a parameter type list for abstract declarators
4983 ('int ()'), use post_type for testing this. */
4984 if (!post_type(type, ad, 0, td)) {
4985 /* It's not, so it's a nested declarator, and the post operations
4986 apply to the innermost pointed to type (if any). */
4987 /* XXX: this is not correct to modify 'ad' at this point, but
4988 the syntax is not clear */
4989 parse_attribute(ad);
4990 post = type_decl(type, ad, v, td);
4991 skip(')');
4992 } else
4993 goto abstract;
4994 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4995 /* type identifier */
4996 *v = tok;
4997 next();
4998 } else {
4999 abstract:
5000 if (!(td & TYPE_ABSTRACT))
5001 expect("identifier");
5002 *v = 0;
5004 post_type(post, ad, storage, td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5005 parse_attribute(ad);
5006 type->t |= storage;
5007 return ret;
5010 /* indirection with full error checking and bound check */
5011 ST_FUNC void indir(void)
5013 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5014 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5015 return;
5016 expect("pointer");
5018 if (vtop->r & VT_LVAL)
5019 gv(RC_INT);
5020 vtop->type = *pointed_type(&vtop->type);
5021 /* Arrays and functions are never lvalues */
5022 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5023 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5024 vtop->r |= VT_LVAL;
5025 /* if bound checking, the referenced pointer must be checked */
5026 #ifdef CONFIG_TCC_BCHECK
5027 if (tcc_state->do_bounds_check)
5028 vtop->r |= VT_MUSTBOUND;
5029 #endif
5033 /* pass a parameter to a function and do type checking and casting */
5034 static void gfunc_param_typed(Sym *func, Sym *arg)
5036 int func_type;
5037 CType type;
5039 func_type = func->f.func_type;
5040 if (func_type == FUNC_OLD ||
5041 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5042 /* default casting : only need to convert float to double */
5043 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5044 gen_cast_s(VT_DOUBLE);
5045 } else if (vtop->type.t & VT_BITFIELD) {
5046 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5047 type.ref = vtop->type.ref;
5048 gen_cast(&type);
5049 } else if (vtop->r & VT_MUSTCAST) {
5050 force_charshort_cast();
5052 } else if (arg == NULL) {
5053 tcc_error("too many arguments to function");
5054 } else {
5055 type = arg->type;
5056 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5057 gen_assign_cast(&type);
5061 /* parse an expression and return its type without any side effect. */
5062 static void expr_type(CType *type, void (*expr_fn)(void))
5064 nocode_wanted++;
5065 expr_fn();
5066 *type = vtop->type;
5067 vpop();
5068 nocode_wanted--;
5071 /* parse an expression of the form '(type)' or '(expr)' and return its
5072 type */
5073 static void parse_expr_type(CType *type)
5075 int n;
5076 AttributeDef ad;
5078 skip('(');
5079 if (parse_btype(type, &ad)) {
5080 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5081 } else {
5082 expr_type(type, gexpr);
5084 skip(')');
5087 static void parse_type(CType *type)
5089 AttributeDef ad;
5090 int n;
5092 if (!parse_btype(type, &ad)) {
5093 expect("type");
5095 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5098 static void parse_builtin_params(int nc, const char *args)
5100 char c, sep = '(';
5101 CType type;
5102 if (nc)
5103 nocode_wanted++;
5104 next();
5105 if (*args == 0)
5106 skip(sep);
5107 while ((c = *args++)) {
5108 skip(sep);
5109 sep = ',';
5110 if (c == 't') {
5111 parse_type(&type);
5112 vpush(&type);
5113 continue;
5115 expr_eq();
5116 type.ref = NULL;
5117 type.t = 0;
5118 switch (c) {
5119 case 'e':
5120 continue;
5121 case 'V':
5122 type.t = VT_CONSTANT;
5123 case 'v':
5124 type.t |= VT_VOID;
5125 mk_pointer (&type);
5126 break;
5127 case 'S':
5128 type.t = VT_CONSTANT;
5129 case 's':
5130 type.t |= char_type.t;
5131 mk_pointer (&type);
5132 break;
5133 case 'i':
5134 type.t = VT_INT;
5135 break;
5136 case 'l':
5137 type.t = VT_SIZE_T;
5138 break;
5139 default:
5140 break;
5142 gen_assign_cast(&type);
5144 skip(')');
5145 if (nc)
5146 nocode_wanted--;
5149 static void parse_atomic(int atok)
5151 int size, align, arg;
5152 CType *atom, *atom_ptr, ct = {0};
5153 char buf[40];
5154 static const char *const templates[] = {
5156 * Each entry consists of callback and function template.
5157 * The template represents argument types and return type.
5159 * ? void (return-only)
5160 * b bool
5161 * a atomic
5162 * A read-only atomic
5163 * p pointer to memory
5164 * v value
5165 * m memory model
5168 /* keep in order of appearance in tcctok.h: */
5169 /* __atomic_store */ "avm.?",
5170 /* __atomic_load */ "Am.v",
5171 /* __atomic_exchange */ "avm.v",
5172 /* __atomic_compare_exchange */ "apvbmm.b",
5173 /* __atomic_fetch_add */ "avm.v",
5174 /* __atomic_fetch_sub */ "avm.v",
5175 /* __atomic_fetch_or */ "avm.v",
5176 /* __atomic_fetch_xor */ "avm.v",
5177 /* __atomic_fetch_and */ "avm.v"
5179 const char *template = templates[(atok - TOK___atomic_store)];
5181 atom = atom_ptr = NULL;
5182 size = 0; /* pacify compiler */
5183 next();
5184 skip('(');
5185 for (arg = 0;;) {
5186 expr_eq();
5187 switch (template[arg]) {
5188 case 'a':
5189 case 'A':
5190 atom_ptr = &vtop->type;
5191 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5192 expect("pointer");
5193 atom = pointed_type(atom_ptr);
5194 size = type_size(atom, &align);
5195 if (size > 8
5196 || (size & (size - 1))
5197 || (atok > TOK___atomic_compare_exchange
5198 && (0 == btype_size(atom->t & VT_BTYPE)
5199 || (atom->t & VT_BTYPE) == VT_PTR)))
5200 expect("integral or integer-sized pointer target type");
5201 /* GCC does not care either: */
5202 /* if (!(atom->t & VT_ATOMIC))
5203 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5204 break;
5206 case 'p':
5207 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5208 || type_size(pointed_type(&vtop->type), &align) != size)
5209 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5210 gen_assign_cast(atom_ptr);
5211 break;
5212 case 'v':
5213 gen_assign_cast(atom);
5214 break;
5215 case 'm':
5216 gen_assign_cast(&int_type);
5217 break;
5218 case 'b':
5219 ct.t = VT_BOOL;
5220 gen_assign_cast(&ct);
5221 break;
5223 if ('.' == template[++arg])
5224 break;
5225 skip(',');
5227 skip(')');
5229 ct.t = VT_VOID;
5230 switch (template[arg + 1]) {
5231 case 'b':
5232 ct.t = VT_BOOL;
5233 break;
5234 case 'v':
5235 ct = *atom;
5236 break;
5239 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5240 vpush_helper_func(tok_alloc_const(buf));
5241 vrott(arg + 1);
5242 gfunc_call(arg);
5244 vpush(&ct);
5245 PUT_R_RET(vtop, ct.t);
5246 if (ct.t == VT_BOOL) {
5247 #ifdef PROMOTE_RET
5248 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5249 #else
5250 vtop->type.t = VT_INT;
5251 #endif
/* Parse a unary expression: prefix operators, primary expressions
   (constants, strings, casts, compound literals, statement
   expressions, builtins, _Generic, identifiers) and postfix
   operators (++/--, . and ->, [], function calls).  The result is
   pushed onto the value stack (vtop). */
5255 ST_FUNC void unary(void)
5257 int n, t, align, size, r, sizeof_caller;
5258 CType type;
5259 Sym *s;
5260 AttributeDef ad;
5262 /* generate line number info */
5263 if (debug_modes)
5264 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* reset in_sizeof so nested expressions evaluate normally; the saved
   value is consulted by the '(' case below for 'sizeof (type)' */
5266 sizeof_caller = in_sizeof;
5267 in_sizeof = 0;
5268 type.ref = NULL;
5269 /* XXX: GCC 2.95.3 does not generate a table although it should be
5270 better here */
5271 tok_next:
5272 switch(tok) {
5273 case TOK_EXTENSION:
5274 next();
5275 goto tok_next;
5276 case TOK_LCHAR:
5277 #ifdef TCC_TARGET_PE
5278 t = VT_SHORT|VT_UNSIGNED;
5279 goto push_tokc;
5280 #endif
5281 case TOK_CINT:
5282 case TOK_CCHAR:
5283 t = VT_INT;
5284 push_tokc:
5285 type.t = t;
5286 vsetc(&type, VT_CONST, &tokc);
5287 next();
5288 break;
5289 case TOK_CUINT:
5290 t = VT_INT | VT_UNSIGNED;
5291 goto push_tokc;
5292 case TOK_CLLONG:
5293 t = VT_LLONG;
5294 goto push_tokc;
5295 case TOK_CULLONG:
5296 t = VT_LLONG | VT_UNSIGNED;
5297 goto push_tokc;
5298 case TOK_CFLOAT:
5299 t = VT_FLOAT;
5300 goto push_tokc;
5301 case TOK_CDOUBLE:
5302 t = VT_DOUBLE;
5303 goto push_tokc;
5304 case TOK_CLDOUBLE:
5305 t = VT_LDOUBLE;
5306 goto push_tokc;
5307 case TOK_CLONG:
5308 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5309 goto push_tokc;
5310 case TOK_CULONG:
5311 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5312 goto push_tokc;
5313 case TOK___FUNCTION__:
5314 if (!gnu_ext)
5315 goto tok_identifier;
5316 /* fall thru */
5317 case TOK___FUNC__:
5319 Section *sec;
5320 int len;
5321 /* special function name identifier */
5322 len = strlen(funcname) + 1;
5323 /* generate char[len] type */
5324 type.t = char_type.t;
5325 if (tcc_state->warn_write_strings & WARN_ON)
5326 type.t |= VT_CONSTANT;
5327 mk_pointer(&type);
5328 type.t |= VT_ARRAY;
5329 type.ref->c = len;
5330 sec = rodata_section;
5331 vpush_ref(&type, sec, sec->data_offset, NODATA_WANTED ? 0 : len);
5332 if (!NODATA_WANTED)
5333 memcpy(section_ptr_add(sec, len), funcname, len);
5334 next();
5336 break;
5337 case TOK_LSTR:
5338 #ifdef TCC_TARGET_PE
5339 t = VT_SHORT | VT_UNSIGNED;
5340 #else
5341 t = VT_INT;
5342 #endif
5343 goto str_init;
5344 case TOK_STR:
5345 /* string parsing */
5346 t = char_type.t;
5347 str_init:
5348 if (tcc_state->warn_write_strings & WARN_ON)
5349 t |= VT_CONSTANT;
5350 type.t = t;
5351 mk_pointer(&type);
5352 type.t |= VT_ARRAY;
5353 memset(&ad, 0, sizeof(AttributeDef));
5354 ad.section = rodata_section;
5355 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5356 break;
5357 case '(':
5358 next();
5359 /* cast ? */
5360 if (parse_btype(&type, &ad)) {
5361 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5362 skip(')');
5363 /* check ISOC99 compound literal */
5364 if (tok == '{') {
5365 /* data is allocated locally by default */
5366 if (global_expr)
5367 r = VT_CONST;
5368 else
5369 r = VT_LOCAL;
5370 /* all except arrays are lvalues */
5371 if (!(type.t & VT_ARRAY))
5372 r |= VT_LVAL;
5373 memset(&ad, 0, sizeof(AttributeDef));
5374 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5375 } else {
5376 if (sizeof_caller) {
5377 vpush(&type);
5378 return;
5380 unary();
5381 gen_cast(&type);
5383 } else if (tok == '{') {
5384 int saved_nocode_wanted = nocode_wanted;
5385 if (const_wanted && !(nocode_wanted & unevalmask))
5386 expect("constant");
5387 if (0 == local_scope)
5388 tcc_error("statement expression outside of function");
5389 /* save all registers */
5390 save_regs(0);
5391 /* statement expression : we do not accept break/continue
5392 inside as GCC does. We do retain the nocode_wanted state,
5393 as statement expressions can't ever be entered from the
5394 outside, so any reactivation of code emission (from labels
5395 or loop heads) can be disabled again after the end of it. */
5396 block(1);
5397 /* If the statement expr can be entered, then we retain the current
5398 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5399 If it can't be entered then the state is that from before the
5400 statement expression. */
5401 if (saved_nocode_wanted)
5402 nocode_wanted = saved_nocode_wanted;
5403 skip(')');
5404 } else {
5405 gexpr();
5406 skip(')');
5408 break;
5409 case '*':
5410 next();
5411 unary();
5412 indir();
5413 break;
5414 case '&':
5415 next();
5416 unary();
5417 /* functions names must be treated as function pointers,
5418 except for unary '&' and sizeof. Since we consider that
5419 functions are not lvalues, we only have to handle it
5420 there and in function calls. */
5421 /* arrays can also be used although they are not lvalues */
5422 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5423 !(vtop->type.t & VT_ARRAY))
5424 test_lvalue();
5425 if (vtop->sym)
5426 vtop->sym->a.addrtaken = 1;
5427 mk_pointer(&vtop->type);
5428 gaddrof();
5429 break;
5430 case '!':
5431 next();
5432 unary();
5433 gen_test_zero(TOK_EQ);
5434 break;
5435 case '~':
5436 next();
5437 unary();
5438 vpushi(-1);
5439 gen_op('^');
5440 break;
5441 case '+':
5442 next();
5443 unary();
5444 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5445 tcc_error("pointer not accepted for unary plus");
5446 /* In order to force cast, we add zero, except for floating point
5447 where we really need an noop (otherwise -0.0 will be transformed
5448 into +0.0). */
5449 if (!is_float(vtop->type.t)) {
5450 vpushi(0);
5451 gen_op('+');
5453 break;
5454 case TOK_SIZEOF:
5455 case TOK_ALIGNOF1:
5456 case TOK_ALIGNOF2:
5457 case TOK_ALIGNOF3:
5458 t = tok;
5459 next();
5460 in_sizeof++;
5461 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5462 if (t == TOK_SIZEOF) {
5463 vpush_type_size(&type, &align);
5464 gen_cast_s(VT_SIZE_T);
5465 } else {
5466 type_size(&type, &align);
5467 s = NULL;
5468 if (vtop[1].r & VT_SYM)
5469 s = vtop[1].sym; /* hack: accessing previous vtop */
5470 if (s && s->a.aligned)
5471 align = 1 << (s->a.aligned - 1);
5472 vpushs(align);
5474 break;
5476 case TOK_builtin_expect:
5477 /* __builtin_expect is a no-op for now */
5478 parse_builtin_params(0, "ee");
5479 vpop();
5480 break;
5481 case TOK_builtin_types_compatible_p:
5482 parse_builtin_params(0, "tt");
5483 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5484 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5485 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5486 vtop -= 2;
5487 vpushi(n);
5488 break;
5489 case TOK_builtin_choose_expr:
5491 int64_t c;
5492 next();
5493 skip('(');
5494 c = expr_const64();
5495 skip(',');
5496 if (!c) {
5497 nocode_wanted++;
5499 expr_eq();
5500 if (!c) {
5501 vpop();
5502 nocode_wanted--;
5504 skip(',');
5505 if (c) {
5506 nocode_wanted++;
5508 expr_eq();
5509 if (c) {
5510 vpop();
5511 nocode_wanted--;
5513 skip(')');
5515 break;
5516 case TOK_builtin_constant_p:
5517 parse_builtin_params(1, "e");
5518 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5519 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5520 vtop--;
5521 vpushi(n);
5522 break;
5523 case TOK_builtin_frame_address:
5524 case TOK_builtin_return_address:
5526 int tok1 = tok;
5527 int level;
5528 next();
5529 skip('(');
5530 if (tok != TOK_CINT) {
5531 tcc_error("%s only takes positive integers",
5532 tok1 == TOK_builtin_return_address ?
5533 "__builtin_return_address" :
5534 "__builtin_frame_address");
5536 level = (uint32_t)tokc.i;
5537 next();
5538 skip(')');
5539 type.t = VT_VOID;
5540 mk_pointer(&type);
5541 vset(&type, VT_LOCAL, 0); /* local frame */
5542 while (level--) {
5543 #ifdef TCC_TARGET_RISCV64
5544 vpushi(2*PTR_SIZE);
5545 gen_op('-');
5546 #endif
5547 mk_pointer(&vtop->type);
5548 indir(); /* -> parent frame */
5550 if (tok1 == TOK_builtin_return_address) {
5551 // assume return address is just above frame pointer on stack
5552 #ifdef TCC_TARGET_ARM
5553 vpushi(2*PTR_SIZE);
5554 gen_op('+');
5555 #elif defined TCC_TARGET_RISCV64
5556 vpushi(PTR_SIZE);
5557 gen_op('-');
5558 #else
5559 vpushi(PTR_SIZE);
5560 gen_op('+');
5561 #endif
5562 mk_pointer(&vtop->type);
5563 indir();
5566 break;
5567 #ifdef TCC_TARGET_RISCV64
5568 case TOK_builtin_va_start:
5569 parse_builtin_params(0, "ee");
5570 r = vtop->r & VT_VALMASK;
5571 if (r == VT_LLOCAL)
5572 r = VT_LOCAL;
5573 if (r != VT_LOCAL)
5574 tcc_error("__builtin_va_start expects a local variable");
5575 gen_va_start();
5576 vstore();
5577 break;
5578 #endif
5579 #ifdef TCC_TARGET_X86_64
5580 #ifdef TCC_TARGET_PE
5581 case TOK_builtin_va_start:
5582 parse_builtin_params(0, "ee");
5583 r = vtop->r & VT_VALMASK;
5584 if (r == VT_LLOCAL)
5585 r = VT_LOCAL;
5586 if (r != VT_LOCAL)
5587 tcc_error("__builtin_va_start expects a local variable");
5588 vtop->r = r;
5589 vtop->type = char_pointer_type;
5590 vtop->c.i += 8;
5591 vstore();
5592 break;
5593 #else
5594 case TOK_builtin_va_arg_types:
5595 parse_builtin_params(0, "t");
5596 vpushi(classify_x86_64_va_arg(&vtop->type));
5597 vswap();
5598 vpop();
5599 break;
5600 #endif
5601 #endif
5603 #ifdef TCC_TARGET_ARM64
5604 case TOK_builtin_va_start: {
5605 parse_builtin_params(0, "ee");
5606 //xx check types
5607 gen_va_start();
5608 vpushi(0);
5609 vtop->type.t = VT_VOID;
5610 break;
5612 case TOK_builtin_va_arg: {
5613 parse_builtin_params(0, "et");
5614 type = vtop->type;
5615 vpop();
5616 //xx check types
5617 gen_va_arg(&type);
5618 vtop->type = type;
5619 break;
5621 case TOK___arm64_clear_cache: {
5622 parse_builtin_params(0, "ee");
5623 gen_clear_cache();
5624 vpushi(0);
5625 vtop->type.t = VT_VOID;
5626 break;
5628 #endif
5630 /* atomic operations */
5631 case TOK___atomic_store:
5632 case TOK___atomic_load:
5633 case TOK___atomic_exchange:
5634 case TOK___atomic_compare_exchange:
5635 case TOK___atomic_fetch_add:
5636 case TOK___atomic_fetch_sub:
5637 case TOK___atomic_fetch_or:
5638 case TOK___atomic_fetch_xor:
5639 case TOK___atomic_fetch_and:
5640 parse_atomic(tok);
5641 break;
5643 /* pre operations */
5644 case TOK_INC:
5645 case TOK_DEC:
5646 t = tok;
5647 next();
5648 unary();
5649 inc(0, t);
5650 break;
5651 case '-':
5652 next();
5653 unary();
5654 if (is_float(vtop->type.t)) {
5655 gen_opif(TOK_NEG);
5656 } else {
5657 vpushi(0);
5658 vswap();
5659 gen_op('-');
5661 break;
5662 case TOK_LAND:
5663 if (!gnu_ext)
5664 goto tok_identifier;
5665 next();
5666 /* allow to take the address of a label */
5667 if (tok < TOK_UIDENT)
5668 expect("label identifier");
5669 s = label_find(tok);
5670 if (!s) {
5671 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5672 } else {
5673 if (s->r == LABEL_DECLARED)
5674 s->r = LABEL_FORWARD;
5676 if (!s->type.t) {
5677 s->type.t = VT_VOID;
5678 mk_pointer(&s->type);
5679 s->type.t |= VT_STATIC;
5681 vpushsym(&s->type, s);
5682 next();
5683 break;
/* C11 _Generic: select the association whose type matches the
   controlling expression's type, or 'default' */
5685 case TOK_GENERIC:
5687 CType controlling_type;
5688 int has_default = 0;
5689 int has_match = 0;
5690 int learn = 0;
5691 TokenString *str = NULL;
5692 int saved_const_wanted = const_wanted;
5694 next();
5695 skip('(');
5696 const_wanted = 0;
5697 expr_type(&controlling_type, expr_eq);
5698 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5699 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5700 mk_pointer(&controlling_type);
5701 const_wanted = saved_const_wanted;
5702 for (;;) {
5703 learn = 0;
5704 skip(',');
5705 if (tok == TOK_DEFAULT) {
5706 if (has_default)
5707 tcc_error("too many 'default'");
5708 has_default = 1;
5709 if (!has_match)
5710 learn = 1;
5711 next();
5712 } else {
5713 AttributeDef ad_tmp;
5714 int itmp;
5715 CType cur_type;
5717 in_generic++;
5718 parse_btype(&cur_type, &ad_tmp);
5719 in_generic--;
5721 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5722 if (compare_types(&controlling_type, &cur_type, 0)) {
5723 if (has_match) {
5724 tcc_error("type match twice");
5726 has_match = 1;
5727 learn = 1;
5730 skip(':');
5731 if (learn) {
5732 if (str)
5733 tok_str_free(str);
5734 skip_or_save_block(&str);
5735 } else {
5736 skip_or_save_block(NULL);
5738 if (tok == ')')
5739 break;
5741 if (!str) {
5742 char buf[60];
5743 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5744 tcc_error("type '%s' does not match any association", buf);
5746 begin_macro(str, 1);
5747 next();
5748 expr_eq();
5749 if (tok != TOK_EOF)
5750 expect(",");
5751 end_macro();
5752 next();
5753 break;
5755 // special qnan , snan and infinity values
5756 case TOK___NAN__:
5757 n = 0x7fc00000;
5758 special_math_val:
5759 vpushi(n);
5760 vtop->type.t = VT_FLOAT;
5761 next();
5762 break;
5763 case TOK___SNAN__:
5764 n = 0x7f800001;
5765 goto special_math_val;
5766 case TOK___INF__:
5767 n = 0x7f800000;
5768 goto special_math_val;
5770 default:
5771 tok_identifier:
5772 t = tok;
5773 next();
5774 if (t < TOK_UIDENT)
5775 expect("identifier");
5776 s = sym_find(t);
5777 if (!s || IS_ASM_SYM(s)) {
5778 const char *name = get_tok_str(t, NULL);
5779 if (tok != '(')
5780 tcc_error("'%s' undeclared", name);
5781 /* for simple function calls, we tolerate undeclared
5782 external reference to int() function */
5783 tcc_warning_c(warn_implicit_function_declaration)(
5784 "implicit declaration of function '%s'", name);
5785 s = external_global_sym(t, &func_old_type);
5788 r = s->r;
5789 /* A symbol that has a register is a local register variable,
5790 which starts out as VT_LOCAL value. */
5791 if ((r & VT_VALMASK) < VT_CONST)
5792 r = (r & ~VT_VALMASK) | VT_LOCAL;
5794 vset(&s->type, r, s->c);
5795 /* Point to s as backpointer (even without r&VT_SYM).
5796 Will be used by at least the x86 inline asm parser for
5797 regvars. */
5798 vtop->sym = s;
5800 if (r & VT_SYM) {
5801 vtop->c.i = 0;
5802 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5803 vtop->c.i = s->enum_val;
5805 break;
5808 /* post operations */
5809 while (1) {
5810 if (tok == TOK_INC || tok == TOK_DEC) {
5811 inc(1, tok);
5812 next();
5813 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5814 int qualifiers, cumofs = 0;
5815 /* field */
5816 if (tok == TOK_ARROW)
5817 indir();
5818 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5819 test_lvalue();
5820 gaddrof();
5821 /* expect pointer on structure */
5822 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5823 expect("struct or union");
5824 if (tok == TOK_CDOUBLE)
5825 expect("field name");
5826 next();
5827 if (tok == TOK_CINT || tok == TOK_CUINT)
5828 expect("field name");
5829 s = find_field(&vtop->type, tok, &cumofs);
5830 if (!s)
5831 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5832 /* add field offset to pointer */
5833 vtop->type = char_pointer_type; /* change type to 'char *' */
5834 vpushi(cumofs + s->c);
5835 gen_op('+');
5836 /* change type to field type, and set to lvalue */
5837 vtop->type = s->type;
5838 vtop->type.t |= qualifiers;
5839 /* an array is never an lvalue */
5840 if (!(vtop->type.t & VT_ARRAY)) {
5841 vtop->r |= VT_LVAL;
5842 #ifdef CONFIG_TCC_BCHECK
5843 /* if bound checking, the referenced pointer must be checked */
5844 if (tcc_state->do_bounds_check)
5845 vtop->r |= VT_MUSTBOUND;
5846 #endif
5848 next();
5849 } else if (tok == '[') {
5850 next();
5851 gexpr();
5852 gen_op('+');
5853 indir();
5854 skip(']');
5855 } else if (tok == '(') {
5856 SValue ret;
5857 Sym *sa;
5858 int nb_args, ret_nregs, ret_align, regsize, variadic;
5860 /* function call */
5861 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5862 /* pointer test (no array accepted) */
5863 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5864 vtop->type = *pointed_type(&vtop->type);
5865 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5866 goto error_func;
5867 } else {
5868 error_func:
5869 expect("function pointer");
5871 } else {
5872 vtop->r &= ~VT_LVAL; /* no lvalue */
5874 /* get return type */
5875 s = vtop->type.ref;
5876 next();
5877 sa = s->next; /* first parameter */
5878 nb_args = regsize = 0;
5879 ret.r2 = VT_CONST;
5880 /* compute first implicit argument if a structure is returned */
5881 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5882 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5883 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5884 &ret_align, &regsize);
5885 if (ret_nregs <= 0) {
5886 /* get some space for the returned structure */
5887 size = type_size(&s->type, &align);
5888 #ifdef TCC_TARGET_ARM64
5889 /* On arm64, a small struct is return in registers.
5890 It is much easier to write it to memory if we know
5891 that we are allowed to write some extra bytes, so
5892 round the allocated space up to a power of 2: */
5893 if (size < 16)
5894 while (size & (size - 1))
5895 size = (size | (size - 1)) + 1;
5896 #endif
5897 loc = (loc - size) & -align;
5898 ret.type = s->type;
5899 ret.r = VT_LOCAL | VT_LVAL;
5900 /* pass it as 'int' to avoid structure arg passing
5901 problems */
5902 vseti(VT_LOCAL, loc);
5903 #ifdef CONFIG_TCC_BCHECK
5904 if (tcc_state->do_bounds_check)
5905 --loc;
5906 #endif
5907 ret.c = vtop->c;
5908 if (ret_nregs < 0)
5909 vtop--;
5910 else
5911 nb_args++;
5913 } else {
5914 ret_nregs = 1;
5915 ret.type = s->type;
5918 if (ret_nregs > 0) {
5919 /* return in register */
5920 ret.c.i = 0;
5921 PUT_R_RET(&ret, ret.type.t);
5923 if (tok != ')') {
5924 for(;;) {
5925 expr_eq();
5926 gfunc_param_typed(s, sa);
5927 nb_args++;
5928 if (sa)
5929 sa = sa->next;
5930 if (tok == ')')
5931 break;
5932 skip(',');
5935 if (sa)
5936 tcc_error("too few arguments to function");
5937 skip(')');
5938 gfunc_call(nb_args);
5940 if (ret_nregs < 0) {
5941 vsetc(&ret.type, ret.r, &ret.c);
5942 #ifdef TCC_TARGET_RISCV64
5943 arch_transfer_ret_regs(1);
5944 #endif
5945 } else {
5946 /* return value */
5947 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5948 vsetc(&ret.type, r, &ret.c);
5949 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5952 /* handle packed struct return */
5953 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5954 int addr, offset;
5956 size = type_size(&s->type, &align);
5957 /* We're writing whole regs often, make sure there's enough
5958 space. Assume register size is power of 2. */
5959 if (regsize > align)
5960 align = regsize;
5961 loc = (loc - size) & -align;
5962 addr = loc;
5963 offset = 0;
5964 for (;;) {
5965 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5966 vswap();
5967 vstore();
5968 vtop--;
5969 if (--ret_nregs == 0)
5970 break;
5971 offset += regsize;
5973 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5976 /* Promote char/short return values. This is matters only
5977 for calling function that were not compiled by TCC and
5978 only on some architectures. For those where it doesn't
5979 matter we expect things to be already promoted to int,
5980 but not larger. */
5981 t = s->type.t & VT_BTYPE;
5982 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5983 #ifdef PROMOTE_RET
5984 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5985 #else
5986 vtop->type.t = VT_INT;
5987 #endif
5990 if (s->f.func_noreturn) {
5991 if (debug_modes)
5992 tcc_tcov_block_end(tcc_state, -1);
5993 CODE_OFF();
5995 } else {
5996 break;
6001 #ifndef precedence_parser /* original top-down parser */
6003 static void expr_prod(void)
6005 int t;
6007 unary();
6008 while ((t = tok) == '*' || t == '/' || t == '%') {
6009 next();
6010 unary();
6011 gen_op(t);
6015 static void expr_sum(void)
6017 int t;
6019 expr_prod();
6020 while ((t = tok) == '+' || t == '-') {
6021 next();
6022 expr_prod();
6023 gen_op(t);
6027 static void expr_shift(void)
6029 int t;
6031 expr_sum();
6032 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6033 next();
6034 expr_sum();
6035 gen_op(t);
6039 static void expr_cmp(void)
6041 int t;
6043 expr_shift();
6044 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6045 t == TOK_ULT || t == TOK_UGE) {
6046 next();
6047 expr_shift();
6048 gen_op(t);
6052 static void expr_cmpeq(void)
6054 int t;
6056 expr_cmp();
6057 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6058 next();
6059 expr_cmp();
6060 gen_op(t);
6064 static void expr_and(void)
6066 expr_cmpeq();
6067 while (tok == '&') {
6068 next();
6069 expr_cmpeq();
6070 gen_op('&');
6074 static void expr_xor(void)
6076 expr_and();
6077 while (tok == '^') {
6078 next();
6079 expr_and();
6080 gen_op('^');
6084 static void expr_or(void)
6086 expr_xor();
6087 while (tok == '|') {
6088 next();
6089 expr_xor();
6090 gen_op('|');
6094 static void expr_landor(int op);
6096 static void expr_land(void)
6098 expr_or();
6099 if (tok == TOK_LAND)
6100 expr_landor(tok);
6103 static void expr_lor(void)
6105 expr_land();
6106 if (tok == TOK_LOR)
6107 expr_landor(tok);
6110 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6111 #else /* defined precedence_parser */
6112 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6113 # define expr_lor() unary(), expr_infix(1)
6115 static int precedence(int tok)
6117 switch (tok) {
6118 case TOK_LOR: return 1;
6119 case TOK_LAND: return 2;
6120 case '|': return 3;
6121 case '^': return 4;
6122 case '&': return 5;
6123 case TOK_EQ: case TOK_NE: return 6;
6124 relat: case TOK_ULT: case TOK_UGE: return 7;
6125 case TOK_SHL: case TOK_SAR: return 8;
6126 case '+': case '-': return 9;
6127 case '*': case '/': case '%': return 10;
6128 default:
6129 if (tok >= TOK_ULE && tok <= TOK_GT)
6130 goto relat;
6131 return 0;
/* cached precedence for all single-byte tokens, filled once at startup */
static unsigned char prec[256];
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
6141 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6143 static void expr_landor(int op);
6145 static void expr_infix(int p)
6147 int t = tok, p2;
6148 while ((p2 = precedence(t)) >= p) {
6149 if (t == TOK_LOR || t == TOK_LAND) {
6150 expr_landor(t);
6151 } else {
6152 next();
6153 unary();
6154 if (precedence(tok) > p2)
6155 expr_infix(p2 + 1);
6156 gen_op(t);
6158 t = tok;
6161 #endif
6163 /* Assuming vtop is a value used in a conditional context
6164 (i.e. compared with zero) return 0 if it's false, 1 if
6165 true and -1 if it can't be statically determined. */
6166 static int condition_3way(void)
6168 int c = -1;
6169 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6170 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6171 vdup();
6172 gen_cast_s(VT_BOOL);
6173 c = vtop->c.i;
6174 vpop();
6176 return c;
6179 static void expr_landor(int op)
6181 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6182 for(;;) {
6183 c = f ? i : condition_3way();
6184 if (c < 0)
6185 save_regs(1), cc = 0;
6186 else if (c != i)
6187 nocode_wanted++, f = 1;
6188 if (tok != op)
6189 break;
6190 if (c < 0)
6191 t = gvtst(i, t);
6192 else
6193 vpop();
6194 next();
6195 expr_landor_next(op);
6197 if (cc || f) {
6198 vpop();
6199 vpushi(i ^ f);
6200 gsym(t);
6201 nocode_wanted -= f;
6202 } else {
6203 gvtst_set(i, t);
6207 static int is_cond_bool(SValue *sv)
6209 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6210 && (sv->type.t & VT_BTYPE) == VT_INT)
6211 return (unsigned)sv->c.i < 2;
6212 if (sv->r == VT_CMP)
6213 return 1;
6214 return 0;
/* Parse a conditional expression 'a ? b : c', including the GNU
   extension 'a ?: c'.  Constant conditions (c == 0 or 1) suppress code
   generation for the dead branch via nocode_wanted. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;
    int ncw_prev;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way(); /* -1 = runtime, 0/1 = statically known */
        g = (tok == ':' && gnu_ext); /* 'a ?: c' reuses the condition value */
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0); /* jump to ':' branch when false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup();
            tt = gvtst(0, 0);
        }

        ncw_prev = nocode_wanted;
        if (c == 0)
            nocode_wanted++; /* then-branch is dead */
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* skip over the else-branch code */
            gsym(tt);
        } else
            u = 0;

        nocode_wanted = ncw_prev;
        if (c == 1)
            nocode_wanted++; /* else-branch is dead */
        skip(':');
        expr_cond();

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            nocode_wanted = ncw_prev;
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");
        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        nocode_wanted = ncw_prev;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* both branches live in the same register r2 */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6345 static void expr_eq(void)
6347 int t;
6349 expr_cond();
6350 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6351 test_lvalue();
6352 next();
6353 if (t == '=') {
6354 expr_eq();
6355 } else {
6356 vdup();
6357 expr_eq();
6358 gen_op(TOK_ASSIGN_OP(t));
6360 vstore();
6364 ST_FUNC void gexpr(void)
6366 while (1) {
6367 expr_eq();
6368 if (tok != ',')
6369 break;
6370 vpop();
6371 next();
6375 /* parse a constant expression and return value in vtop. */
6376 static void expr_const1(void)
6378 const_wanted++;
6379 nocode_wanted += unevalmask + 1;
6380 expr_cond();
6381 nocode_wanted -= unevalmask + 1;
6382 const_wanted--;
6385 /* parse an integer constant and return its value. */
6386 static inline int64_t expr_const64(void)
6388 int64_t c;
6389 expr_const1();
6390 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6391 expect("constant expression");
6392 c = vtop->c.i;
6393 vpop();
6394 return c;
6397 /* parse an integer constant and return its value.
6398 Complain if it doesn't fit 32bit (signed or unsigned). */
6399 ST_FUNC int expr_const(void)
6401 int c;
6402 int64_t wc = expr_const64();
6403 c = wc;
6404 if (c != wc && (unsigned)c != wc)
6405 tcc_error("constant exceeds 32 bit");
6406 return c;
6409 /* ------------------------------------------------------------------------- */
6410 /* return from function */
6412 #ifndef TCC_TARGET_ARM64
/* Generate the code moving the value on top of the vstack into the
   function return slot (registers or hidden struct-return pointer).
   The value is removed from the vstack afterwards. */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        /* target ABI decides how the struct comes back:
           < 0: target-specific, 0: via hidden pointer, > 0: in registers */
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned on the stack, copy it
               to a properly aligned temporary first */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
6474 #endif
6476 static void check_func_return(void)
6478 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6479 return;
6480 if (!strcmp (funcname, "main")
6481 && (func_vt.t & VT_BTYPE) == VT_INT) {
6482 /* main returns 0 by default */
6483 vpushi(0);
6484 gen_assign_cast(&func_vt);
6485 gfunc_return(&func_vt);
6486 } else {
6487 tcc_warning("function might return no value: '%s'", funcname);
6491 /* ------------------------------------------------------------------------- */
6492 /* switch/case */
6494 static int case_cmpi(const void *pa, const void *pb)
6496 int64_t a = (*(struct case_t**) pa)->v1;
6497 int64_t b = (*(struct case_t**) pb)->v1;
6498 return a < b ? -1 : a > b;
6501 static int case_cmpu(const void *pa, const void *pb)
6503 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6504 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6505 return a < b ? -1 : a > b;
/* Emit a test of the condition on vtop and bind the resulting jump to
   address 'a' (taken when the condition holds, cf. gvtst with inv == 0);
   't' is an existing forward-jump chain combined into the test. */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
/* Emit the dispatch code for a sorted array BASE of LEN case ranges,
   comparing against the switch value on vtop: binary search down to
   8 entries, then a linear scan.  Non-matching control flow is chained
   into *BSYM (the 'no case matched' jump list). */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0); /* taken when x > v2: handle upper half later */
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test suffices */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6569 /* ------------------------------------------------------------------------- */
6570 /* __attribute__((cleanup(fn))) */
/* Emit calls to the cleanup functions registered on the current
   scope's cleanup chain, innermost first, stopping at STOP
   (the chain node of an enclosing scope, or NULL for all). */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;      /* the cleanup function */
        Sym *vs = cls->prev_tok;  /* the variable to clean up */

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        /* cleanup functions take the address of the variable */
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* Emit the cleanup calls needed when jumping backwards to a label whose
   cleanup state is CLEANUPSTATE: only variables of scopes left by the
   jump (those below the nearest common ancestor) are cleaned up. */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
      ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
      ;
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
      ;

    try_call_scope_cleanup(cc);
}
/* call 'func' for each __attribute__((cleanup(func))).  Called when
   leaving a scope O: emits the cleanup calls for the normal fall-through
   path and also patches pending forward gotos that cross this scope so
   their cleanups run before the jump continues outwards. */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            if (!jmp)
                jmp = gjmp(0); /* fall-through path skips the goto stubs */
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0); /* re-chain the goto past this scope */
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    /* normal (fall-through) cleanups for this scope */
    try_call_scope_cleanup(o->cl.s);
}
6636 /* ------------------------------------------------------------------------- */
6637 /* VLA */
/* Restore the stack pointer from the saved VLA location LOC, if any. */
static void vla_restore(int loc)
{
    if (!loc)
        return;
    gen_vla_sp_restore(loc);
}
6645 static void vla_leave(struct scope *o)
6647 struct scope *c = cur_scope, *v = NULL;
6648 for (; c != o && c; c = c->prev)
6649 if (c->vla.num)
6650 v = c;
6651 if (v)
6652 vla_restore(v->vla.locorig);
6655 /* ------------------------------------------------------------------------- */
6656 /* local scopes */
6658 static void new_scope(struct scope *o)
6660 /* copy and link previous scope */
6661 *o = *cur_scope;
6662 o->prev = cur_scope;
6663 cur_scope = o;
6664 cur_scope->vla.num = 0;
6666 /* record local declaration stack position */
6667 o->lstk = local_stack;
6668 o->llstk = local_label_stack;
6669 ++local_scope;
6671 if (debug_modes)
6672 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
/* Leave scope O normally (fall through '}'), popping its labels and
   symbols and running its cleanups; IS_EXPR is true when O is the body
   of a statement expression. */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that. */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
}
6702 /* leave a scope via break/continue(/goto) */
6703 static void leave_scope(struct scope *o)
6705 if (!o)
6706 return;
6707 try_call_scope_cleanup(o->cl.s);
6708 vla_leave(o);
6711 /* ------------------------------------------------------------------------- */
6712 /* call block from 'for do while' loops */
6714 static void lblock(int *bsym, int *csym)
6716 struct scope *lo = loop_scope, *co = cur_scope;
6717 int *b = co->bsym, *c = co->csym;
6718 if (csym) {
6719 co->csym = csym;
6720 loop_scope = co;
6722 co->bsym = bsym;
6723 block(0);
6724 co->bsym = b;
6725 if (csym) {
6726 co->csym = c;
6727 loop_scope = lo;
/* Parse one statement and generate its code.  With IS_EXPR != 0 this is
   the body of a GNU statement expression: a default (void) value is
   pushed first and each expression statement replaces it, so the value
   of the last one is left on the vstack for the caller. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* jump over then-branch when false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }

    } else if (t == TOK_WHILE) {
        d = gind(); /* loop head: condition re-test address */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* exit when false; also the break chain */
        b = 0;           /* continue chain */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);

    } else if (t == '{') {
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                /* in a stmt expr, discard the value of the previous
                   statement before parsing the next one */
                if (is_expr)
                    vpop();
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return();

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF(); /* code after 'return' is unreachable */

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl0(VT_LOCAL, 1, NULL)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind(); /* c: condition address, d: continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted after the body, so the
               continue target d is moved here */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0); /* loop back while the condition holds */
        gsym_addr(c, d);
        gsym(a);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        cur_switch = sw;

        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);

        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);

        /* ranges are sorted: overlap shows up as adjacent collision */
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");

        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
        /* break label */
        gsym(a);

        dynarray_reset(&sw->p, &sw->n);
        cur_switch = sw->prev;
        tcc_free(sw);

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: 'case lo ... hi:' */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        if (debug_modes)
            tcc_tcov_reset_ind(tcc_state);
        cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        if (debug_modes)
            tcc_tcov_reset_ind(tcc_state);
        cur_switch->def_sym = gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        if (cur_scope->vla.num)
            vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: cleanups of scopes being left run first */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
            /* NOTE(review): a label here can re-enable code generation
               (via gind above) inside a statement expression that was
               itself parsed under nocode_wanted (i.e. not entered from
               the top).  Falling through the end of such a stmt expr
               lands in the suppressed surrounding expression code --
               TODO confirm this interaction is handled correctly. */
            vla_restore(cur_scope->vla.loc);
            if (tok != '}')
                goto again;
            /* we accept this, but it is a mistake */
            tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (is_expr) {
                    /* replace the previous statement's value */
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        int t;
        if (tok == TOK_EOF) {
            /* EOF is an error when saving or inside unbalanced pairs */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        t = tok;
        next();
        if (t == '{' || t == '(') {
            level++;
        } else if (t == '}' || t == ')') {
            level--;
            /* for a leading '{' stop right after its matching '}' */
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the saved token string */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
7115 #define EXPR_CONST 1
7116 #define EXPR_ANY 2
/* Parse one initializer element: with EXPR_CONST a constant expression
   is required (for static storage), with EXPR_ANY any assignment
   expression is accepted (for automatic storage). */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
7145 #if 1
/* Internal sanity check: an initializer write up to OFFSET must stay
   within the space already reserved for the object (section data for
   static storage, local stack area otherwise). */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
7152 #else
7153 #define init_assert(sec, offset)
7154 #endif
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* emit memset(local + c, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
        /* the argument push order differs per target ABI */
#ifdef TCC_TARGET_ARM
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
7176 #define DIF_FIRST 1
7177 #define DIF_SIZE_ONLY 2
7178 #define DIF_HAVE_ELEM 4
7179 #define DIF_CLEAR 8
/* delete relocations for specified range c ... c + size. Unfortunately
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the reloc array in place, dropping entries in range */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
7202 static void decl_design_flex(init_params *p, Sym *ref, int index)
7204 if (ref == p->flex_array_ref) {
7205 if (index >= ref->c)
7206 ref->c = index + 1;
7207 } else if (ref->c < 0)
7208 tcc_error("flexible array has zero size in this context");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c). This returns the new length of that. */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    /* old GNU designator syntax: 'field: value' */
    if (gnu_ext && tok >= TOK_UIDENT) {
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            /* GNU range designator '[lo ... hi]' */
            if (tok == TOK_DOTS && gnu_ext) {
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (!f)
                expect("field");
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs + f->c;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* no designator: advance to the next array index / struct field */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* skip anonymous bitfield padding members */
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        /* range designator: replicate the first element into the rest */
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
/* store a value or an expression directly in global data or in local array */
static void init_putv(init_params *p, CType *type, unsigned long c)
{
    int bt;
    void *ptr;
    CType dtype;
    int size, align;
    Section *sec = p->sec;
    uint64_t val;

    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    size = type_size(type, &align);
    if (type->t & VT_BITFIELD)
        size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
    init_assert(p, c + size);

    if (sec) {
        /* static storage: patch the value directly into section data */
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;

        if ((vtop->r & VT_SYM)
            && bt != VT_PTR
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            )
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            vtop--;
            return;
        }

        ptr = sec->data + c;
        val = vtop->c.i;

        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'.  The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one.  That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'.  Ignore
               pointer typed entities here.  Hopefully no real code
               will ever use compound literals with scalar type. */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over. */
            Section *ssec;
            ElfSym *esym;
            ElfW_Rel *rel;
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations.  Use the fact that relocs are
                   created in order, so look from the end of relocs
                   until we hit one before the copied region. */
                unsigned long relofs = ssec->reloc->data_offset;
                while (relofs >= sizeof(*rel)) {
                    relofs -= sizeof(*rel);
                    rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
#if PTR_SIZE == 8
                                   rel->r_addend
#else
                                   0
#endif
                                  );
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                /* write the bitfield byte by byte into the section data */
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    n = 8 - bit_pos;
                    if (n > bit_size)
                        n = bit_size;
                    v = val >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                }
            } else
            switch(bt) {
            case VT_BOOL:
                *(char *)ptr = val != 0;
                break;
            case VT_BYTE:
                *(char *)ptr = val;
                break;
            case VT_SHORT:
                write16le(ptr, val);
                break;
            case VT_FLOAT:
                write32le(ptr, val);
                break;
            case VT_DOUBLE:
                write64le(ptr, val);
                break;
            case VT_LDOUBLE:
#if defined TCC_IS_NATIVE_387
                /* Host and target platform may be different but both have x87.
                   On windows, tcc does not use VT_LDOUBLE, except when it is a
                   cross compiler.  In this case a mingw gcc as host compiler
                   comes here with 10-byte long doubles, while msvc or tcc won't.
                   tcc itself can still translate by asm.
                   In any case we avoid possibly random bytes 11 and 12.
                */
                if (sizeof (long double) >= 10)
                    memcpy(ptr, &vtop->c.ld, 10);
#ifdef __TINYC__
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
#endif
                else if (vtop->c.ld == 0.0)
                    ;
                else
#endif
                /* For other platforms it should work natively, but may not work
                   for cross compilers */
                if (sizeof(long double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
                else if (sizeof(double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
#ifndef TCC_CROSS_TEST
                else
                    tcc_error("can't cross compile long double constants");
#endif
                break;

#if PTR_SIZE == 8
            /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
            case VT_LLONG:
            case VT_PTR:
                if (vtop->r & VT_SYM)
                    greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                else
                    write64le(ptr, val);
                break;
            case VT_INT:
                write32le(ptr, val);
                break;
#else
            case VT_LLONG:
                write64le(ptr, val);
                break;
            case VT_PTR:
            case VT_INT:
                if (vtop->r & VT_SYM)
                    greloc(sec, vtop->sym, c, R_DATA_PTR);
                write32le(ptr, val);
                break;
#endif
            default:
                //tcc_internal_error("unexpected type");
                break;
            }
        }
        vtop--;
    } else {
        /* automatic storage: generate a store into the local object */
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        vswap();
        vstore();
        vpop();
    }
}
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
   dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
   size only evaluation is wanted (only for arrays).
   Other flags used below (semantics as visible from this function):
   DIF_HAVE_ELEM means the initializer element is already parsed and on
   the value stack; DIF_CLEAR means the containing object was already
   zero-filled, so zero stores can be skipped. */
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
{
    int len, n, no_oblock, i;
    int size1, align1;
    Sym *s, *f;
    Sym indexsym;
    CType *t1;

    /* generate line number info */
    if (debug_modes && !p->sec)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);

    if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        (!(flags & DIF_SIZE_ONLY)
            /* a struct may be initialized from a struct of same type, as in
                 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
               In that case we need to parse the element in order to check
               it for compatibility below */
            || (type->t & VT_BTYPE) == VT_STRUCT)
        ) {
        /* suppress code generation while sizing a stack-based init */
        int ncw_prev = nocode_wanted;
        if ((flags & DIF_SIZE_ONLY) && !p->sec)
            ++nocode_wanted;
        parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        nocode_wanted = ncw_prev;
        flags |= DIF_HAVE_ELEM;
    }

    if (type->t & VT_ARRAY) {
        no_oblock = 1;
        if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            skip('{');
            no_oblock = 0;
        }

        s = type->ref;
        n = s->c;                      /* declared element count (< 0 if flex) */
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);

        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            len = 0;
            cstr_reset(&initstr);
            if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
                tcc_error("unhandled string literal merging");
            /* concatenate adjacent string literals; drop the intermediate
               terminating zeros (initstr.size -= size1) */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                if (initstr.size)
                    initstr.size -= size1;
                if (tok == TOK_STR)
                    len += tokc.str.size;
                else
                    len += tokc.str.size / sizeof(nwchar_t);
                len--;                 /* don't count the literal's own NUL */
                cstr_cat(&initstr, tokc.str.data, tokc.str.size);
                next();
            }
            if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
                && tok != TOK_EOF) {
                /* Not a lone literal but part of a bigger expression.  */
                unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
                tokc.str.size = initstr.size;
                tokc.str.data = initstr.data;
                goto do_init_array;
            }

            decl_design_flex(p, s, len);
            if (!(flags & DIF_SIZE_ONLY)) {
                int nb = n, ch;
                if (len < nb)
                    nb = len;
                if (len > nb)
                    tcc_warning("initializer-string for array is too long");
                /* in order to go faster for common case (char
                   string in global variable, we handle it
                   specifically */
                if (p->sec && size1 == 1) {
                    init_assert(p, c + nb);
                    if (!NODATA_WANTED)
                        memcpy(p->sec->data + c, initstr.data, nb);
                } else {
                    for(i=0;i<n;i++) {
                        if (i >= nb) {
                            /* only add trailing zero if enough storage (no
                               warning in this case since it is standard) */
                            if (flags & DIF_CLEAR)
                                break;
                            if (n - i >= 4) {
                                init_putz(p, c + i * size1, (n - i) * size1);
                                break;
                            }
                            ch = 0;
                        } else if (size1 == 1)
                            ch = ((unsigned char *)initstr.data)[i];
                        else
                            ch = ((nwchar_t *)initstr.data)[i];
                        vpushi(ch);
                        init_putv(p, t1, c + i * size1);
                    }
                }
            }
        } else {

          do_init_array:
            /* use a fake Sym as running array index for decl_designator */
            indexsym.c = 0;
            f = &indexsym;

          do_init_list:
            /* zero memory once in advance */
            if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
                init_putz(p, c, n*size1);
                flags |= DIF_CLEAR;
            }

            len = 0;
            /* GNU extension: if the initializer is empty for a flex array,
               it's size is zero.  We won't enter the loop, so set the size
               now. */
            decl_design_flex(p, s, len);
            while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
                len = decl_designator(p, type, c, &f, flags, len);
                flags &= ~DIF_HAVE_ELEM;
                if (type->t & VT_ARRAY) {
                    ++indexsym.c;
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL;
                    else
                        f = f->next;
                    if (no_oblock && f == NULL)
                        break;
                }

                if (tok == '}')
                    break;
                skip(',');
            }
        }
        if (!no_oblock)
            skip('}');

    } else if ((flags & DIF_HAVE_ELEM)
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
               && is_compatible_unqualified_types(type, &vtop->type)) {
        goto one_elem;

    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        no_oblock = 1;
        if ((flags & DIF_FIRST) || tok == '{') {
            skip('{');
            no_oblock = 0;
        }
        s = type->ref;
        f = s->next;
        n = s->c;
        size1 = 1;      /* struct: 'len'/'n' are byte-based, element size 1 */
        goto do_init_list;

    } else if (tok == '{') {
        /* scalar initialized with a braced expression: { expr } */
        if (flags & DIF_HAVE_ELEM)
            skip(';');
        next();
        decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
        skip('}');

    } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything than an array if DIF_SIZE_ONLY (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when DIF_SIZE_ONLY is set. */
        /* just skip expression */
        if (flags & DIF_HAVE_ELEM)
            vpop();
        else
            skip_or_save_block(NULL);

    } else {
        if (!(flags & DIF_HAVE_ELEM)) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early. */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        }
        if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
            && vtop->c.i == 0
            && btype_size(type->t & VT_BTYPE) /* not for fp constants */
            )
            vpop();
        else
            init_putv(p, type, c);
    }
}
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
{
    int size, align, addr;
    TokenString *init_str = NULL;

    Section *sec;
    Sym *flexible_array;
    Sym *sym;
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
#endif
    init_params p = {0};

    /* Always allocate static or global variables */
    if (v && (r & VT_VALMASK) == VT_CONST)
        nocode_wanted |= 0x80000000;

    flexible_array = NULL;
    size = type_size(type, &align);

    /* exactly one flexible array may be initialized, either the
       toplevel array or the last member of the toplevel struct */

    if (size < 0) {
        /* If the base type itself was an array type of unspecified size
           (like in 'typedef int arr[]; arr x = {1};') then we will
           overwrite the unknown size by the real one for this decl.
           We need to unshare the ref symbol holding that size. */
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        p.flex_array_ref = type->ref;

    } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
        /* find the last member; a trailing unsized array makes the
           whole struct a flexible-array candidate */
        Sym *field = type->ref->next;
        if (field) {
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
                flexible_array = field;
                p.flex_array_ref = field->type.ref;
                size = -1;
            }
        }
    }

    if (size < 0) {
        /* If unknown size, do a dry-run 1st pass */
        if (!has_init)
            tcc_error("unknown type size");
        if (has_init == 2) {
            /* only get strings */
            init_str = tok_str_alloc();
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                next();
            }
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else
            skip_or_save_block(&init_str);
        unget_tok(0);

        /* compute size */
        begin_macro(init_str, 1);
        next();
        decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;
        next();

        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");

        /* If there's a flex member and it was used in the initializer
           adjust size. */
        if (flexible_array && flexible_array->type.ref->c > 0)
            size += flexible_array->type.ref->c
                    * pointed_size(&flexible_array->type);
    }

    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        if (speca > align)
            align = speca;
    } else if (ad->a.packed) {
        align = 1;
    }

    /* nameless object while output is suppressed: no storage needed */
    if (!v && NODATA_WANTED)
        size = 0, align = 1;

    if ((r & VT_VALMASK) == VT_LOCAL) {
        sec = NULL;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
        }
#endif
        /* allocate on the (downward-growing) stack */
        loc = (loc - size) & -align;
        addr = loc;
        p.local_offset = addr + size;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym = sym_push(v, type, r, addr);
            if (ad->cleanup_func) {
                /* register a __cleanup__ handler on the current scope */
                Sym *cls = sym_push2(&all_cleanups,
                    SYM_FIELD | ++cur_scope->cl.n, 0, 0);
                cls->prev_tok = sym;
                cls->next = ad->cleanup_func;
                cls->ncl = cur_scope->cl.s;
                cur_scope->cl.s = cls;
            }

            sym->a = ad->a;
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        sym = NULL;
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
                    && sym->type.ref->c > type->ref->c) {
                    /* flex array was already declared with explicit size
                            extern int arr[10];
                            int arr[] = { 1,2,3 }; */
                    type->ref->c = sym->type.ref->c;
                    size = type_size(type, &align);
                }
                patch_storage(sym, ad, type);
                /* we accept several definitions of the same global variable. */
                if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
                    goto no_alloc;
            }
        }

        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            CType *tp = type;
            /* look through arrays-of-pointers to the element type */
            while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
                tp = &tp->ref->type;
            if (tp->t & VT_CONSTANT) {
                sec = rodata_section;
            } else if (has_init) {
                sec = data_section;
                /*if (tcc_state->g_debug & 4)
                    tcc_warning("rw data: %s", get_tok_str(v, 0));*/
            } else if (tcc_state->nocommon)
                sec = bss_section;
        }

        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (bcheck)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        }

        if (v) {
            if (!sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                patch_storage(sym, ad, NULL);
            }
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            vpush_ref(type, sec, addr, size);
            sym = vtop->sym;
            vtop->r |= r;
        }

#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (bcheck) {
            addr_t *bounds_ptr;

            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }

    if (type->t & VT_VLA) {
        int a;

        if (NODATA_WANTED)
            goto no_alloc;

        /* save before-VLA stack pointer if needed */
        if (cur_scope->vla.num == 0) {
            if (cur_scope->prev && cur_scope->prev->vla.num) {
                cur_scope->vla.locorig = cur_scope->prev->vla.loc;
            } else {
                gen_vla_sp_save(loc -= PTR_SIZE);
                cur_scope->vla.locorig = loc;
            }
        }

        vpush_type_size(type, &a);
        gen_vla_alloc(type, a);
#if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
        /* on _WIN64, because of the function args scratch area, the
           result of alloca differs from RSP and is returned in RAX. */
        gen_vla_result(addr), addr = (loc -= PTR_SIZE);
#endif
        gen_vla_sp_save(addr);
        cur_scope->vla.loc = addr;
        cur_scope->vla.num++;
    } else if (has_init) {
        p.sec = sec;
        decl_initializer(&p, type, addr, DIF_FIRST);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        next();
    }

    nocode_wanted = saved_nocode_wanted;
}
/* generate vla code saved in post_type() */
/* For a VLA-typed parameter, replay the saved size-expression tokens
   (vla_array_str) and store size-expression * element-size into a
   fresh int-sized stack slot (recorded in arg->type.ref->c).
   Recurses first so inner array dimensions are emitted before outer
   ones. */
static void func_vla_arg_code(Sym *arg)
{
    int align;
    TokenString *vla_array_tok = NULL;

    /* emit code for inner dimensions first */
    if (arg->type.ref)
        func_vla_arg_code(arg->type.ref);

    if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
        /* reserve an int-sized, int-aligned local slot for the size */
        loc -= type_size(&int_type, &align);
        loc &= -align;
        arg->type.ref->c = loc;

        /* re-parse the saved dimension expression through the macro
           machinery; unget_tok(0)/next() bracket the stream switch */
        unget_tok(0);
        vla_array_tok = tok_str_alloc();
        vla_array_tok->str = arg->type.ref->vla_array_str;
        begin_macro(vla_array_tok, 1);
        next();
        gexpr();
        end_macro();
        next();
        /* size-in-bytes = dimension * element size; store into the slot */
        vpush_type_size(&arg->type.ref->type, &align);
        gen_op('*');
        vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
        vswap();
        vstore();
        vpop();
    }
}
8053 static void func_vla_arg(Sym *sym)
8055 Sym *arg;
8057 for (arg = sym->type.ref->next; arg; arg = arg->next)
8058 if (arg->type.t & VT_VLA)
8059 func_vla_arg_code(arg);
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    /* fresh root scope for this function body */
    struct scope f = { 0 };
    cur_scope = root_scope = &f;
    nocode_wanted = 0;

    ind = cur_text_section->data_offset;
    if (sym->a.aligned) {
        /* pad with nops up to the requested function alignment */
        size_t newoff = section_add(cur_text_section, 0,
                                    1 << (sym->a.aligned - 1));
        gen_fill_nops(newoff - ind);
    }

    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);

    /* constructor/destructor attributes: register in .init/.fini arrays */
    if (sym->type.ref->f.func_ctor)
        add_array (tcc_state, ".init_array", sym->c);
    if (sym->type.ref->f.func_dtor)
        add_array (tcc_state, ".fini_array", sym->c);

    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    func_vt = sym->type.ref->type;
    func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;

    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(sym);
    local_scope = 0;
    rsym = 0;                       /* return-label jump chain */
    clear_temp_local_var_list();
    func_vla_arg(sym);              /* emit VLA-parameter size code */
    block(0);                       /* compile the function body */
    gsym(rsym);                     /* resolve pending 'return' jumps */
    nocode_wanted = 0;
    /* reset local stack */
    pop_local_syms(NULL, 0);
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    local_scope = 0;
    label_pop(&global_label_stack, NULL, 0);
    sym_pop(&all_cleanups, NULL, 0);
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    /* end of function */
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    func_ind = -1;
    nocode_wanted = 0x80000000;
    check_vstack();
    /* do this after funcend debug info */
    next();
}
8124 static void gen_inline_functions(TCCState *s)
8126 Sym *sym;
8127 int inline_generated, i;
8128 struct InlineFunc *fn;
8130 tcc_open_bf(s, ":inline:", 0);
8131 /* iterate while inline function are referenced */
8132 do {
8133 inline_generated = 0;
8134 for (i = 0; i < s->nb_inline_fns; ++i) {
8135 fn = s->inline_fns[i];
8136 sym = fn->sym;
8137 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8138 /* the function was used or forced (and then not internal):
8139 generate its code and convert it to a normal function */
8140 fn->sym = NULL;
8141 tcc_debug_putfile(s, fn->filename);
8142 begin_macro(fn->func_str, 1);
8143 next();
8144 cur_text_section = text_section;
8145 gen_function(sym);
8146 end_macro();
8148 inline_generated = 1;
8151 } while (inline_generated);
8152 tcc_close();
8155 static void free_inline_functions(TCCState *s)
8157 int i;
8158 /* free tokens of unused inline functions */
8159 for (i = 0; i < s->nb_inline_fns; ++i) {
8160 struct InlineFunc *fn = s->inline_fns[i];
8161 if (fn->sym)
8162 tok_str_free(fn->func_str);
8164 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Parse a sequence of declarations (top-level, local, or K&R parameter
   declarations).  Returns 0 normally; when 'is_for_loop_init' is set,
   returns 1 after one declaration and 0 when none was found. */
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
{
    int v, has_init, r, oldint;
    CType type, btype;
    Sym *sym;
    AttributeDef ad, adbase;

    while (1) {
        /* C11 _Static_assert(cond [, message]); */
        if (tok == TOK_STATIC_ASSERT) {
            CString error_str;
            int c;

            next();
            skip('(');
            c = expr_const();

            if (tok == ')') {
                /* single-argument form (C23 style): no message */
                if (!c)
                    tcc_error("_Static_assert fail");
                next();
                goto static_assert_out;
            }

            skip(',');
            parse_mult_str(&error_str, "string constant");
            if (c == 0)
                tcc_error("%s", (char *)error_str.data);
            cstr_free(&error_str);
            skip(')');
          static_assert_out:
            skip(';');
            continue;
        }

        oldint = 0;
        if (!parse_btype(&btype, &adbase)) {
            /* no base type could be parsed */
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                next();
                continue;
            }
            if (l != VT_CONST)
                break;
            if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
                /* global asm block */
                asm_global_instr();
                continue;
            }
            if (tok >= TOK_UIDENT) {
                /* special test for old K&R protos without explicit int
                   type. Only accepted when defining global data */
                btype.t = VT_INT;
                oldint = 1;
            } else {
                if (tok != TOK_EOF)
                    expect("declaration");
                break;
            }
        }

        if (tok == ';') {
            /* declaration of a tag without declarator, e.g. 'struct s;' */
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
                next();
                continue;
            }
            if (IS_ENUM(btype.t)) {
                next();
                continue;
            }
        }

        while (1) { /* iterate thru each declaration */
            type = btype;
            ad = adbase;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL))
                    tcc_error("function without file scope cannot be static");
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
#ifdef TCC_TARGET_MACHO
                if (sym->f.func_alwinl
                    && ((type.t & (VT_EXTERN | VT_INLINE))
                        == (VT_EXTERN | VT_INLINE))) {
                    /* always_inline functions must be handled as if they
                       don't generate multiple global defs, even if extern
                       inline, i.e. GNU inline semantics for those.  Rewrite
                       them into static inline. */
                    type.t &= ~VT_EXTERN;
                    type.t |= VT_STATIC;
                }
#endif
                /* always compile 'extern inline' */
                if (type.t & VT_EXTERN)
                    type.t &= ~VT_INLINE;

            } else if (oldint) {
                tcc_warning("type defaults to int");
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
#if 0
                /* gcc does not allow __asm__("label") with function definition,
                   but why not ... */
                if (tok == '{')
                    expect(";");
#endif
            }

#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & VT_STATIC)
                    tcc_error("cannot have dll linkage with static");
                if (type.t & VT_TYPEDEF) {
                    tcc_warning("'%s' attribute ignored for typedef",
                        ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                        (ad.a.dllexport = 0, "dllexport"));
                } else if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
                }
            }
#endif
            if (tok == '{') {
                /* function definition */
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }

                /* apply post-declaraton attributes */
                merge_funcattr(&type.ref->f, &ad.f);

                /* put function symbol */
                type.t &= ~VT_EXTERN;
                sym = external_sym(v, &type, 0, &ad);

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if (sym->type.t & VT_INLINE) {
                    struct InlineFunc *fn;
                    fn = tcc_malloc(sizeof *fn + strlen(file->filename));
                    strcpy(fn->filename, file->filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (l == VT_CMP) {
                    /* find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        /* redefinition in same scope: must be compatible */
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
                    sym->f = ad.f;
                    if (debug_modes)
                        tcc_debug_typedef (tcc_state, sym);
                } else if ((type.t & VT_BTYPE) == VT_VOID
                           && !(type.t & VT_EXTERN)) {
                    tcc_error("declaration of void object");
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= VT_LVAL;
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                        || (type.t & VT_BTYPE) == VT_FUNC
                        /* as with GCC, uninitialized global arrays with no size
                           are considered extern: */
                        || ((type.t & VT_ARRAY) && !has_init
                            && l == VT_CONST && type.ref->c < 0)
                        ) {
                        /* external variable or function */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* Aliases need to be emitted when their target
                               symbol is emitted, even if perhaps unreferenced.
                               We only support the case where the base is
                               already defined, otherwise we would need
                               deferring to emit the aliases until the end of
                               the compile unit. */
                            Sym *alias_target = sym_find(ad.alias_target);
                            ElfSym *esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            put_extern_sym2(sym, esym->st_shndx,
                                            esym->st_value, esym->st_size, 1);
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
        }
    }
    return 0;
}
/* parse declarations with default storage 'l' (VT_CONST for file scope,
   VT_LOCAL for block scope); convenience wrapper around decl0() */
static void decl(int l)
{
    decl0(l, 0, NULL);
}
8459 /* ------------------------------------------------------------------------- */
8460 #undef gjmp_addr
8461 #undef gjmp
8462 /* ------------------------------------------------------------------------- */