/* tccgen.c — TinyCC code generator.
   (gitweb export header removed; commit subject was "Increase VSTACK_SIZE to 512.") */
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define USING_GLOBALS
#include "tcc.h"
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

/* symbol stacks: file-scope symbols, function-local symbols,
   preprocessor defines, and goto labels (global/local) */
ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

/* free list + pool backing storage for Sym objects (see __sym_malloc) */
static Sym *sym_free_first;
static void **sym_pools;
static int nb_sym_pools;

static Sym *all_cleanups, *pending_gotos;
static int local_scope;
static int in_sizeof;
static int constant_p;
ST_DATA char debug_modes;

/* value stack: vtop points at the current top entry; one guard slot sits
   below vstack[0] so that vtop == vstack - 1 means "stack empty" */
ST_DATA SValue *vtop;
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define unevalmask 0xffff /* unevaluated subexpression */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
/* NOTE(review): CODE_OFF expands to a bare 'if' statement — an 'else'
   directly following "CODE_OFF();" in a caller would bind to it.  Current
   call sites look safe, but keep this in mind when editing. */
#define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= 0x20000000)
#define CODE_ON() (nocode_wanted &= ~0x20000000)

ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int func_vc;
ST_DATA int func_ind;
ST_DATA const char *funcname;
ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
static CString initstr;

/* size_t / ptrdiff_t flavors for the target's pointer width */
#if PTR_SIZE == 4
#define VT_SIZE_T (VT_INT | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_INT
#elif LONG_SIZE == 4
#define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_LLONG
#else
#define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
#endif
/* state of the switch statement currently being compiled */
static struct switch_t {
    struct case_t {
        int64_t v1, v2;         /* case range [v1, v2] */
        int sym;                /* label of the case body */
    } **p; int n; /* list of case ranges */
    int def_sym; /* default symbol */
    int nocode_wanted;
    int *bsym;                  /* break jump chain */
    struct scope *scope;
    struct switch_t *prev;      /* enclosing switch, if nested */
    SValue sv;
} *cur_switch; /* current switch */

#define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
/*list of temporary local variables on the stack in current function. */
static struct temp_local_variable {
    int location; //offset on stack. Svalue.c.i
    short size;
    short align;
} arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
static int nb_temp_local_vars;

/* lexical block scope chain (one node per '{' block) */
static struct scope {
    struct scope *prev;
    struct { int loc, locorig, num; } vla;  /* VLA bookkeeping */
    struct { Sym *s; int n; } cl;           /* cleanups (attribute cleanup) */
    int *bsym, *csym;                       /* break/continue jump chains */
    Sym *lstk, *llstk;                      /* local symbol / label stack marks */
} *cur_scope, *loop_scope, *root_scope;

/* parameters threaded through initializer parsing/emission */
typedef struct {
    Section *sec;               /* target section, or NULL for runtime init */
    int local_offset;
    Sym *flex_array_ref;
} init_params;
#if 1
#define precedence_parser
static void init_prec(void);
#endif

/* forward declarations for the recursive-descent parser/generator below */
static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(init_params *p, CType *type, unsigned long c);
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
static void block(int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static int decl(int l);
static void expr_eq(void);
static void vpush_type_size(CType *type, int *a);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void free_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
static int get_temp_local_var(int size,int align);
static void clear_temp_local_var_list();
static void cast_error(CType *st, CType *dt);
/* ------------------------------------------------------------------------- */
/* Automagical code suppression */

/* Clear 'nocode_wanted' at forward label if it was used */
ST_FUNC void gsym(int t)
{
    if (t) {
        gsym_addr(t, ind);
        CODE_ON();  /* a jump lands here, so code is reachable again */
    }
}

/* Clear 'nocode_wanted' if current pc is a label */
static int gind()
{
    int t = ind;  /* remember current output position as the label */
    CODE_ON();
    if (debug_modes)
        tcc_tcov_block_begin(tcc_state);
    return t;
}

/* Set 'nocode_wanted' after unconditional (backwards) jump */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}

/* Set 'nocode_wanted' after unconditional (forwards) jump */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF();
    return t;
}

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
/* ------------------------------------------------------------------------- */
192 ST_INLN int is_float(int t)
194 int bt = t & VT_BTYPE;
195 return bt == VT_LDOUBLE
196 || bt == VT_DOUBLE
197 || bt == VT_FLOAT
198 || bt == VT_QFLOAT;
201 static inline int is_integer_btype(int bt)
203 return bt == VT_BYTE
204 || bt == VT_BOOL
205 || bt == VT_SHORT
206 || bt == VT_INT
207 || bt == VT_LLONG;
210 static int btype_size(int bt)
212 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
213 bt == VT_SHORT ? 2 :
214 bt == VT_INT ? 4 :
215 bt == VT_LLONG ? 8 :
216 bt == VT_PTR ? PTR_SIZE : 0;
/* returns function return register from type */
static int R_RET(int t)
{
    if (!is_float(t))
        return REG_IRET;
#ifdef TCC_TARGET_X86_64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return TREG_ST0;  /* x87 long double is returned in st(0) */
#elif defined TCC_TARGET_RISCV64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return REG_IRET;  /* riscv: long double returned in integer regs */
#endif
    return REG_FRET;
}

/* returns 2nd function return register, if any */
static int R2_RET(int t)
{
    t &= VT_BTYPE;
#if PTR_SIZE == 4
    if (t == VT_LLONG)
        return REG_IRE2;
#elif defined TCC_TARGET_X86_64
    if (t == VT_QLONG)
        return REG_IRE2;
    if (t == VT_QFLOAT)
        return REG_FRE2;
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return REG_IRE2;
#endif
    return VT_CONST;  /* sentinel: no second register needed */
}

/* returns true for two-word types */
#define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)

/* put function return registers to stack value */
static void PUT_R_RET(SValue *sv, int t)
{
    sv->r = R_RET(t), sv->r2 = R2_RET(t);
}

/* returns function return register class for type t */
static int RC_RET(int t)
{
    return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
}

/* returns generic register class for type t */
static int RC_TYPE(int t)
{
    if (!is_float(t))
        return RC_INT;
#ifdef TCC_TARGET_X86_64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_ST0;
    if ((t & VT_BTYPE) == VT_QFLOAT)
        return RC_FRET;
#elif defined TCC_TARGET_RISCV64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_INT;
#endif
    return RC_FLOAT;
}

/* returns 2nd register class corresponding to t and rc */
static int RC2_TYPE(int t, int rc)
{
    if (!USING_TWO_WORDS(t))
        return 0;
#ifdef RC_IRE2
    if (rc == RC_IRET)
        return RC_IRE2;
#endif
#ifdef RC_FRE2
    if (rc == RC_FRET)
        return RC_FRE2;
#endif
    if (rc & RC_FLOAT)
        return RC_FLOAT;
    return RC_INT;
}
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    /* on little-endian hosts p[1] holds sign+exponent; the result is 0
       exactly when the exponent field is all ones (inf or NaN) */
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}

/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif

/* error out unless the value on top of the stack is an lvalue */
ST_FUNC void test_lvalue(void)
{
    if (!(vtop->r & VT_LVAL))
        expect("lvalue");
}

/* internal consistency check: the value stack must be empty here */
ST_FUNC void check_vstack(void)
{
    if (vtop != vstack - 1)
        tcc_error("internal compiler error: vstack leak (%d)",
                  (int)(vtop - vstack + 1));
}

/* vstack debugging aid */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
/* ------------------------------------------------------------------------- */
/* initialize vstack and types. This must be done also for tcc -E */
ST_FUNC void tccgen_init(TCCState *s1)
{
    vtop = vstack - 1;          /* empty stack; guard slot below vstack[0] */
    memset(vtop, 0, sizeof *vtop);

    /* define some often used types */
    int_type.t = VT_INT;

    char_type.t = VT_BYTE;
    if (s1->char_is_unsigned)
        char_type.t |= VT_UNSIGNED;
    char_pointer_type = char_type;
    mk_pointer(&char_pointer_type);

    /* old-style (unprototyped) function type returning int */
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;
#ifdef precedence_parser
    init_prec();
#endif
    cstr_new(&initstr);
}

/* compile the whole current translation unit; returns 0 on success */
ST_FUNC int tccgen_compile(TCCState *s1)
{
    cur_text_section = NULL;
    funcname = "";
    func_ind = -1;
    anon_sym = SYM_FIRST_ANOM;
    const_wanted = 0;
    nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
    local_scope = 0;
    debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;

    tcc_debug_start(s1);
    tcc_tcov_start (s1);
#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif
#ifdef INC_DEBUG
    printf("%s: **** new file\n", file->filename);
#endif
    parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
    next();
    decl(VT_CONST);             /* parse all external declarations */
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
    tcc_tcov_end(s1);
    return 0;
}

/* release all per-compilation generator state */
ST_FUNC void tccgen_finish(TCCState *s1)
{
    cstr_free(&initstr);
    free_inline_functions(s1);
    sym_pop(&global_stack, NULL, 0);
    sym_pop(&local_stack, NULL, 0);
    /* free preprocessor macros */
    free_defines(NULL);
    /* free sym_pools */
    dynarray_reset(&sym_pools, &nb_sym_pools);
    sym_free_first = NULL;
}
414 /* ------------------------------------------------------------------------- */
415 ST_FUNC ElfSym *elfsym(Sym *s)
417 if (!s || !s->c)
418 return NULL;
419 return &((ElfSym *)symtab_section->data)[s->c];
/* apply storage attributes to Elf symbol */
ST_FUNC void update_storage(Sym *sym)
{
    ElfSym *esym;
    int sym_bind, old_sym_bind;

    esym = elfsym(sym);
    if (!esym)
        return;  /* no ELF symbol allocated yet: nothing to update */

    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;

    /* derive ELF binding from C storage class / attributes */
    if (sym->type.t & (VT_STATIC | VT_INLINE))
        sym_bind = STB_LOCAL;
    else if (sym->a.weak)
        sym_bind = STB_WEAK;
    else
        sym_bind = STB_GLOBAL;
    old_sym_bind = ELFW(ST_BIND)(esym->st_info);
    if (sym_bind != old_sym_bind) {
        esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
    }

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif

#if 0
    printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
        get_tok_str(sym->v, NULL),
        sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
        sym->a.visibility,
        sym->a.dllexport,
        sym->a.dllimport
        );
#endif
}
/* ------------------------------------------------------------------------- */
/* update sym->c so that it points to an external symbol in section
   'section' with value 'value' */

ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
                             addr_t value, unsigned long size,
                             int can_add_underscore)
{
    int sym_type, sym_bind, info, other, t;
    ElfSym *esym;
    const char *name;
    char buf1[256];

    if (!sym->c) {
        /* first time: create the ELF symbol */
        name = get_tok_str(sym->v, NULL);
        t = sym->type.t;
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
            if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
                sym_type = STT_FUNC;
        } else {
            sym_type = STT_OBJECT;
        }
        if (t & (VT_STATIC | VT_INLINE))
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;

#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->a.nodecorate) {
                can_add_underscore = 0;
            }
            /* stdcall decoration: _name@<bytes of args> */
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif
        if (sym->asm_label) {
            /* an explicit asm label overrides the C name (no decoration) */
            name = get_tok_str(sym->asm_label, NULL);
            can_add_underscore = 0;
        }
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);

        if (debug_modes)
            tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);

    } else {
        /* already present: only refresh value/size/section */
        esym = elfsym(sym);
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}
/* like put_extern_sym2 but skipped entirely in suppressed-code regions */
ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
{
    if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
        return;
    put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
}

/* add a new relocation entry to symbol 'sym' in section 's' */
ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
                     addr_t addend)
{
    int c = 0;

    if (nocode_wanted && s == cur_text_section)
        return;

    if (sym) {
        if (0 == sym->c)
            put_extern_sym(sym, NULL, 0, 0);  /* force creation of the ELF symbol */
        c = sym->c;
    }

    /* now we can add ELF relocation info */
    put_elf_reloca(symtab_section, s, offset, type, c, addend);
}

#if PTR_SIZE == 4
/* 32-bit convenience wrapper: relocation with zero addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
570 /* ------------------------------------------------------------------------- */
571 /* symbol allocator */
572 static Sym *__sym_malloc(void)
574 Sym *sym_pool, *sym, *last_sym;
575 int i;
577 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
578 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
580 last_sym = sym_free_first;
581 sym = sym_pool;
582 for(i = 0; i < SYM_POOL_NB; i++) {
583 sym->next = last_sym;
584 last_sym = sym;
585 sym++;
587 sym_free_first = last_sym;
588 return last_sym;
/* pop one Sym from the free list, refilling from a new pool if empty */
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    /* debug mode: plain malloc so tools can track each Sym */
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}

/* return a Sym to the free list (or free it in debug mode) */
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}

/* push, without hashing */
ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
{
    Sym *s;

    s = sym_malloc();
    memset(s, 0, sizeof *s);
    s->v = v;
    s->type.t = t;
    s->c = c;
    /* add in stack */
    s->prev = *ps;
    *ps = s;
    return s;
}
632 /* find a symbol and return its associated structure. 's' is the top
633 of the symbol stack */
634 ST_FUNC Sym *sym_find2(Sym *s, int v)
636 while (s) {
637 if (s->v == v)
638 return s;
639 else if (s->v == -1)
640 return NULL;
641 s = s->prev;
643 return NULL;
646 /* structure lookup */
647 ST_INLN Sym *struct_find(int v)
649 v -= TOK_IDENT;
650 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
651 return NULL;
652 return table_ident[v]->sym_struct;
655 /* find an identifier */
656 ST_INLN Sym *sym_find(int v)
658 v -= TOK_IDENT;
659 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
660 return NULL;
661 return table_ident[v]->sym_identifier;
664 static int sym_scope(Sym *s)
666 if (IS_ENUM_VAL (s->type.t))
667 return s->type.ref->sym_scope;
668 else
669 return s->sym_scope;
/* push a given symbol on the symbol stack */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    /* inside a function, locals shadow globals */
    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;       /* struct/union/enum tag namespace */
        else
            ps = &ts->sym_identifier;   /* ordinary identifier namespace */
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                      get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    s->r = VT_CONST | VT_SYM;
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that sym_identifier will
           point to 's' when popped; happens when called from inline asm */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}

/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while (s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;  /* unhook from the per-token chain */
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
/* ------------------------------------------------------------------------- */
/* materialize a pending VT_CMP (CPU flags) value before it can be clobbered */
static void vcheck_cmp(void)
{
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real.  All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again. */

    if (vtop->r == VT_CMP && !nocode_wanted)
        gv(RC_INT);
}
772 static void vsetc(CType *type, int r, CValue *vc)
774 if (vtop >= vstack + (VSTACK_SIZE - 1))
775 tcc_error("memory full (vstack)");
776 vcheck_cmp();
777 vtop++;
778 vtop->type = *type;
779 vtop->r = r;
780 vtop->r2 = VT_CONST;
781 vtop->c = *vc;
782 vtop->sym = NULL;
785 ST_FUNC void vswap(void)
787 SValue tmp;
789 vcheck_cmp();
790 tmp = vtop[0];
791 vtop[0] = vtop[-1];
792 vtop[-1] = tmp;
/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_CMP) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->jtrue);
        gsym(vtop->jfalse);
    }
    vtop--;
}
/* push constant of type "type" with useless value */
static void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}

/* push arbitrary 64bit constant */
static void vpush64(int ty, unsigned long long v)
{
    CValue cval;
    CType ctype;
    ctype.t = ty;
    ctype.ref = NULL;
    cval.i = v;
    vsetc(&ctype, VT_CONST, &cval);
}

/* push integer constant */
ST_FUNC void vpushi(int v)
{
    vpush64(VT_INT, v);
}

/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    vpush64(VT_SIZE_T, v);
}

/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}

/* push a value described by (type, register kind r, constant v) */
ST_FUNC void vset(CType *type, int r, int v)
{
    CValue cval;
    cval.i = v;
    vsetc(type, r, &cval);
}

/* like vset but with implicit int type */
static void vseti(int r, int v)
{
    CType type;
    type.t = VT_INT;
    type.ref = NULL;
    vset(&type, r, v);
}

/* push a verbatim copy of an existing stack value */
ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}

/* duplicate the top of the value stack */
static void vdup(void)
{
    vpushv(vtop);
}
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    /* save the deepest of the n entries, shift the rest down, put it on top */
    tmp = vtop[-n + 1];
    for(i=-n+1;i!=0;i++)
        vtop[i] = vtop[i+1];
    vtop[0] = tmp;
}

/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    /* save *e, shift the n-1 entries below it up by one, store at bottom */
    tmp = *e;
    for(i = 0;i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}

/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right]
*/
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}
/* ------------------------------------------------------------------------- */
/* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */

/* called from generators to set the result from relational ops */
ST_FUNC void vset_VT_CMP(int op)
{
    vtop->r = VT_CMP;
    vtop->cmp_op = op;
    vtop->jfalse = 0;
    vtop->jtrue = 0;
}

/* called once before asking generators to load VT_CMP to a register */
static void vset_VT_JMP(void)
{
    int op = vtop->cmp_op;

    if (vtop->jtrue || vtop->jfalse) {
        int origt = vtop->type.t;
        /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
        int inv = op & (op < 2); /* small optimization */
        vseti(VT_JMP+inv, gvtst(inv, 0));
        vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
    } else {
        /* otherwise convert flags (rsp. 0/1) to register */
        vtop->c.i = op;
        if (op < 2) /* doesn't seem to happen */
            vtop->r = VT_CONST;
    }
}

/* Set CPU Flags, doesn't yet jump */
static void gvtst_set(int inv, int t)
{
    int *p;

    if (vtop->r != VT_CMP) {
        /* force a comparison against zero to obtain flags */
        vpushi(0);
        gen_op(TOK_NE);
        if (vtop->r != VT_CMP) /* must be VT_CONST then */
            vset_VT_CMP(vtop->c.i != 0);
    }

    /* append 't' to the appropriate pending jump chain */
    p = inv ? &vtop->jfalse : &vtop->jtrue;
    *p = gjmp_append(*p, t);
}

/* Generate value test

 * Generate a test for any value (jump, comparison and integers) */
static int gvtst(int inv, int t)
{
    int op, x, u;

    gvtst_set(inv, t);
    t = vtop->jtrue, u = vtop->jfalse;
    if (inv)
        x = u, u = t, t = x;  /* swap the two jump chains */
    op = vtop->cmp_op;

    /* jump to the wanted target */
    if (op > 1)
        t = gjmp_cond(op ^ inv, t);
    else if (op != inv)
        t = gjmp(t);
    /* resolve complementary jumps to here */
    gsym(u);

    vtop--;
    return t;
}

/* generate a zero or nozero test */
static void gen_test_zero(int op)
{
    if (vtop->r == VT_CMP) {
        int j;
        if (op == TOK_EQ) {
            /* flags already set: '== 0' just inverts the pending test */
            j = vtop->jfalse;
            vtop->jfalse = vtop->jtrue;
            vtop->jtrue = j;
            vtop->cmp_op ^= 1;
        }
    } else {
        vpushi(0);
        gen_op(op);
    }
}
/* ------------------------------------------------------------------------- */
/* push a symbol value of TYPE */
ST_FUNC void vpushsym(CType *type, Sym *sym)
{
    CValue cval;
    cval.i = 0;
    vsetc(type, VT_CONST | VT_SYM, &cval);
    vtop->sym = sym;  /* value is the symbol's address (reloc resolved later) */
}
1014 /* Return a static symbol pointing to a section */
1015 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1017 int v;
1018 Sym *sym;
1020 v = anon_sym++;
1021 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1022 sym->type.t |= VT_STATIC;
1023 put_extern_sym(sym, sec, offset, size);
1024 return sym;
/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}

/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
    } else if (IS_ASM_SYM(s)) {
        /* an asm-declared symbol gets the C type now, keeping extern-ness */
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}

/* create an external reference with no specific type similar to asm labels.
   This avoids type conflicts if the symbol is used from C too */
ST_FUNC Sym *external_helper_sym(int v)
{
    CType ct = { VT_ASM_FUNC, NULL };
    return external_global_sym(v, &ct);
}

/* push a reference to an helper function (such as memmove) */
ST_FUNC void vpush_helper_func(int v)
{
    vpushsym(&func_old_type, external_helper_sym(v));
}
/* Merge symbol attributes. */
static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
{
    if (sa1->aligned && !sa->aligned)
        sa->aligned = sa1->aligned;
    sa->packed |= sa1->packed;
    sa->weak |= sa1->weak;
    sa->nodebug |= sa1->nodebug;
    if (sa1->visibility != STV_DEFAULT) {
        /* keep the most restrictive (numerically smallest non-default)
           visibility of the two */
        int vis = sa->visibility;
        if (vis == STV_DEFAULT
            || vis > sa1->visibility)
            vis = sa1->visibility;
        sa->visibility = vis;
    }
    sa->dllexport |= sa1->dllexport;
    sa->nodecorate |= sa1->nodecorate;
    sa->dllimport |= sa1->dllimport;
}
1085 /* Merge function attributes. */
1086 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1088 if (fa1->func_call && !fa->func_call)
1089 fa->func_call = fa1->func_call;
1090 if (fa1->func_type && !fa->func_type)
1091 fa->func_type = fa1->func_type;
1092 if (fa1->func_args && !fa->func_args)
1093 fa->func_args = fa1->func_args;
1094 if (fa1->func_noreturn)
1095 fa->func_noreturn = 1;
1096 if (fa1->func_ctor)
1097 fa->func_ctor = 1;
1098 if (fa1->func_dtor)
1099 fa->func_dtor = 1;
/* Merge attributes. */
static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
{
    merge_symattr(&ad->a, &ad1->a);
    merge_funcattr(&ad->f, &ad1->f);

    /* pointer-valued attributes: ad1 overrides when set */
    if (ad1->section)
        ad->section = ad1->section;
    if (ad1->alias_target)
        ad->alias_target = ad1->alias_target;
    if (ad1->asm_label)
        ad->asm_label = ad1->asm_label;
    if (ad1->attr_mode)
        ad->attr_mode = ad1->attr_mode;
}
/* Merge some type attributes. */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;  /* now we have a definition */
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
             || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* a prototyped declaration supersedes an old-style one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);  /* propagate the merged attributes to the ELF symbol */
}
/* copy sym to other stack */
static Sym *sym_copy(Sym *s0, Sym **ps)
{
    Sym *s;
    s = sym_malloc(), *s = *s0;   /* shallow copy of the whole Sym */
    s->prev = *ps, *ps = s;       /* push on the destination stack */
    if (s->v < SYM_FIRST_ANOM) {
        /* also hook the copy into the per-token identifier chain */
        ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
        s->prev_tok = *ps, *ps = s;
    }
    return s;
}

/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* deep-copy the whole ref chain, relinking 'next' pointers to
           the freshly copied nodes, and recurse into each one */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
/* define a new external reference to a symbol 'v' */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;  /* skip shadowing local declarations */

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1254 /* save registers up to (vtop - n) stack entry */
1255 ST_FUNC void save_regs(int n)
1257 SValue *p, *p1;
1258 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1259 save_reg(p->r);
/* save r to the memory stack, and mark it as being free */
/* Convenience wrapper: spill register 'r' wherever it appears on the
   whole value stack (n == 0 means "up to vtop"). */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
/* The register contents are stored once into a temporary local slot; every
   stack entry that referenced 'r' (as first or second word) is then
   redirected to that slot (VT_LOCAL/VT_LLOCAL lvalue). */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
/* ARM helper: a register with a single stack reference can be reused
   cheaply; otherwise fall back to the generic allocator on class 'rc'. */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count references to r anywhere on the value stack */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1388 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1389 static int get_temp_local_var(int size,int align){
1390 int i;
1391 struct temp_local_variable *temp_var;
1392 int found_var;
1393 SValue *p;
1394 int r;
1395 char free;
1396 char found;
1397 found=0;
1398 for(i=0;i<nb_temp_local_vars;i++){
1399 temp_var=&arr_temp_local_vars[i];
1400 if(temp_var->size<size||align!=temp_var->align){
1401 continue;
1403 /*check if temp_var is free*/
1404 free=1;
1405 for(p=vstack;p<=vtop;p++) {
1406 r=p->r&VT_VALMASK;
1407 if(r==VT_LOCAL||r==VT_LLOCAL){
1408 if(p->c.i==temp_var->location){
1409 free=0;
1410 break;
1414 if(free){
1415 found_var=temp_var->location;
1416 found=1;
1417 break;
1420 if(!found){
1421 loc = (loc - size) & -align;
1422 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1423 temp_var=&arr_temp_local_vars[i];
1424 temp_var->location=loc;
1425 temp_var->size=size;
1426 temp_var->align=align;
1427 nb_temp_local_vars++;
1429 found_var=loc;
1431 return found_var;
/* forget all cached temporary local slots (called when the current
   function's frame is finalized/reset) */
static void clear_temp_local_var_list(){
    nb_temp_local_vars=0;
}
/* move register 's' (of type 't') to 'r', and flush previous value of r to memory
   if needed */
static void move_reg(int r, int s, int t)
{
    SValue sv;

    if (r != s) {
        save_reg(r);
        /* build a transient SValue describing the source register */
        sv.type.t = t;
        sv.type.ref = NULL;
        sv.r = s;
        sv.c.i = 0;
        load(r, &sv);
    }
}
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition */
/* Emits a call to __bound_ptr_add(ptr, offset) for the two top stack
   entries; the checked pointer comes back in REG_IRET, tagged VT_BOUNDED,
   with c.i recording the call's relocation offset for later patching. */
static void gen_bounded_ptr_add(void)
{
    /* a VT_LOCAL base must be preserved across the helper call */
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
/* Retargets the relocation recorded by gen_bounded_ptr_add() (at
   vtop->c.i) to the size-specific __bound_ptr_indirN helper, so the
   runtime check covers the access width as well. */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    size = type_size(&vtop->type, &align);
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers */
/* Also special-cases calls to setjmp-family functions (register them with
   the bounds runtime), alloca (needs epilog fixup), and NetBSD longjmp. */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check each pending argument that still needs it */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    /* sv is the called function's SValue (just below the arguments) */
    sv = vtop - nb_args;
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev) */
/* Emits (offset, size) pairs into lbounds_section for every stack local
   whose address can escape (arrays, structs, or explicitly address-taken). */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;
            bounds_ptr[1] = size;
        }
    }
}
#endif
/* Wrapper around sym_pop, that potentially also registers local bounds. */
/* 'b' is the stack position to pop back to; 'keep' is forwarded to
   sym_pop (and suppresses bounds registration). Debug info for the
   popped scope is emitted when debug_modes is active. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* advance the address on vtop by 'o' bytes and leave it as an
   unsigned-char lvalue (byte-wise access helper for packed bitfields) */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushs(o);
    gen_op('+');
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    vtop->r |= VT_LVAL;
}
/* single-byte load mode for packed or otherwise unaligned bitfields */
/* Reads the field byte by byte, masking/shifting each byte into an
   accumulator, then sign-extends if the field type is signed.  The
   trailing // comments picture the value stack (top rightmost). */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign-extend: shift the field to the top, then arithmetic-shift back */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields */
/* Writes the field byte by byte: each iteration extracts the next slice
   of the value, merges it with the untouched bits of the target byte, and
   stores it.  The trailing // comments picture the value stack. */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* constant source values can be vdup'd instead of spilled into a reg */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            /* avoid a sign-extension artifact when the mask's top bit is set */
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
/* Adjust a bitfield SValue to the access type recorded in its ref's
   auxtype.  Returns that auxtype: VT_STRUCT means "use the packed
   byte-wise path", -1/0 mean no adjustment information. */
static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
{
    int t;
    if (0 == sv->type.ref)
        return 0;
    t = sv->type.ref->auxtype;
    if (t != -1 && t != VT_STRUCT) {
        /* replace the basic type by the (narrower) access type */
        sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
        sv->r |= VT_LVAL;
    }
    return t;
}
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
/* Returns the register holding the (first word of the) value.  Handles
   bitfield extraction, materializing float constants in rodata, bound
   checks, and two-word (long long / qfloat) loads needing a second reg. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* unaligned/packed field: byte-wise load */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {
            if (!r_ok)
                r = get_reg(rc);
            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long
                       pointer here, so we save any other instances
                       down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = VT_PTRDIFF_T;
                    gaddrof();
                    vpushs(PTR_SIZE);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
/* After this, vtop[-1] holds the low word and vtop[0] the high word,
   both typed VT_INT (sign qualifiers of the original preserved). */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift for the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* simple memory lvalue: second word is 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* otherwise force into a register pair and split it */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
/* vtop[-1] = low word, vtop[0] = high word; result typed 't'. */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* split into words, duplicate each, then rebuild both values */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
/* On 32-bit targets, 64-bit operands are split in two words with
   lexpand()/lbuild(); div/mod/variable-shift go through libtcc1 helper
   calls, the rest is synthesized word-wise. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI modulo helpers return the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift: high word becomes sign replication */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* Signed 64-bit division on unsigned carriers: divide |a| by |b| and
   re-apply the sign of a/b.  Avoids UB on INT64_MIN that a plain signed
   division could trigger.  b must be non-zero (checked by the caller). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? 0 - b : b;   /* |b| */
    uint64_t q = ua / ub;
    /* result is negative iff operand signs differ */
    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* Signed 64-bit "less than" on unsigned carriers: flipping the sign bit
   maps the signed ordering onto the unsigned one. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt */
/* Folds op on two constant operands, applies strength reductions and
   NOP elimination when only one side is constant, and otherwise defers
   to gen_opl()/gen_opi().  Operands are vtop[-1] (v1) and vtop (v2). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int nonconst = (v1->r | v2->r) & VT_NONCONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    /* normalize 32-bit operands: truncate and sign-extend as needed */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted && !(nocode_wanted & unevalmask))
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift without relying on impl-defined signed >> */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        nonconst = VT_NONCONST;
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                /* power of two: compute log2 and switch to a shift */
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such.  */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
    if (vtop->r == VT_CONST)
        vtop->r |= nonconst;
}
/* floating-point negation: on x86 handled by gen_opf directly, on ARM by
   the 0-x idiom, elsewhere by flipping the sign bit through memory */
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation. We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    /* address the most significant byte (holds the sign bit) */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
/* generate a floating point operation with constant propagation */
/* Folds op when both operands are compile-time constants (in long double
   precision), otherwise emits code via gen_opf()/gen_negf().  For
   TOK_NEG only vtop is used (v1 aliases v2). */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    if (op == TOK_NEG)
        v1 = v2;

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        /* NOTE(review): the condition reduces to
           (!ieee_finite(f1) && ieee_finite(f2)); the comment suggests
           (!ieee_finite(f1) || !ieee_finite(f2)) was intended — verify
           against upstream before changing. */
        if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                union { float f; unsigned u; } x1, x2, y;
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold.  */
                if (!const_wanted)
                    goto general_case;
                /* the run-time result of 0.0/0.0 on x87, also of other compilers
                   when used to compile the f1 /= f2 below, would be -nan */
                x1.f = f1, x2.f = f2;
                if (f1 == 0.0)
                    y.u = 0x7fc00000; /* nan */
                else
                    y.u = 0x7f800000; /* infinity */
                y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
                f1 = y.f;
                break;
            }
            f1 /= f2;
            break;
        case TOK_NEG:
            f1 = -f1;
            goto unary_result;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        vtop--;
    unary_result:
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
    } else {
    general_case:
        if (op == TOK_NEG) {
            gen_negf(op);
        } else {
            gen_opf(op);
        }
    }
}
2473 /* print a type. If 'varstr' is not NULL, then the variable is also
2474 printed in the type */
2475 /* XXX: union */
2476 /* XXX: add array and function pointers */
2477 static void type_to_str(char *buf, int buf_size,
2478 CType *type, const char *varstr)
2480 int bt, v, t;
2481 Sym *s, *sa;
2482 char buf1[256];
2483 const char *tstr;
2485 t = type->t;
2486 bt = t & VT_BTYPE;
2487 buf[0] = '\0';
2489 if (t & VT_EXTERN)
2490 pstrcat(buf, buf_size, "extern ");
2491 if (t & VT_STATIC)
2492 pstrcat(buf, buf_size, "static ");
2493 if (t & VT_TYPEDEF)
2494 pstrcat(buf, buf_size, "typedef ");
2495 if (t & VT_INLINE)
2496 pstrcat(buf, buf_size, "inline ");
2497 if (bt != VT_PTR) {
2498 if (t & VT_VOLATILE)
2499 pstrcat(buf, buf_size, "volatile ");
2500 if (t & VT_CONSTANT)
2501 pstrcat(buf, buf_size, "const ");
2503 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2504 || ((t & VT_UNSIGNED)
2505 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2506 && !IS_ENUM(t)
2508 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2510 buf_size -= strlen(buf);
2511 buf += strlen(buf);
2513 switch(bt) {
2514 case VT_VOID:
2515 tstr = "void";
2516 goto add_tstr;
2517 case VT_BOOL:
2518 tstr = "_Bool";
2519 goto add_tstr;
2520 case VT_BYTE:
2521 tstr = "char";
2522 goto add_tstr;
2523 case VT_SHORT:
2524 tstr = "short";
2525 goto add_tstr;
2526 case VT_INT:
2527 tstr = "int";
2528 goto maybe_long;
2529 case VT_LLONG:
2530 tstr = "long long";
2531 maybe_long:
2532 if (t & VT_LONG)
2533 tstr = "long";
2534 if (!IS_ENUM(t))
2535 goto add_tstr;
2536 tstr = "enum ";
2537 goto tstruct;
2538 case VT_FLOAT:
2539 tstr = "float";
2540 goto add_tstr;
2541 case VT_DOUBLE:
2542 tstr = "double";
2543 if (!(t & VT_LONG))
2544 goto add_tstr;
2545 case VT_LDOUBLE:
2546 tstr = "long double";
2547 add_tstr:
2548 pstrcat(buf, buf_size, tstr);
2549 break;
2550 case VT_STRUCT:
2551 tstr = "struct ";
2552 if (IS_UNION(t))
2553 tstr = "union ";
2554 tstruct:
2555 pstrcat(buf, buf_size, tstr);
2556 v = type->ref->v & ~SYM_STRUCT;
2557 if (v >= SYM_FIRST_ANOM)
2558 pstrcat(buf, buf_size, "<anonymous>");
2559 else
2560 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2561 break;
2562 case VT_FUNC:
2563 s = type->ref;
2564 buf1[0]=0;
2565 if (varstr && '*' == *varstr) {
2566 pstrcat(buf1, sizeof(buf1), "(");
2567 pstrcat(buf1, sizeof(buf1), varstr);
2568 pstrcat(buf1, sizeof(buf1), ")");
2570 pstrcat(buf1, buf_size, "(");
2571 sa = s->next;
2572 while (sa != NULL) {
2573 char buf2[256];
2574 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2575 pstrcat(buf1, sizeof(buf1), buf2);
2576 sa = sa->next;
2577 if (sa)
2578 pstrcat(buf1, sizeof(buf1), ", ");
2580 if (s->f.func_type == FUNC_ELLIPSIS)
2581 pstrcat(buf1, sizeof(buf1), ", ...");
2582 pstrcat(buf1, sizeof(buf1), ")");
2583 type_to_str(buf, buf_size, &s->type, buf1);
2584 goto no_var;
2585 case VT_PTR:
2586 s = type->ref;
2587 if (t & (VT_ARRAY|VT_VLA)) {
2588 if (varstr && '*' == *varstr)
2589 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2590 else
2591 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2592 type_to_str(buf, buf_size, &s->type, buf1);
2593 goto no_var;
2595 pstrcpy(buf1, sizeof(buf1), "*");
2596 if (t & VT_CONSTANT)
2597 pstrcat(buf1, buf_size, "const ");
2598 if (t & VT_VOLATILE)
2599 pstrcat(buf1, buf_size, "volatile ");
2600 if (varstr)
2601 pstrcat(buf1, sizeof(buf1), varstr);
2602 type_to_str(buf, buf_size, &s->type, buf1);
2603 goto no_var;
2605 if (varstr) {
2606 pstrcat(buf, buf_size, " ");
2607 pstrcat(buf, buf_size, varstr);
2609 no_var: ;
2612 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2614 char buf1[256], buf2[256];
2615 type_to_str(buf1, sizeof(buf1), st, NULL);
2616 type_to_str(buf2, sizeof(buf2), dt, NULL);
2617 tcc_error(fmt, buf1, buf2);
2620 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2622 char buf1[256], buf2[256];
2623 type_to_str(buf1, sizeof(buf1), st, NULL);
2624 type_to_str(buf2, sizeof(buf2), dt, NULL);
2625 tcc_warning(fmt, buf1, buf2);
2628 static int pointed_size(CType *type)
2630 int align;
2631 return type_size(pointed_type(type), &align);
2634 static inline int is_null_pointer(SValue *p)
2636 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2637 return 0;
2638 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2639 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2640 ((p->type.t & VT_BTYPE) == VT_PTR &&
2641 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2642 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2643 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2647 /* compare function types. OLD functions match any new functions */
2648 static int is_compatible_func(CType *type1, CType *type2)
2650 Sym *s1, *s2;
2652 s1 = type1->ref;
2653 s2 = type2->ref;
2654 if (s1->f.func_call != s2->f.func_call)
2655 return 0;
2656 if (s1->f.func_type != s2->f.func_type
2657 && s1->f.func_type != FUNC_OLD
2658 && s2->f.func_type != FUNC_OLD)
2659 return 0;
2660 for (;;) {
2661 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2662 return 0;
2663 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2664 return 1;
2665 s1 = s1->next;
2666 s2 = s2->next;
2667 if (!s1)
2668 return !s2;
2669 if (!s2)
2670 return 0;
/* return true if type1 and type2 are the same. If unqualified is
   true, qualifiers on the types are ignored.
 */
static int compare_types(CType *type1, CType *type2, int unqualified)
{
    int bt1, t1, t2;

    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
    if (unqualified) {
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);
    }

    /* Default Vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
        t1 &= ~VT_DEFSIGN;
        t2 &= ~VT_DEFSIGN;
    }
    /* XXX: bitfields ? */
    if (t1 != t2)
        return 0;

    /* arrays must have the same element count, unless one of the
       counts is unknown (incomplete array, c < 0) */
    if ((t1 & VT_ARRAY)
        && !(type1->ref->c < 0
          || type2->ref->c < 0
          || type1->ref->c == type2->ref->c))
        return 0;

    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        /* pointers: recurse on the pointed-to types (qualifiers on the
           target are always significant) */
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        /* struct/union compatibility is identity of the definition */
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
        /* If both are enums then they must be the same, if only one is then
           t1 and t2 must be equal, which was checked above already.  */
        return type1->ref == type2->ref;
    } else {
        return 1;
    }
}
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) .
   Returns 1 if the operands can be combined, 0 otherwise. */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1;

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other.  */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            /* the pointer operand wins */
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol before mutating it,
                       so the original type is not modified */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    /* take the known element count from whichever side has it */
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* pointer comparisons yield an integer result */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversions: widest fp type wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

 redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function designators decay to pointers to function, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* ptr - ptr: subtract, then divide by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the pointed-to element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts: the result type depends on the left operand only */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        /* cast both operands to the combined type */
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
/* these targets convert unsigned long long natively */
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {
        /* unsigned long long -> fp: call a libtcc1 runtime helper
           selected by the destination float type */
        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark the result as living in the fp return register */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
/* these targets convert to unsigned long long natively */
#define gen_cvt_ftoi1 gen_cvt_ftoi
#else
/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        /* fp -> unsigned long long: call a libtcc1 runtime helper
           selected by the source float type */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark the result as living in the integer return register(s) */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
#endif
/* special delayed cast for char/short */
static void force_charshort_cast(void)
{
    /* MUSTCAST value 2 means the value is currently held as a long long,
       otherwise as an int */
    int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
    int dbt = vtop->type.t;
    vtop->r &= ~VT_MUSTCAST;
    /* temporarily give vtop its actual register width, perform the
       narrowing cast, then restore the declared type */
    vtop->type.t = sbt;
    gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
    vtop->type.t = dbt;
}
3043 static void gen_cast_s(int t)
3045 CType type;
3046 type.t = t;
3047 type.ref = NULL;
3048 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
error:
            cast_error(&vtop->type, type);
        }

        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't constant-fold conversions to long double when
           cross-compiling, except when no code is wanted anyway */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if (sbt_bt == VT_LLONG) {
                    /* negate-then-negate to convert huge unsigned-looking
                       values without signed overflow */
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* first normalize the source to a full 64 bit value */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                /* then truncate/extend to the destination width */
                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    /* sign-extend from the destination's top bit */
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (nocode_wanted & DATA_ONLY_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        /* these targets have hardware 8/16 bit extension instructions */
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* generic narrowing: shift left then arithmetic/logical shift
           right to sign- or zero-extend from the destination width */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;     /* alignment stored in 'r' at layout time */
        return s->c;   /* size stored in 'c' */
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* incomplete element in an incomplete array: make the
               partial size positive */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is target-ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        /* two-register 128 bit values */
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
/* push type size as known at runtime time on top of value stack. Put
   alignment at 'a' */
static void vpush_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* VLA: the size was computed at declaration time and stored in
           a local variable; type->ref->c is its stack offset */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        int size = type_size(type, a);
        if (size < 0)
            tcc_error("unknown type size");
#if PTR_SIZE == 8
        vpushll(size);
#else
        vpushi(size);
#endif
    }
}
/* return the pointed type of t */
static inline CType *pointed_type(CType *type)
{
    /* for pointer (and array) types, 'ref' holds the target type */
    return &type->ref->type;
}
3374 /* modify type so that its it is a pointer to type. */
3375 ST_FUNC void mk_pointer(CType *type)
3377 Sym *s;
3378 s = sym_push(SYM_FIELD, type, 0, -1);
3379 type->t = VT_PTR | (type->t & VT_STORAGE);
3380 type->ref = s;
3383 /* return true if type1 and type2 are exactly the same (including
3384 qualifiers).
3386 static int is_compatible_types(CType *type1, CType *type2)
3388 return compare_types(type1,type2,0);
3391 /* return true if type1 and type2 are the same (ignoring qualifiers).
3393 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3395 return compare_types(type1,type2,1);
3398 static void cast_error(CType *st, CType *dt)
3400 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
/* verify type compatibility to store vtop in 'dt' type */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk both pointer chains in parallel, recording dropped
           qualifiers, until the base types are no longer pointers */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        /* struct assignment requires compatible (unqualified) types */
        if (!is_compatible_unqualified_types(dt, st)) {
    error:
            cast_error(st, dt);
        }
        break;
    }
}
3489 static void gen_assign_cast(CType *dt)
3491 verify_assign_cast(dt);
3492 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* _Bool bitfields store 0/1, carried as unsigned char */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* bitfield that cannot be accessed as one aligned word */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64)  */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        /* rotate so the saved lvalue is back on top for vstore(),
           leaving the original (pre-op) value below as the result */
        vrotb(3);
        vrotb(3);
    }
    /* add constant: c - TOK_MID yields +1 for ++ and -1 for --
       (assumes TOK_MID lies midway between TOK_INC and TOK_DEC) */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
3691 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3693 /* read the string */
3694 if (tok != TOK_STR)
3695 expect(msg);
3696 cstr_new(astr);
3697 while (tok == TOK_STR) {
3698 /* XXX: add \0 handling too ? */
3699 cstr_cat(astr, tokc.str.data, -1);
3700 next();
3702 cstr_ccat(astr, '\0');
3705 /* If I is >= 1 and a power of two, returns log2(i)+1.
3706 If I is 0 returns 0. */
3707 ST_FUNC int exact_log2p1(int i)
3709 int ret;
3710 if (!i)
3711 return 0;
3712 for (ret = 1; i >= 1 << 8; ret += 8)
3713 i >>= 8;
3714 if (i >= 1 << 4)
3715 ret += 4, i >>= 4;
3716 if (i >= 1 << 2)
3717 ret += 2, i >>= 2;
3718 if (i >= 1 << 1)
3719 ret++;
3720 return ret;
/* Parse __attribute__((...)) GNUC extension.  Recognized attributes are
   recorded into *ad; unknown ones are warned about and their parameter
   list, if any, is skipped.  Handles chained attribute lists via the
   'redo' loop. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    CString astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* bare "aligned" without argument */
                n = MAX_ALIGN;
            }
            /* stored as log2(n)+1, so 0 means "not specified" */
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_NODEBUG1:
        case TOK_NODEBUG2:
            ad->a.nodebug = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp regparm count to the 0..3 range i386 supports */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            /* attr_mode holds VT_xxx + 1 so that 0 means "no mode given"
               (see its use in parse_btype) */
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
/* Look up member 'v' of struct/union 'type'.  On success *cumofs is
   incremented by the member's accumulated byte offset (including any
   enclosing anonymous sub-structs) and the member symbol is returned.
   A failed top-level lookup (v without SYM_FIELD) is a fatal error;
   failed recursive lookups just return NULL. */
static Sym * find_field (CType *type, int v, int *cumofs)
{
    Sym *s = type->ref;
    int v1 = v | SYM_FIELD;   /* member symbols carry SYM_FIELD */

    while ((s = s->next) != NULL) {
        if (s->v == v1) {
            *cumofs += s->c;   /* s->c is the member's offset here */
            return s;
        }
        if ((s->type.t & VT_BTYPE) == VT_STRUCT
            && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
            /* try to find field in anonymous sub-struct/union; the
               recursive call receives v1 (SYM_FIELD already set), so
               it cannot trigger the top-level error path below */
            Sym *ret = find_field (&s->type, v1, cumofs);
            if (ret) {
                *cumofs += s->c;   /* add the sub-struct's own offset */
                return ret;
            }
        }
    }
    if (!(v & SYM_FIELD)) { /* top-level call */
        s = type->ref;
        if (s->c < 0)
            tcc_error("dereferencing incomplete type '%s'",
                get_tok_str(s->v & ~SYM_STRUCT, 0));
        else
            tcc_error("field not found: %s",
                get_tok_str(v, &tokc));
    }
    return NULL;
}
3960 static void check_fields (CType *type, int check)
3962 Sym *s = type->ref;
3964 while ((s = s->next) != NULL) {
3965 int v = s->v & ~SYM_FIELD;
3966 if (v < SYM_FIRST_ANOM) {
3967 TokenSym *ts = table_ident[v - TOK_IDENT];
3968 if (check && (ts->tok & SYM_FIELD))
3969 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
3970 ts->tok ^= SYM_FIELD;
3971 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
3972 check_fields (&s->type, check);
/* Compute offset and alignment for every member of struct/union 'type'
   and store the overall size/alignment into the type's Sym.  Supports
   both PCC/GCC-compatible and MS-compatible bit-field layout (selected
   by tcc_state->ms_bitfields), #pragma pack, and the packed/aligned
   attributes.  A second pass then picks an access type for each
   bit-field so loads/stores stay within the object. */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;          /* running byte offset / final size */
    bit_pos = 0;    /* bit offset within the current byte position c */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1;   /* not a bit-field */
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            /* union: every member is at offset 0; size is the maximum */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non-bit-field) struct member */
            if (pcc)
                c += (bit_pos + 7) >> 3;   /* flush pending bit-field bits */
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                /* normalize: keep bit_pos below one alignment unit */
                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align).  */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width.  */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again).  */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            /* store the bit position inside the member's type word */
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                    BIT_POS(f->type.t),
                    BIT_SIZE(f->type.t)
                    );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;   /* account for trailing bit-field bits */

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;   /* -1: accessible with its declared type */
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;    /* aligned byte offset candidate */
            px = px - (cx << 3);        /* bit position relative to cx */
            if (c0 == cx)
                break;                  /* candidate stopped changing */
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
4225 static void do_Static_assert(void);
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION.
   Parses an optional tag, an optional member/enumerator list in braces,
   and trailing attributes; the resulting type is returned in *type.
   The tag symbol's 'c' field tracks definition state: -1 undefined,
   -2 currently being defined, otherwise the laid-out size. */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* unnamed struct/union/enum: make up an anonymous tag */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token.  */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2;   /* mark as "being defined" to catch recursion */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            long long ll = 0, pl = 0, nl = 0;  /* current / max / min value */
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum, from the min/max values seen */
            t.t = VT_INT;
            if (nl >= 0) {
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            c = 0;          /* set once a real member has been seen */
            flexible = 0;   /* set after a flexible array member */
            while (tok != '}') {
                if (tok == TOK_STATIC_ASSERT) {
                    do_Static_assert();
                    continue;
                }
                if (!parse_btype(&btype, &ad1, 0)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            /* nameless member: only anonymous structs (or the
                               MS extension) are acceptable */
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                    && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | VT_BITFIELD
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous).  */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);
            check_fields(type, 0);
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
4453 static void sym_to_attr(AttributeDef *ad, Sym *s)
4455 merge_symattr(&ad->a, &s->a);
4456 merge_funcattr(&ad->f, &s->f);
4459 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4460 are added to the element type, copied because it could be a typedef. */
4461 static void parse_btype_qualify(CType *type, int qualifiers)
4463 while (type->t & VT_ARRAY) {
4464 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4465 type = &type->ref->type;
4467 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
   't' accumulates the type bits; 'bt' remembers the basic type seen so
   far and 'st' the size modifier (short/long), both -1 while unseen
   (and -2 once a typedef name has been consumed), to diagnose illegal
   combinations like "short char".
 */
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
{
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;
    bt = st = -1;
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

        /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
        tmbt:       tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1, 0)) {
                  /* _Alignas(type-name): take that type's alignment */
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                      n = 1 << (ad1.a.aligned - 1);
                  else
                      type_size(&type1, &n);
              } else {
                  /* _Alignas(constant-expression) */
                  n = expr_const();
                  if (n < 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_COMPLEX:
            tcc_error("_Complex is not yet supported");
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

        /* type modifiers */
        case TOK__Atomic:
            next();
            type->t = t;
            parse_btype_qualify(type, VT_ATOMIC);
            t = type->t;
            if (tok == '(') {
                /* _Atomic(type-name) form */
                parse_expr_type(&type1);
                /* remove all storage modifiers except typedef */
                type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                if (type1.ref)
                    sym_to_attr(ad, type1.ref);
                goto basic_type2;
            }
            break;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* these storage/qualifier keywords are accepted and ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

        /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            next();
            ad->f.func_noreturn = 1;
            break;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* attr_mode is stored as VT_xxx + 1; 0 means unset */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        case TOK_THREAD_LOCAL:
            tcc_error("_Thread_local is not implemented");
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && ignore_label) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply the typedef's type plus any qualifiers seen so far */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
#endif
    type->t = t;
    return type_found;
}
4739 /* convert a function parameter type (array to pointer and function to
4740 function pointer) */
4741 static inline void convert_parameter_type(CType *pt)
4743 /* remove const and volatile qualifiers (XXX: const could be used
4744 to indicate a const function parameter */
4745 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4746 /* array must be transformed to pointer according to ANSI C */
4747 pt->t &= ~VT_ARRAY;
4748 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4749 mk_pointer(pt);
/* Parse "( string-literal..." into *astr; the caller is expected to
   consume the closing ')'. */
ST_FUNC void parse_asm_str(CString *astr)
{
    skip('(');
    parse_mult_str(astr, "string constant");
}
4759 /* Parse an asm label and return the token */
4760 static int asm_label_instr(void)
4762 int v;
4763 CString astr;
4765 next();
4766 parse_asm_str(&astr);
4767 skip(')');
4768 #ifdef ASM_DEBUG
4769 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4770 #endif
4771 v = tok_alloc(astr.data, astr.size - 1)->tok;
4772 cstr_free(&astr);
4773 return v;
/* Parse the "post" part of a declarator: function parameter lists and
   array dimensions (including VLAs).  'storage' carries the storage
   class of the declaration; 'td' is the TYPE_* mode bitmask.  Returns
   0 only when a '(' turned out to start a recursive declarator that
   the caller must handle; otherwise 1. */
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
{
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;
    TokenString *vla_array_tok = NULL;
    int *vla_array_str = NULL;

    if (tok == '(') {
        /* function type, or recursive declarator (return if so) */
        next();
        if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
            return 0;
        if (tok == ')')
            l = 0;
        else if (parse_btype(&pt, &ad1, 0))
            l = FUNC_NEW;
        else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
            merge_attr (ad, &ad1);
            return 0;
        } else
            l = FUNC_OLD;

        first = NULL;
        plast = &first;
        arg_size = 0;
        ++local_scope;
        if (l) {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break;
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    if (n == 0)
                        n = SYM_FIELD;   /* unnamed parameter */
                } else {
                    n = tok;
                    pt.t = VT_VOID; /* invalid type */
                    pt.ref = NULL;
                    next();
                }
                if (n < TOK_UIDENT)
                    expect("identifier");
                convert_parameter_type(&pt);
                arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                /* these symbols may be evaluated for VLArrays (see below, under
                   nocode_wanted) which is why we push them here as normal symbols
                   temporarily.  Example: int func(int a, int b[++a]); */
                s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
                    tcc_error("invalid type");
            }
        } else
            /* if no parameters, then old type prototype */
            l = FUNC_OLD;
        skip(')');
        /* remove parameter symbols from token table, keep on stack */
        if (first) {
            sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
            for (s = first; s; s = s->next)
                s->v |= SYM_FIELD;
        }
        --local_scope;
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            mk_pointer(type);
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
        s->a = ad->a;
        s->f = ad->f;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        next();
        n = -1;    /* -1: no size expression seen */
        t1 = 0;
        if (td & TYPE_PARAM) while (1) {
            /* XXX The optional type-quals and static should only be accepted
               in parameter decls.  The '*' as well, and then even only
               in prototypes (not function defs).  */
            switch (tok) {
            case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
            case TOK_CONST1:
            case TOK_VOLATILE1:
            case TOK_STATIC:
            case '*':
                next();
                continue;
            default:
                break;
            }
            if (tok != ']') {
                /* Code generation is not done now but has to be done
                   at start of function. Save code here for later use. */
                nocode_wanted = 1;
                skip_or_save_block(&vla_array_tok);
                unget_tok(0);
                vla_array_str = vla_array_tok->str;
                begin_macro(vla_array_tok, 2);
                next();
                gexpr();
                end_macro();
                next();
                goto check;
            }
            break;

        } else if (tok != ']') {
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
            else {
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
                nocode_wanted = 0;
                gexpr();
            }
        check:
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                n = 0;
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);

        if ((type->t & VT_BTYPE) == VT_FUNC)
            tcc_error("declaration of an array of functions");
        if ((type->t & VT_BTYPE) == VT_VOID
            || type_size(type, &align) < 0)
            tcc_error("declaration of an array of incomplete type elements");

        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            if (n < 0) {
                if (td & TYPE_NEST)
                    tcc_error("need explicit inner array size in VLAs");
            }
            else {
                /* reserve a stack slot for the VLA dimension and store
                   total size (count * element size) into it */
                loc -= type_size(&int_type, &align);
                loc &= -align;
                n = loc;

                vpush_type_size(type, &align);
                gen_op('*');
                vset(&int_type, VT_LOCAL|VT_LVAL, n);
                vswap();
                vstore();
            }
        }
        if (n != -1)
            vpop();
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;

        if (vla_array_str) {
            /* keep the saved size tokens only for real VLA parameters */
            if (t1 & VT_VLA)
                s->vla_array_str = vla_array_str;
            else
                tok_str_free_str(vla_array_str);
        }
    }
    return 1;
}
4982 /* Parse a type declarator (except basic type), and return the type
4983 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4984 expected. 'type' should contain the basic type. 'ad' is the
4985 attribute definition of the basic type. It can be modified by
4986 type_decl(). If this (possibly abstract) declarator is a pointer chain
4987 it returns the innermost pointed to type (equals *type, but is a different
4988 pointer), otherwise returns type itself, that's used for recursive calls. */
4989 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4991 CType *post, *ret;
4992 int qualifiers, storage;
4994 /* recursive type, remove storage bits first, apply them later again */
4995 storage = type->t & VT_STORAGE;
4996 type->t &= ~VT_STORAGE;
4997 post = ret = type;
4999 while (tok == '*') {
5000 qualifiers = 0;
5001 redo:
5002 next();
5003 switch(tok) {
5004 case TOK__Atomic:
5005 qualifiers |= VT_ATOMIC;
5006 goto redo;
5007 case TOK_CONST1:
5008 case TOK_CONST2:
5009 case TOK_CONST3:
5010 qualifiers |= VT_CONSTANT;
5011 goto redo;
5012 case TOK_VOLATILE1:
5013 case TOK_VOLATILE2:
5014 case TOK_VOLATILE3:
5015 qualifiers |= VT_VOLATILE;
5016 goto redo;
5017 case TOK_RESTRICT1:
5018 case TOK_RESTRICT2:
5019 case TOK_RESTRICT3:
5020 goto redo;
5021 /* XXX: clarify attribute handling */
5022 case TOK_ATTRIBUTE1:
5023 case TOK_ATTRIBUTE2:
5024 parse_attribute(ad);
5025 break;
5027 mk_pointer(type);
5028 type->t |= qualifiers;
5029 if (ret == type)
5030 /* innermost pointed to type is the one for the first derivation */
5031 ret = pointed_type(type);
5034 if (tok == '(') {
5035 /* This is possibly a parameter type list for abstract declarators
5036 ('int ()'), use post_type for testing this. */
5037 if (!post_type(type, ad, 0, td)) {
5038 /* It's not, so it's a nested declarator, and the post operations
5039 apply to the innermost pointed to type (if any). */
5040 /* XXX: this is not correct to modify 'ad' at this point, but
5041 the syntax is not clear */
5042 parse_attribute(ad);
5043 post = type_decl(type, ad, v, td);
5044 skip(')');
5045 } else
5046 goto abstract;
5047 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5048 /* type identifier */
5049 *v = tok;
5050 next();
5051 } else {
5052 abstract:
5053 if (!(td & TYPE_ABSTRACT))
5054 expect("identifier");
5055 *v = 0;
5057 post_type(post, ad, post != ret ? 0 : storage,
5058 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5059 parse_attribute(ad);
5060 type->t |= storage;
5061 return ret;
5064 /* indirection with full error checking and bound check */
5065 ST_FUNC void indir(void)
5067 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5068 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5069 return;
5070 expect("pointer");
5072 if (vtop->r & VT_LVAL)
5073 gv(RC_INT);
5074 vtop->type = *pointed_type(&vtop->type);
5075 /* Arrays and functions are never lvalues */
5076 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5077 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5078 vtop->r |= VT_LVAL;
5079 /* if bound checking, the referenced pointer must be checked */
5080 #ifdef CONFIG_TCC_BCHECK
5081 if (tcc_state->do_bounds_check)
5082 vtop->r |= VT_MUSTBOUND;
5083 #endif
5087 /* pass a parameter to a function and do type checking and casting */
5088 static void gfunc_param_typed(Sym *func, Sym *arg)
5090 int func_type;
5091 CType type;
5093 func_type = func->f.func_type;
5094 if (func_type == FUNC_OLD ||
5095 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5096 /* default casting : only need to convert float to double */
5097 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5098 gen_cast_s(VT_DOUBLE);
5099 } else if (vtop->type.t & VT_BITFIELD) {
5100 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5101 type.ref = vtop->type.ref;
5102 gen_cast(&type);
5103 } else if (vtop->r & VT_MUSTCAST) {
5104 force_charshort_cast();
5106 } else if (arg == NULL) {
5107 tcc_error("too many arguments to function");
5108 } else {
5109 type = arg->type;
5110 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5111 gen_assign_cast(&type);
5115 /* parse an expression and return its type without any side effect. */
5116 static void expr_type(CType *type, void (*expr_fn)(void))
5118 nocode_wanted++;
5119 expr_fn();
5120 *type = vtop->type;
5121 vpop();
5122 nocode_wanted--;
5125 /* parse an expression of the form '(type)' or '(expr)' and return its
5126 type */
5127 static void parse_expr_type(CType *type)
5129 int n;
5130 AttributeDef ad;
5132 skip('(');
5133 if (parse_btype(type, &ad, 0)) {
5134 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5135 } else {
5136 expr_type(type, gexpr);
5138 skip(')');
5141 static void parse_type(CType *type)
5143 AttributeDef ad;
5144 int n;
5146 if (!parse_btype(type, &ad, 0)) {
5147 expect("type");
5149 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5152 static void parse_builtin_params(int nc, const char *args)
5154 char c, sep = '(';
5155 CType type;
5156 if (nc)
5157 nocode_wanted++;
5158 next();
5159 if (*args == 0)
5160 skip(sep);
5161 while ((c = *args++)) {
5162 skip(sep);
5163 sep = ',';
5164 if (c == 't') {
5165 parse_type(&type);
5166 vpush(&type);
5167 continue;
5169 expr_eq();
5170 type.ref = NULL;
5171 type.t = 0;
5172 switch (c) {
5173 case 'e':
5174 continue;
5175 case 'V':
5176 type.t = VT_CONSTANT;
5177 case 'v':
5178 type.t |= VT_VOID;
5179 mk_pointer (&type);
5180 break;
5181 case 'S':
5182 type.t = VT_CONSTANT;
5183 case 's':
5184 type.t |= char_type.t;
5185 mk_pointer (&type);
5186 break;
5187 case 'i':
5188 type.t = VT_INT;
5189 break;
5190 case 'l':
5191 type.t = VT_SIZE_T;
5192 break;
5193 default:
5194 break;
5196 gen_assign_cast(&type);
5198 skip(')');
5199 if (nc)
5200 nocode_wanted--;
5203 static void parse_atomic(int atok)
5205 int size, align, arg, t, save = 0;
5206 CType *atom, *atom_ptr, ct = {0};
5207 SValue store;
5208 char buf[40];
5209 static const char *const templates[] = {
5211 * Each entry consists of callback and function template.
5212 * The template represents argument types and return type.
5214 * ? void (return-only)
5215 * b bool
5216 * a atomic
5217 * A read-only atomic
5218 * p pointer to memory
5219 * v value
5220 * l load pointer
5221 * s save pointer
5222 * m memory model
5225 /* keep in order of appearance in tcctok.h: */
5226 /* __atomic_store */ "alm.?",
5227 /* __atomic_load */ "Asm.v",
5228 /* __atomic_exchange */ "alsm.v",
5229 /* __atomic_compare_exchange */ "aplbmm.b",
5230 /* __atomic_fetch_add */ "avm.v",
5231 /* __atomic_fetch_sub */ "avm.v",
5232 /* __atomic_fetch_or */ "avm.v",
5233 /* __atomic_fetch_xor */ "avm.v",
5234 /* __atomic_fetch_and */ "avm.v",
5235 /* __atomic_fetch_nand */ "avm.v",
5236 /* __atomic_and_fetch */ "avm.v",
5237 /* __atomic_sub_fetch */ "avm.v",
5238 /* __atomic_or_fetch */ "avm.v",
5239 /* __atomic_xor_fetch */ "avm.v",
5240 /* __atomic_and_fetch */ "avm.v",
5241 /* __atomic_nand_fetch */ "avm.v"
5243 const char *template = templates[(atok - TOK___atomic_store)];
5245 atom = atom_ptr = NULL;
5246 size = 0; /* pacify compiler */
5247 next();
5248 skip('(');
5249 for (arg = 0;;) {
5250 expr_eq();
5251 switch (template[arg]) {
5252 case 'a':
5253 case 'A':
5254 atom_ptr = &vtop->type;
5255 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5256 expect("pointer");
5257 atom = pointed_type(atom_ptr);
5258 size = type_size(atom, &align);
5259 if (size > 8
5260 || (size & (size - 1))
5261 || (atok > TOK___atomic_compare_exchange
5262 && (0 == btype_size(atom->t & VT_BTYPE)
5263 || (atom->t & VT_BTYPE) == VT_PTR)))
5264 expect("integral or integer-sized pointer target type");
5265 /* GCC does not care either: */
5266 /* if (!(atom->t & VT_ATOMIC))
5267 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5268 break;
5270 case 'p':
5271 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5272 || type_size(pointed_type(&vtop->type), &align) != size)
5273 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5274 gen_assign_cast(atom_ptr);
5275 break;
5276 case 'v':
5277 gen_assign_cast(atom);
5278 break;
5279 case 'l':
5280 indir();
5281 gen_assign_cast(atom);
5282 break;
5283 case 's':
5284 save = 1;
5285 indir();
5286 store = *vtop;
5287 vpop();
5288 break;
5289 case 'm':
5290 gen_assign_cast(&int_type);
5291 break;
5292 case 'b':
5293 ct.t = VT_BOOL;
5294 gen_assign_cast(&ct);
5295 break;
5297 if ('.' == template[++arg])
5298 break;
5299 skip(',');
5301 skip(')');
5303 ct.t = VT_VOID;
5304 switch (template[arg + 1]) {
5305 case 'b':
5306 ct.t = VT_BOOL;
5307 break;
5308 case 'v':
5309 ct = *atom;
5310 break;
5313 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5314 vpush_helper_func(tok_alloc_const(buf));
5315 vrott(arg - save + 1);
5316 gfunc_call(arg - save);
5318 vpush(&ct);
5319 PUT_R_RET(vtop, ct.t);
5320 t = ct.t & VT_BTYPE;
5321 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5322 #ifdef PROMOTE_RET
5323 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5324 #else
5325 vtop->type.t = VT_INT;
5326 #endif
5328 gen_cast(&ct);
5329 if (save) {
5330 vpush(&ct);
5331 *vtop = store;
5332 vswap();
5333 vstore();
5337 ST_FUNC void unary(void)
5339 int n, t, align, size, r, sizeof_caller;
5340 CType type;
5341 Sym *s;
5342 AttributeDef ad;
5344 /* generate line number info */
5345 if (debug_modes)
5346 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
5348 sizeof_caller = in_sizeof;
5349 in_sizeof = 0;
5350 type.ref = NULL;
5351 /* XXX: GCC 2.95.3 does not generate a table although it should be
5352 better here */
5353 tok_next:
5354 switch(tok) {
5355 case TOK_EXTENSION:
5356 next();
5357 goto tok_next;
5358 case TOK_LCHAR:
5359 #ifdef TCC_TARGET_PE
5360 t = VT_SHORT|VT_UNSIGNED;
5361 goto push_tokc;
5362 #endif
5363 case TOK_CINT:
5364 case TOK_CCHAR:
5365 t = VT_INT;
5366 push_tokc:
5367 type.t = t;
5368 vsetc(&type, VT_CONST, &tokc);
5369 next();
5370 break;
5371 case TOK_CUINT:
5372 t = VT_INT | VT_UNSIGNED;
5373 goto push_tokc;
5374 case TOK_CLLONG:
5375 t = VT_LLONG;
5376 goto push_tokc;
5377 case TOK_CULLONG:
5378 t = VT_LLONG | VT_UNSIGNED;
5379 goto push_tokc;
5380 case TOK_CFLOAT:
5381 t = VT_FLOAT;
5382 goto push_tokc;
5383 case TOK_CDOUBLE:
5384 t = VT_DOUBLE;
5385 goto push_tokc;
5386 case TOK_CLDOUBLE:
5387 t = VT_LDOUBLE;
5388 goto push_tokc;
5389 case TOK_CLONG:
5390 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5391 goto push_tokc;
5392 case TOK_CULONG:
5393 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5394 goto push_tokc;
5395 case TOK___FUNCTION__:
5396 if (!gnu_ext)
5397 goto tok_identifier;
5398 /* fall thru */
5399 case TOK___FUNC__:
5401 Section *sec;
5402 int len;
5403 /* special function name identifier */
5404 len = strlen(funcname) + 1;
5405 /* generate char[len] type */
5406 type.t = char_type.t;
5407 if (tcc_state->warn_write_strings & WARN_ON)
5408 type.t |= VT_CONSTANT;
5409 mk_pointer(&type);
5410 type.t |= VT_ARRAY;
5411 type.ref->c = len;
5412 sec = rodata_section;
5413 vpush_ref(&type, sec, sec->data_offset, len);
5414 if (!NODATA_WANTED)
5415 memcpy(section_ptr_add(sec, len), funcname, len);
5416 next();
5418 break;
5419 case TOK_LSTR:
5420 #ifdef TCC_TARGET_PE
5421 t = VT_SHORT | VT_UNSIGNED;
5422 #else
5423 t = VT_INT;
5424 #endif
5425 goto str_init;
5426 case TOK_STR:
5427 /* string parsing */
5428 t = char_type.t;
5429 str_init:
5430 if (tcc_state->warn_write_strings & WARN_ON)
5431 t |= VT_CONSTANT;
5432 type.t = t;
5433 mk_pointer(&type);
5434 type.t |= VT_ARRAY;
5435 memset(&ad, 0, sizeof(AttributeDef));
5436 ad.section = rodata_section;
5437 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5438 break;
5439 case '(':
5440 next();
5441 /* cast ? */
5442 if (parse_btype(&type, &ad, 0)) {
5443 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5444 skip(')');
5445 /* check ISOC99 compound literal */
5446 if (tok == '{') {
5447 /* data is allocated locally by default */
5448 if (global_expr)
5449 r = VT_CONST;
5450 else
5451 r = VT_LOCAL;
5452 /* all except arrays are lvalues */
5453 if (!(type.t & VT_ARRAY))
5454 r |= VT_LVAL;
5455 memset(&ad, 0, sizeof(AttributeDef));
5456 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5457 } else {
5458 if (sizeof_caller) {
5459 vpush(&type);
5460 return;
5462 unary();
5463 gen_cast(&type);
5465 } else if (tok == '{') {
5466 int saved_nocode_wanted = nocode_wanted;
5467 if (const_wanted && !(nocode_wanted & unevalmask))
5468 expect("constant");
5469 if (0 == local_scope)
5470 tcc_error("statement expression outside of function");
5471 /* save all registers */
5472 save_regs(0);
5473 /* statement expression : we do not accept break/continue
5474 inside as GCC does. We do retain the nocode_wanted state,
5475 as statement expressions can't ever be entered from the
5476 outside, so any reactivation of code emission (from labels
5477 or loop heads) can be disabled again after the end of it. */
5478 block(1);
5479 /* If the statement expr can be entered, then we retain the current
5480 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5481 If it can't be entered then the state is that from before the
5482 statement expression. */
5483 if (saved_nocode_wanted)
5484 nocode_wanted = saved_nocode_wanted;
5485 skip(')');
5486 } else {
5487 gexpr();
5488 skip(')');
5490 break;
5491 case '*':
5492 next();
5493 unary();
5494 indir();
5495 break;
5496 case '&':
5497 next();
5498 unary();
5499 /* functions names must be treated as function pointers,
5500 except for unary '&' and sizeof. Since we consider that
5501 functions are not lvalues, we only have to handle it
5502 there and in function calls. */
5503 /* arrays can also be used although they are not lvalues */
5504 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5505 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5506 test_lvalue();
5507 if (vtop->sym)
5508 vtop->sym->a.addrtaken = 1;
5509 mk_pointer(&vtop->type);
5510 gaddrof();
5511 break;
5512 case '!':
5513 next();
5514 unary();
5515 gen_test_zero(TOK_EQ);
5516 break;
5517 case '~':
5518 next();
5519 unary();
5520 vpushi(-1);
5521 gen_op('^');
5522 break;
5523 case '+':
5524 next();
5525 unary();
5526 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5527 tcc_error("pointer not accepted for unary plus");
5528 /* In order to force cast, we add zero, except for floating point
5529 where we really need an noop (otherwise -0.0 will be transformed
5530 into +0.0). */
5531 if (!is_float(vtop->type.t)) {
5532 vpushi(0);
5533 gen_op('+');
5535 break;
5536 case TOK_SIZEOF:
5537 case TOK_ALIGNOF1:
5538 case TOK_ALIGNOF2:
5539 case TOK_ALIGNOF3:
5540 t = tok;
5541 next();
5542 in_sizeof++;
5543 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5544 if (t == TOK_SIZEOF) {
5545 vpush_type_size(&type, &align);
5546 gen_cast_s(VT_SIZE_T);
5547 } else {
5548 type_size(&type, &align);
5549 s = NULL;
5550 if (vtop[1].r & VT_SYM)
5551 s = vtop[1].sym; /* hack: accessing previous vtop */
5552 if (s && s->a.aligned)
5553 align = 1 << (s->a.aligned - 1);
5554 vpushs(align);
5556 break;
5558 case TOK_builtin_expect:
5559 /* __builtin_expect is a no-op for now */
5560 parse_builtin_params(0, "ee");
5561 vpop();
5562 break;
5563 case TOK_builtin_types_compatible_p:
5564 parse_builtin_params(0, "tt");
5565 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5566 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5567 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5568 vtop -= 2;
5569 vpushi(n);
5570 break;
5571 case TOK_builtin_choose_expr:
5573 int64_t c;
5574 next();
5575 skip('(');
5576 c = expr_const64();
5577 skip(',');
5578 if (!c) {
5579 nocode_wanted++;
5581 expr_eq();
5582 if (!c) {
5583 vpop();
5584 nocode_wanted--;
5586 skip(',');
5587 if (c) {
5588 nocode_wanted++;
5590 expr_eq();
5591 if (c) {
5592 vpop();
5593 nocode_wanted--;
5595 skip(')');
5597 break;
5598 case TOK_builtin_constant_p:
5599 constant_p = 1;
5600 parse_builtin_params(1, "e");
5601 n = constant_p &&
5602 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5603 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5604 vtop--;
5605 vpushi(n);
5606 break;
5607 case TOK_builtin_frame_address:
5608 case TOK_builtin_return_address:
5610 int tok1 = tok;
5611 int64_t level;
5612 next();
5613 skip('(');
5614 level = expr_const64();
5615 if (level < 0) {
5616 tcc_error("%s only takes positive integers",
5617 tok1 == TOK_builtin_return_address ?
5618 "__builtin_return_address" :
5619 "__builtin_frame_address");
5621 skip(')');
5622 type.t = VT_VOID;
5623 mk_pointer(&type);
5624 vset(&type, VT_LOCAL, 0); /* local frame */
5625 while (level--) {
5626 #ifdef TCC_TARGET_RISCV64
5627 vpushi(2*PTR_SIZE);
5628 gen_op('-');
5629 #endif
5630 mk_pointer(&vtop->type);
5631 indir(); /* -> parent frame */
5633 if (tok1 == TOK_builtin_return_address) {
5634 // assume return address is just above frame pointer on stack
5635 #ifdef TCC_TARGET_ARM
5636 vpushi(2*PTR_SIZE);
5637 gen_op('+');
5638 #elif defined TCC_TARGET_RISCV64
5639 vpushi(PTR_SIZE);
5640 gen_op('-');
5641 #else
5642 vpushi(PTR_SIZE);
5643 gen_op('+');
5644 #endif
5645 mk_pointer(&vtop->type);
5646 indir();
5649 break;
5650 #ifdef TCC_TARGET_RISCV64
5651 case TOK_builtin_va_start:
5652 parse_builtin_params(0, "ee");
5653 r = vtop->r & VT_VALMASK;
5654 if (r == VT_LLOCAL)
5655 r = VT_LOCAL;
5656 if (r != VT_LOCAL)
5657 tcc_error("__builtin_va_start expects a local variable");
5658 gen_va_start();
5659 vstore();
5660 break;
5661 #endif
5662 #ifdef TCC_TARGET_X86_64
5663 #ifdef TCC_TARGET_PE
5664 case TOK_builtin_va_start:
5665 parse_builtin_params(0, "ee");
5666 r = vtop->r & VT_VALMASK;
5667 if (r == VT_LLOCAL)
5668 r = VT_LOCAL;
5669 if (r != VT_LOCAL)
5670 tcc_error("__builtin_va_start expects a local variable");
5671 vtop->r = r;
5672 vtop->type = char_pointer_type;
5673 vtop->c.i += 8;
5674 vstore();
5675 break;
5676 #else
5677 case TOK_builtin_va_arg_types:
5678 parse_builtin_params(0, "t");
5679 vpushi(classify_x86_64_va_arg(&vtop->type));
5680 vswap();
5681 vpop();
5682 break;
5683 #endif
5684 #endif
5686 #ifdef TCC_TARGET_ARM64
5687 case TOK_builtin_va_start: {
5688 parse_builtin_params(0, "ee");
5689 //xx check types
5690 gen_va_start();
5691 vpushi(0);
5692 vtop->type.t = VT_VOID;
5693 break;
5695 case TOK_builtin_va_arg: {
5696 parse_builtin_params(0, "et");
5697 type = vtop->type;
5698 vpop();
5699 //xx check types
5700 gen_va_arg(&type);
5701 vtop->type = type;
5702 break;
5704 case TOK___arm64_clear_cache: {
5705 parse_builtin_params(0, "ee");
5706 gen_clear_cache();
5707 vpushi(0);
5708 vtop->type.t = VT_VOID;
5709 break;
5711 #endif
5713 /* atomic operations */
5714 case TOK___atomic_store:
5715 case TOK___atomic_load:
5716 case TOK___atomic_exchange:
5717 case TOK___atomic_compare_exchange:
5718 case TOK___atomic_fetch_add:
5719 case TOK___atomic_fetch_sub:
5720 case TOK___atomic_fetch_or:
5721 case TOK___atomic_fetch_xor:
5722 case TOK___atomic_fetch_and:
5723 case TOK___atomic_fetch_nand:
5724 case TOK___atomic_add_fetch:
5725 case TOK___atomic_sub_fetch:
5726 case TOK___atomic_or_fetch:
5727 case TOK___atomic_xor_fetch:
5728 case TOK___atomic_and_fetch:
5729 case TOK___atomic_nand_fetch:
5730 parse_atomic(tok);
5731 break;
5733 /* pre operations */
5734 case TOK_INC:
5735 case TOK_DEC:
5736 t = tok;
5737 next();
5738 unary();
5739 inc(0, t);
5740 break;
5741 case '-':
5742 next();
5743 unary();
5744 if (is_float(vtop->type.t)) {
5745 gen_opif(TOK_NEG);
5746 } else {
5747 vpushi(0);
5748 vswap();
5749 gen_op('-');
5751 break;
5752 case TOK_LAND:
5753 if (!gnu_ext)
5754 goto tok_identifier;
5755 next();
5756 /* allow to take the address of a label */
5757 if (tok < TOK_UIDENT)
5758 expect("label identifier");
5759 s = label_find(tok);
5760 if (!s) {
5761 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5762 } else {
5763 if (s->r == LABEL_DECLARED)
5764 s->r = LABEL_FORWARD;
5766 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5767 s->type.t = VT_VOID;
5768 mk_pointer(&s->type);
5769 s->type.t |= VT_STATIC;
5771 vpushsym(&s->type, s);
5772 next();
5773 break;
5775 case TOK_GENERIC:
5777 CType controlling_type;
5778 int has_default = 0;
5779 int has_match = 0;
5780 int learn = 0;
5781 TokenString *str = NULL;
5782 int saved_const_wanted = const_wanted;
5784 next();
5785 skip('(');
5786 const_wanted = 0;
5787 expr_type(&controlling_type, expr_eq);
5788 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5789 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5790 mk_pointer(&controlling_type);
5791 const_wanted = saved_const_wanted;
5792 for (;;) {
5793 learn = 0;
5794 skip(',');
5795 if (tok == TOK_DEFAULT) {
5796 if (has_default)
5797 tcc_error("too many 'default'");
5798 has_default = 1;
5799 if (!has_match)
5800 learn = 1;
5801 next();
5802 } else {
5803 AttributeDef ad_tmp;
5804 int itmp;
5805 CType cur_type;
5807 parse_btype(&cur_type, &ad_tmp, 0);
5808 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5809 if (compare_types(&controlling_type, &cur_type, 0)) {
5810 if (has_match) {
5811 tcc_error("type match twice");
5813 has_match = 1;
5814 learn = 1;
5817 skip(':');
5818 if (learn) {
5819 if (str)
5820 tok_str_free(str);
5821 skip_or_save_block(&str);
5822 } else {
5823 skip_or_save_block(NULL);
5825 if (tok == ')')
5826 break;
5828 if (!str) {
5829 char buf[60];
5830 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5831 tcc_error("type '%s' does not match any association", buf);
5833 begin_macro(str, 1);
5834 next();
5835 expr_eq();
5836 if (tok != TOK_EOF)
5837 expect(",");
5838 end_macro();
5839 next();
5840 break;
5842 // special qnan , snan and infinity values
5843 case TOK___NAN__:
5844 n = 0x7fc00000;
5845 special_math_val:
5846 vpushi(n);
5847 vtop->type.t = VT_FLOAT;
5848 next();
5849 break;
5850 case TOK___SNAN__:
5851 n = 0x7f800001;
5852 goto special_math_val;
5853 case TOK___INF__:
5854 n = 0x7f800000;
5855 goto special_math_val;
5857 default:
5858 tok_identifier:
5859 t = tok;
5860 next();
5861 if (t < TOK_UIDENT)
5862 expect("identifier");
5863 s = sym_find(t);
5864 if (!s || IS_ASM_SYM(s)) {
5865 const char *name = get_tok_str(t, NULL);
5866 if (tok != '(')
5867 tcc_error("'%s' undeclared", name);
5868 /* for simple function calls, we tolerate undeclared
5869 external reference to int() function */
5870 tcc_warning_c(warn_implicit_function_declaration)(
5871 "implicit declaration of function '%s'", name);
5872 s = external_global_sym(t, &func_old_type);
5875 r = s->r;
5876 /* A symbol that has a register is a local register variable,
5877 which starts out as VT_LOCAL value. */
5878 if ((r & VT_VALMASK) < VT_CONST)
5879 r = (r & ~VT_VALMASK) | VT_LOCAL;
5881 vset(&s->type, r, s->c);
5882 /* Point to s as backpointer (even without r&VT_SYM).
5883 Will be used by at least the x86 inline asm parser for
5884 regvars. */
5885 vtop->sym = s;
5887 if (r & VT_SYM) {
5888 vtop->c.i = 0;
5889 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5890 vtop->c.i = s->enum_val;
5892 break;
5895 /* post operations */
5896 while (1) {
5897 if (tok == TOK_INC || tok == TOK_DEC) {
5898 inc(1, tok);
5899 next();
5900 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5901 int qualifiers, cumofs = 0;
5902 /* field */
5903 if (tok == TOK_ARROW)
5904 indir();
5905 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5906 test_lvalue();
5907 gaddrof();
5908 /* expect pointer on structure */
5909 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5910 expect("struct or union");
5911 if (tok == TOK_CDOUBLE)
5912 expect("field name");
5913 next();
5914 if (tok == TOK_CINT || tok == TOK_CUINT)
5915 expect("field name");
5916 s = find_field(&vtop->type, tok, &cumofs);
5917 /* add field offset to pointer */
5918 vtop->type = char_pointer_type; /* change type to 'char *' */
5919 vpushi(cumofs);
5920 gen_op('+');
5921 /* change type to field type, and set to lvalue */
5922 vtop->type = s->type;
5923 vtop->type.t |= qualifiers;
5924 /* an array is never an lvalue */
5925 if (!(vtop->type.t & VT_ARRAY)) {
5926 vtop->r |= VT_LVAL;
5927 #ifdef CONFIG_TCC_BCHECK
5928 /* if bound checking, the referenced pointer must be checked */
5929 if (tcc_state->do_bounds_check)
5930 vtop->r |= VT_MUSTBOUND;
5931 #endif
5933 next();
5934 } else if (tok == '[') {
5935 next();
5936 gexpr();
5937 gen_op('+');
5938 indir();
5939 skip(']');
5940 } else if (tok == '(') {
5941 SValue ret;
5942 Sym *sa;
5943 int nb_args, ret_nregs, ret_align, regsize, variadic;
5945 /* function call */
5946 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5947 /* pointer test (no array accepted) */
5948 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5949 vtop->type = *pointed_type(&vtop->type);
5950 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5951 goto error_func;
5952 } else {
5953 error_func:
5954 expect("function pointer");
5956 } else {
5957 vtop->r &= ~VT_LVAL; /* no lvalue */
5959 /* get return type */
5960 s = vtop->type.ref;
5961 next();
5962 sa = s->next; /* first parameter */
5963 nb_args = regsize = 0;
5964 ret.r2 = VT_CONST;
5965 /* compute first implicit argument if a structure is returned */
5966 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5967 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5968 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5969 &ret_align, &regsize);
5970 if (ret_nregs <= 0) {
5971 /* get some space for the returned structure */
5972 size = type_size(&s->type, &align);
5973 #ifdef TCC_TARGET_ARM64
5974 /* On arm64, a small struct is return in registers.
5975 It is much easier to write it to memory if we know
5976 that we are allowed to write some extra bytes, so
5977 round the allocated space up to a power of 2: */
5978 if (size < 16)
5979 while (size & (size - 1))
5980 size = (size | (size - 1)) + 1;
5981 #endif
5982 loc = (loc - size) & -align;
5983 ret.type = s->type;
5984 ret.r = VT_LOCAL | VT_LVAL;
5985 /* pass it as 'int' to avoid structure arg passing
5986 problems */
5987 vseti(VT_LOCAL, loc);
5988 #ifdef CONFIG_TCC_BCHECK
5989 if (tcc_state->do_bounds_check)
5990 --loc;
5991 #endif
5992 ret.c = vtop->c;
5993 if (ret_nregs < 0)
5994 vtop--;
5995 else
5996 nb_args++;
5998 } else {
5999 ret_nregs = 1;
6000 ret.type = s->type;
6003 if (ret_nregs > 0) {
6004 /* return in register */
6005 ret.c.i = 0;
6006 PUT_R_RET(&ret, ret.type.t);
6008 if (tok != ')') {
6009 for(;;) {
6010 expr_eq();
6011 gfunc_param_typed(s, sa);
6012 nb_args++;
6013 if (sa)
6014 sa = sa->next;
6015 if (tok == ')')
6016 break;
6017 skip(',');
6020 if (sa)
6021 tcc_error("too few arguments to function");
6022 skip(')');
6023 gfunc_call(nb_args);
6025 if (ret_nregs < 0) {
6026 vsetc(&ret.type, ret.r, &ret.c);
6027 #ifdef TCC_TARGET_RISCV64
6028 arch_transfer_ret_regs(1);
6029 #endif
6030 } else {
6031 /* return value */
6032 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6033 vsetc(&ret.type, r, &ret.c);
6034 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6037 /* handle packed struct return */
6038 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6039 int addr, offset;
6041 size = type_size(&s->type, &align);
6042 /* We're writing whole regs often, make sure there's enough
6043 space. Assume register size is power of 2. */
6044 if (regsize > align)
6045 align = regsize;
6046 loc = (loc - size) & -align;
6047 addr = loc;
6048 offset = 0;
6049 for (;;) {
6050 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6051 vswap();
6052 vstore();
6053 vtop--;
6054 if (--ret_nregs == 0)
6055 break;
6056 offset += regsize;
6058 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6061 /* Promote char/short return values. This is matters only
6062 for calling function that were not compiled by TCC and
6063 only on some architectures. For those where it doesn't
6064 matter we expect things to be already promoted to int,
6065 but not larger. */
6066 t = s->type.t & VT_BTYPE;
6067 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6068 #ifdef PROMOTE_RET
6069 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6070 #else
6071 vtop->type.t = VT_INT;
6072 #endif
6075 if (s->f.func_noreturn) {
6076 if (debug_modes)
6077 tcc_tcov_block_end(tcc_state, -1);
6078 CODE_OFF();
6080 } else {
6081 break;
6086 #ifndef precedence_parser /* original top-down parser */
6088 static void expr_prod(void)
6090 int t;
6092 unary();
6093 while ((t = tok) == '*' || t == '/' || t == '%') {
6094 next();
6095 unary();
6096 gen_op(t);
6100 static void expr_sum(void)
6102 int t;
6104 expr_prod();
6105 while ((t = tok) == '+' || t == '-') {
6106 next();
6107 expr_prod();
6108 gen_op(t);
6112 static void expr_shift(void)
6114 int t;
6116 expr_sum();
6117 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6118 next();
6119 expr_sum();
6120 gen_op(t);
6124 static void expr_cmp(void)
6126 int t;
6128 expr_shift();
6129 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6130 t == TOK_ULT || t == TOK_UGE) {
6131 next();
6132 expr_shift();
6133 gen_op(t);
6137 static void expr_cmpeq(void)
6139 int t;
6141 expr_cmp();
6142 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6143 next();
6144 expr_cmp();
6145 gen_op(t);
6149 static void expr_and(void)
6151 expr_cmpeq();
6152 while (tok == '&') {
6153 next();
6154 expr_cmpeq();
6155 gen_op('&');
6159 static void expr_xor(void)
6161 expr_and();
6162 while (tok == '^') {
6163 next();
6164 expr_and();
6165 gen_op('^');
6169 static void expr_or(void)
6171 expr_xor();
6172 while (tok == '|') {
6173 next();
6174 expr_xor();
6175 gen_op('|');
6179 static void expr_landor(int op);
6181 static void expr_land(void)
6183 expr_or();
6184 if (tok == TOK_LAND)
6185 expr_landor(tok);
6188 static void expr_lor(void)
6190 expr_land();
6191 if (tok == TOK_LOR)
6192 expr_landor(tok);
6195 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6196 #else /* defined precedence_parser */
6197 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6198 # define expr_lor() unary(), expr_infix(1)
6200 static int precedence(int tok)
6202 switch (tok) {
6203 case TOK_LOR: return 1;
6204 case TOK_LAND: return 2;
6205 case '|': return 3;
6206 case '^': return 4;
6207 case '&': return 5;
6208 case TOK_EQ: case TOK_NE: return 6;
6209 relat: case TOK_ULT: case TOK_UGE: return 7;
6210 case TOK_SHL: case TOK_SAR: return 8;
6211 case '+': case '-': return 9;
6212 case '*': case '/': case '%': return 10;
6213 default:
6214 if (tok >= TOK_ULE && tok <= TOK_GT)
6215 goto relat;
6216 return 0;
/* memoized precedences for single-byte tokens */
static unsigned char prec[256];
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
6226 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6228 static void expr_landor(int op);
6230 static void expr_infix(int p)
6232 int t = tok, p2;
6233 while ((p2 = precedence(t)) >= p) {
6234 if (t == TOK_LOR || t == TOK_LAND) {
6235 expr_landor(t);
6236 } else {
6237 next();
6238 unary();
6239 if (precedence(tok) > p2)
6240 expr_infix(p2 + 1);
6241 gen_op(t);
6243 t = tok;
6246 #endif
6248 /* Assuming vtop is a value used in a conditional context
6249 (i.e. compared with zero) return 0 if it's false, 1 if
6250 true and -1 if it can't be statically determined. */
6251 static int condition_3way(void)
6253 int c = -1;
6254 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6255 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6256 vdup();
6257 gen_cast_s(VT_BOOL);
6258 c = vtop->c.i;
6259 vpop();
6261 return c;
6264 static void expr_landor(int op)
6266 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6267 for(;;) {
6268 c = f ? i : condition_3way();
6269 if (c < 0)
6270 save_regs(1), cc = 0;
6271 else if (c != i)
6272 nocode_wanted++, f = 1;
6273 if (tok != op)
6274 break;
6275 if (c < 0)
6276 t = gvtst(i, t);
6277 else
6278 vpop();
6279 next();
6280 expr_landor_next(op);
6282 if (cc || f) {
6283 vpop();
6284 vpushi(i ^ f);
6285 gsym(t);
6286 nocode_wanted -= f;
6287 } else {
6288 gvtst_set(i, t);
6292 static int is_cond_bool(SValue *sv)
6294 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6295 && (sv->type.t & VT_BTYPE) == VT_INT)
6296 return (unsigned)sv->c.i < 2;
6297 if (sv->r == VT_CMP)
6298 return 1;
6299 return 0;
/* Parse and generate code for the conditional operator e1 ? e2 : e3,
   including the GNU extension "e1 ? : e3" (g set below).  A constant
   condition (c == 0 or 1) suppresses code for the dead branch via
   nocode_wanted; a runtime condition (c < 0) generates both branches
   and unifies their results in one register (move_reg below).
   Struct lvalues are kept lvalues by taking addresses and re-deref'ing
   at the end (islv). */
6302 static void expr_cond(void)
6304 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6305 SValue sv;
6306 CType type;
6308 expr_lor();
6309 if (tok == '?') {
6310 next();
/* c: 1/0 if the condition folds to a compile-time constant, -1 otherwise */
6311 c = condition_3way();
/* g: GNU "?:" with omitted middle operand reuses the condition value */
6312 g = (tok == ':' && gnu_ext);
6313 tt = 0;
6314 if (!g) {
6315 if (c < 0) {
6316 save_regs(1);
6317 tt = gvtst(1, 0);
6318 } else {
6319 vpop();
6321 } else if (c < 0) {
6322 /* needed to avoid having different registers saved in
6323 each branch */
6324 save_regs(1);
6325 gv_dup();
6326 tt = gvtst(0, 0);
6329 if (c == 0)
6330 nocode_wanted++;
6331 if (!g)
6332 gexpr();
6334 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6335 mk_pointer(&vtop->type);
6336 sv = *vtop; /* save value to handle it later */
6337 vtop--; /* no vpop so that FP stack is not flushed */
6339 if (g) {
6340 u = tt;
6341 } else if (c < 0) {
6342 u = gjmp(0);
6343 gsym(tt);
6344 } else
6345 u = 0;
6347 if (c == 0)
6348 nocode_wanted--;
6349 if (c == 1)
6350 nocode_wanted++;
6351 skip(':');
6352 expr_cond();
6354 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6355 mk_pointer(&vtop->type);
6357 /* cast operands to correct type according to ISOC rules */
6358 if (!combine_types(&type, &sv, vtop, '?'))
6359 type_incompatibility_error(&sv.type, &vtop->type,
6360 "type mismatch in conditional expression (have '%s' and '%s')");
6362 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6363 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6364 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6365 this code jumps directly to the if's then/else branches. */
6366 t1 = gvtst(0, 0);
6367 t2 = gjmp(0);
6368 gsym(u);
6369 vpushv(&sv);
6370 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6371 gvtst_set(0, t1);
6372 gvtst_set(1, t2);
6373 gen_cast(&type);
6374 // tcc_warning("two conditions expr_cond");
6375 return;
6378 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6379 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6380 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6382 /* now we convert second operand */
6383 if (c != 1) {
6384 gen_cast(&type);
6385 if (islv) {
6386 mk_pointer(&vtop->type);
6387 gaddrof();
6388 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6389 gaddrof();
6392 rc = RC_TYPE(type.t);
6393 /* for long longs, we use fixed registers to avoid having
6394 to handle a complicated move */
6395 if (USING_TWO_WORDS(type.t))
6396 rc = RC_RET(type.t);
6398 tt = r2 = 0;
6399 if (c < 0) {
6400 r2 = gv(rc);
6401 tt = gjmp(0);
6403 gsym(u);
6404 if (c == 1)
6405 nocode_wanted--;
6407 /* this is horrible, but we must also convert first
6408 operand */
6409 if (c != 0) {
6410 *vtop = sv;
6411 gen_cast(&type);
6412 if (islv) {
6413 mk_pointer(&vtop->type);
6414 gaddrof();
6415 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6416 gaddrof();
/* runtime condition: force both branch results into the same register */
6419 if (c < 0) {
6420 r1 = gv(rc);
6421 move_reg(r2, r1, islv ? VT_PTR : type.t);
6422 vtop->r = r2;
6423 gsym(tt);
6426 if (islv)
6427 indir();
6431 static void expr_eq(void)
6433 int t;
6435 expr_cond();
6436 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6437 test_lvalue();
6438 next();
6439 if (t == '=') {
6440 expr_eq();
6441 } else {
6442 vdup();
6443 expr_eq();
6444 gen_op(TOK_ASSIGN_OP(t));
6446 vstore();
6450 ST_FUNC void gexpr(void)
6452 while (1) {
6453 expr_eq();
6454 if (tok != ',')
6455 break;
6456 constant_p &= (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6457 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6458 vpop();
6459 next();
6463 /* parse a constant expression and return value in vtop. */
6464 static void expr_const1(void)
6466 const_wanted++;
6467 nocode_wanted += unevalmask + 1;
6468 expr_cond();
6469 nocode_wanted -= unevalmask + 1;
6470 const_wanted--;
6473 /* parse an integer constant and return its value. */
6474 static inline int64_t expr_const64(void)
6476 int64_t c;
6477 expr_const1();
6478 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6479 expect("constant expression");
6480 c = vtop->c.i;
6481 vpop();
6482 return c;
6485 /* parse an integer constant and return its value.
6486 Complain if it doesn't fit 32bit (signed or unsigned). */
6487 ST_FUNC int expr_const(void)
6489 int c;
6490 int64_t wc = expr_const64();
6491 c = wc;
6492 if (c != wc && (unsigned)c != wc)
6493 tcc_error("constant exceeds 32 bit");
6494 return c;
6497 /* ------------------------------------------------------------------------- */
6498 /* return from function */
6500 #ifndef TCC_TARGET_ARM64
/* Generate code returning the value on vtop from the current function.
   Struct returns go one of three ways depending on gfunc_sret():
   ret_nregs < 0  -> arch-specific transfer (RISC-V hook),
   ret_nregs == 0 -> copy through the implicit result pointer (func_vc),
   ret_nregs > 0  -> pack the struct into consecutive return registers. */
6501 static void gfunc_return(CType *func_type)
6503 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6504 CType type, ret_type;
6505 int ret_align, ret_nregs, regsize;
6506 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6507 &ret_align, &regsize);
6508 if (ret_nregs < 0) {
6509 #ifdef TCC_TARGET_RISCV64
6510 arch_transfer_ret_regs(0);
6511 #endif
6512 } else if (0 == ret_nregs) {
6513 /* if returning structure, must copy it to implicit
6514 first pointer arg location */
6515 type = *func_type;
6516 mk_pointer(&type);
6517 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6518 indir();
6519 vswap();
6520 /* copy structure value to pointer */
6521 vstore();
6522 } else {
6523 /* returning structure packed into registers */
6524 int size, addr, align, rc;
6525 size = type_size(func_type,&align);
/* if the value is not suitably aligned on the stack, spill it to a
   fresh, properly aligned local slot first */
6526 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6527 (vtop->c.i & (ret_align-1)))
6528 && (align & (ret_align-1))) {
6529 loc = (loc - size) & -ret_align;
6530 addr = loc;
6531 type = *func_type;
6532 vset(&type, VT_LOCAL | VT_LVAL, addr);
6533 vswap();
6534 vstore();
6535 vpop();
6536 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6538 vtop->type = ret_type;
6539 rc = RC_RET(ret_type.t);
6540 if (ret_nregs == 1)
6541 gv(rc);
6542 else {
6543 for (;;) {
6544 vdup();
6545 gv(rc);
6546 vpop();
6547 if (--ret_nregs == 0)
6548 break;
6549 /* We assume that when a structure is returned in multiple
6550 registers, their classes are consecutive values of the
6551 suite s(n) = 2^n */
6552 rc <<= 1;
6553 vtop->c.i += regsize;
6557 } else {
6558 gv(RC_RET(func_type->t));
6560 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6562 #endif
6564 static void check_func_return(void)
6566 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6567 return;
6568 if (!strcmp (funcname, "main")
6569 && (func_vt.t & VT_BTYPE) == VT_INT) {
6570 /* main returns 0 by default */
6571 vpushi(0);
6572 gen_assign_cast(&func_vt);
6573 gfunc_return(&func_vt);
6574 } else {
6575 tcc_warning("function might return no value: '%s'", funcname);
6579 /* ------------------------------------------------------------------------- */
6580 /* switch/case */
6582 static int case_cmpi(const void *pa, const void *pb)
6584 int64_t a = (*(struct case_t**) pa)->v1;
6585 int64_t b = (*(struct case_t**) pb)->v1;
6586 return a < b ? -1 : a > b;
6589 static int case_cmpu(const void *pa, const void *pb)
6591 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6592 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6593 return a < b ? -1 : a > b;
/* test vtop (chained with jump list T) and resolve the resulting
   false-jump to address A */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
/* Generate dispatch code for the sorted case array BASE[0..LEN-1]
   against the switch value on vtop: binary search while more than 8
   entries remain, then a linear run of equality/range tests.  The
   no-match path is appended to the *BSYM jump chain. */
6601 static void gcase(struct case_t **base, int len, int *bsym)
6603 struct case_t *p;
6604 int e;
6605 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6606 while (len > 8) {
6607 /* binary search */
6608 p = base[len/2];
6609 vdup();
6610 if (ll)
6611 vpushll(p->v2);
6612 else
6613 vpushi(p->v2);
6614 gen_op(TOK_LE);
6615 e = gvtst(1, 0);
6616 vdup();
6617 if (ll)
6618 vpushll(p->v1);
6619 else
6620 vpushi(p->v1);
6621 gen_op(TOK_GE);
6622 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6623 /* x < v1 */
6624 gcase(base, len/2, bsym);
6625 /* x > v2 */
6626 gsym(e);
/* continue iterating on the upper half (tail recursion turned loop) */
6627 e = len/2 + 1;
6628 base += e; len -= e;
6630 /* linear scan */
6631 while (len--) {
6632 p = *base++;
6633 vdup();
6634 if (ll)
6635 vpushll(p->v2);
6636 else
6637 vpushi(p->v2);
6638 if (p->v1 == p->v2) {
6639 gen_op(TOK_EQ);
6640 gtst_addr(0, p->sym);
6641 } else {
6642 gen_op(TOK_LE);
6643 e = gvtst(1, 0);
6644 vdup();
6645 if (ll)
6646 vpushll(p->v1);
6647 else
6648 vpushi(p->v1);
6649 gen_op(TOK_GE);
6650 gtst_addr(0, p->sym);
6651 gsym(e);
6654 *bsym = gjmp(*bsym);
6657 /* ------------------------------------------------------------------------- */
6658 /* __attribute__((cleanup(fn))) */
6660 static void try_call_scope_cleanup(Sym *stop)
6662 Sym *cls = cur_scope->cl.s;
6664 for (; cls != stop; cls = cls->ncl) {
6665 Sym *fs = cls->next;
6666 Sym *vs = cls->prev_tok;
6668 vpushsym(&fs->type, fs);
6669 vset(&vs->type, vs->r, vs->c);
6670 vtop->sym = vs;
6671 mk_pointer(&vtop->type);
6672 gaddrof();
6673 gfunc_call(1);
6677 static void try_call_cleanup_goto(Sym *cleanupstate)
6679 Sym *oc, *cc;
6680 int ocd, ccd;
6682 if (!cur_scope->cl.s)
6683 return;
6685 /* search NCA of both cleanup chains given parents and initial depth */
6686 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6687 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6689 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6691 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6694 try_call_scope_cleanup(cc);
6697 /* call 'func' for each __attribute__((cleanup(func))) */
/* Emit the deferred cleanup calls for pending forward gotos that jump
   out of scope O.  For each pending goto deeper than O's cleanup depth,
   a stub is generated (resolve its jump, run the cleanups, re-jump) and
   its recorded depth updated; fully unwound entries are removed from
   the pending list.  Fall-through code skips the stubs via 'jmp'.
   NOTE(review): 'remove_pending' is entered both from the else branch
   and by goto from inside the then branch. */
6698 static void block_cleanup(struct scope *o)
6700 int jmp = 0;
6701 Sym *g, **pg;
6702 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6703 if (g->prev_tok->r & LABEL_FORWARD) {
6704 Sym *pcl = g->next;
6705 if (!jmp)
6706 jmp = gjmp(0);
6707 gsym(pcl->jnext);
6708 try_call_scope_cleanup(o->cl.s);
6709 pcl->jnext = gjmp(0);
6710 if (!o->cl.n)
6711 goto remove_pending;
6712 g->c = o->cl.n;
6713 pg = &g->prev;
6714 } else {
6715 remove_pending:
6716 *pg = g->prev;
6717 sym_free(g);
6720 gsym(jmp);
6721 try_call_scope_cleanup(o->cl.s);
6724 /* ------------------------------------------------------------------------- */
6725 /* VLA */
/* restore the stack pointer saved at LOC (0 means nothing to restore) */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
6733 static void vla_leave(struct scope *o)
6735 struct scope *c = cur_scope, *v = NULL;
6736 for (; c != o && c; c = c->prev)
6737 if (c->vla.num)
6738 v = c;
6739 if (v)
6740 vla_restore(v->vla.locorig);
6743 /* ------------------------------------------------------------------------- */
6744 /* local scopes */
6746 static void new_scope(struct scope *o)
6748 /* copy and link previous scope */
6749 *o = *cur_scope;
6750 o->prev = cur_scope;
6751 cur_scope = o;
6752 cur_scope->vla.num = 0;
6754 /* record local declaration stack position */
6755 o->lstk = local_stack;
6756 o->llstk = local_label_stack;
6757 ++local_scope;
6759 if (debug_modes)
6760 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
6763 static void prev_scope(struct scope *o, int is_expr)
6765 vla_leave(o->prev);
6767 if (o->cl.s != o->prev->cl.s)
6768 block_cleanup(o->prev);
6770 /* pop locally defined labels */
6771 label_pop(&local_label_stack, o->llstk, is_expr);
6773 /* In the is_expr case (a statement expression is finished here),
6774 vtop might refer to symbols on the local_stack. Either via the
6775 type or via vtop->sym. We can't pop those nor any that in turn
6776 might be referred to. To make it easier we don't roll back
6777 any symbols in that case; some upper level call to block() will
6778 do that. We do have to remove such symbols from the lookup
6779 tables, though. sym_pop will do that. */
6781 /* pop locally defined symbols */
6782 pop_local_syms(o->lstk, is_expr);
6783 cur_scope = o->prev;
6784 --local_scope;
6786 if (debug_modes)
6787 tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
6790 /* leave a scope via break/continue(/goto) */
6791 static void leave_scope(struct scope *o)
6793 if (!o)
6794 return;
6795 try_call_scope_cleanup(o->cl.s);
6796 vla_leave(o);
6799 /* ------------------------------------------------------------------------- */
6800 /* call block from 'for do while' loops */
6802 static void lblock(int *bsym, int *csym)
6804 struct scope *lo = loop_scope, *co = cur_scope;
6805 int *b = co->bsym, *c = co->csym;
6806 if (csym) {
6807 co->csym = csym;
6808 loop_scope = co;
6810 co->bsym = bsym;
6811 block(0);
6812 co->bsym = b;
6813 if (csym) {
6814 co->csym = c;
6815 loop_scope = lo;
/* Parse and generate code for one statement (or compound statement).
   When IS_EXPR is set this is a GNU statement expression and the value
   of the last expression statement is kept on the value stack (a void
   placeholder is pushed up front).  Dispatches on the leading token:
   if/while/{...}/return/break/continue/for/do/switch/case/default/
   goto/asm, then labels and plain expression statements. */
6819 static void block(int is_expr)
6821 int a, b, c, d, e, t;
6822 struct scope o;
6823 Sym *s;
6825 if (is_expr) {
6826 /* default return value is (void) */
6827 vpushi(0);
6828 vtop->type.t = VT_VOID;
6831 again:
6832 t = tok;
6833 /* If the token carries a value, next() might destroy it. Only with
6834 invalid code such as f(){"123"4;} */
6835 if (TOK_HAS_VALUE(t))
6836 goto expr;
6837 next();
6839 if (debug_modes)
6840 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);
/* 'a' below is the jump emitted when the if condition is false */
6842 if (t == TOK_IF) {
6843 //new_scope(&o); //?? breaks tests2.122
6844 skip('(');
6845 gexpr();
6846 skip(')');
6847 a = gvtst(1, 0);
6848 block(0);
6849 if (tok == TOK_ELSE) {
6850 d = gjmp(0);
6851 gsym(a);
6852 next();
6853 block(0);
6854 gsym(d); /* patch else jmp */
6855 } else {
6856 gsym(a);
6858 //prev_scope(&o,0); //?? breaks tests2.122
/* while: d = loop head, a = exit chain, b = continue chain */
6860 } else if (t == TOK_WHILE) {
6861 new_scope(&o);
6862 d = gind();
6863 skip('(');
6864 gexpr();
6865 skip(')');
6866 a = gvtst(1, 0);
6867 b = 0;
6868 lblock(&a, &b);
6869 gjmp_addr(d);
6870 gsym_addr(b, d);
6871 gsym(a);
6872 prev_scope(&o,0);
6873 } else if (t == '{') {
6874 new_scope(&o);
6876 /* handle local labels declarations */
6877 while (tok == TOK_LABEL) {
6878 do {
6879 next();
6880 if (tok < TOK_UIDENT)
6881 expect("label identifier");
6882 label_push(&local_label_stack, tok, LABEL_DECLARED);
6883 next();
6884 } while (tok == ',');
6885 skip(';');
6888 while (tok != '}') {
6889 decl(VT_LOCAL);
6890 if (tok != '}') {
6891 if (is_expr)
6892 vpop();
6893 block(is_expr);
6897 prev_scope(&o, is_expr);
/* local_scope == 0 here means this was the function's top-level block */
6898 if (local_scope)
6899 next();
6900 else if (!nocode_wanted)
6901 check_func_return();
6903 } else if (t == TOK_RETURN) {
6904 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6905 if (tok != ';') {
6906 gexpr();
6907 if (b) {
6908 gen_assign_cast(&func_vt);
6909 } else {
6910 if (vtop->type.t != VT_VOID)
6911 tcc_warning("void function returns a value");
6912 vtop--;
6914 } else if (b) {
6915 tcc_warning("'return' with no value");
6916 b = 0;
6918 leave_scope(root_scope);
6919 if (b)
6920 gfunc_return(&func_vt);
6921 skip(';');
6922 /* jump unless last stmt in top-level block */
6923 if (tok != '}' || local_scope != 1)
6924 rsym = gjmp(rsym);
6925 if (debug_modes)
6926 tcc_tcov_block_end (tcc_state, -1);
6927 CODE_OFF();
6929 } else if (t == TOK_BREAK) {
6930 /* compute jump */
6931 if (!cur_scope->bsym)
6932 tcc_error("cannot break");
6933 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
6934 leave_scope(cur_switch->scope);
6935 else
6936 leave_scope(loop_scope);
6937 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6938 skip(';');
6940 } else if (t == TOK_CONTINUE) {
6941 /* compute jump */
6942 if (!cur_scope->csym)
6943 tcc_error("cannot continue");
6944 leave_scope(loop_scope);
6945 *cur_scope->csym = gjmp(*cur_scope->csym);
6946 skip(';');
/* for: c = condition address, d = increment address (or same),
   a = exit chain, b = continue chain */
6948 } else if (t == TOK_FOR) {
6949 new_scope(&o);
6951 skip('(');
6952 if (tok != ';') {
6953 /* c99 for-loop init decl? */
6954 if (!decl(VT_JMP)) {
6955 /* no, regular for-loop init expr */
6956 gexpr();
6957 vpop();
6960 skip(';');
6961 a = b = 0;
6962 c = d = gind();
6963 if (tok != ';') {
6964 gexpr();
6965 a = gvtst(1, 0);
6967 skip(';');
6968 if (tok != ')') {
6969 e = gjmp(0);
6970 d = gind();
6971 gexpr();
6972 vpop();
6973 gjmp_addr(c);
6974 gsym(e);
6976 skip(')');
6977 lblock(&a, &b);
6978 gjmp_addr(d);
6979 gsym_addr(b, d);
6980 gsym(a);
6981 prev_scope(&o, 0);
6983 } else if (t == TOK_DO) {
6984 new_scope(&o);
6985 a = b = 0;
6986 d = gind();
6987 lblock(&a, &b);
6988 gsym(b);
6989 skip(TOK_WHILE);
6990 skip('(');
6991 gexpr();
6992 skip(')');
6993 skip(';');
6994 prev_scope(&o,0);
6995 c = gvtst(0, 0);
6996 gsym_addr(c, d);
6997 gsym(a);
6999 } else if (t == TOK_SWITCH) {
7000 struct switch_t *sw;
7002 new_scope(&o);
7003 sw = tcc_mallocz(sizeof *sw);
7004 sw->bsym = &a;
7005 sw->scope = cur_scope;
7006 sw->prev = cur_switch;
7007 sw->nocode_wanted = nocode_wanted;
7008 cur_switch = sw;
7010 skip('(');
7011 gexpr();
7012 skip(')');
7013 sw->sv = *vtop--; /* save switch value */
7015 a = 0;
7016 b = gjmp(0); /* jump to first case */
7017 lblock(&a, NULL);
7018 a = gjmp(a); /* add implicit break */
7019 /* case lookup */
7020 gsym(b);
7022 if (sw->nocode_wanted)
7023 goto skip_switch;
/* sort the collected cases and reject overlapping ranges */
7024 if (sw->sv.type.t & VT_UNSIGNED)
7025 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7026 else
7027 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7028 for (b = 1; b < sw->n; b++)
7029 if (sw->sv.type.t & VT_UNSIGNED
7030 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7031 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7032 tcc_error("duplicate case value");
7033 vpushv(&sw->sv);
7034 gv(RC_INT);
7035 d = 0, gcase(sw->p, sw->n, &d);
7036 vpop();
7037 if (sw->def_sym)
7038 gsym_addr(d, sw->def_sym);
7039 else
7040 gsym(d);
7041 skip_switch:
7042 /* break label */
7043 gsym(a);
7045 dynarray_reset(&sw->p, &sw->n);
7046 cur_switch = sw->prev;
7047 tcc_free(sw);
7048 prev_scope(&o,0);
7050 } else if (t == TOK_CASE) {
7051 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7052 if (!cur_switch)
7053 expect("switch");
7054 cr->v1 = cr->v2 = expr_const64();
/* GNU case range extension: case LO ... HI: */
7055 if (gnu_ext && tok == TOK_DOTS) {
7056 next();
7057 cr->v2 = expr_const64();
7058 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7059 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7060 tcc_warning("empty case range");
7062 /* case and default are unreachable from a switch under nocode_wanted */
7063 if (!cur_switch->nocode_wanted)
7064 cr->sym = gind();
7065 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7066 skip(':');
7067 is_expr = 0;
7068 goto block_after_label;
7070 } else if (t == TOK_DEFAULT) {
7071 if (!cur_switch)
7072 expect("switch");
7073 if (cur_switch->def_sym)
7074 tcc_error("too many 'default'");
7075 cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
7076 skip(':');
7077 is_expr = 0;
7078 goto block_after_label;
7080 } else if (t == TOK_GOTO) {
7081 if (cur_scope->vla.num)
7082 vla_restore(cur_scope->vla.locorig);
7083 if (tok == '*' && gnu_ext) {
7084 /* computed goto */
7085 next();
7086 gexpr();
7087 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7088 expect("pointer");
7089 ggoto();
7091 } else if (tok >= TOK_UIDENT) {
7092 s = label_find(tok);
7093 /* put forward definition if needed */
7094 if (!s)
7095 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7096 else if (s->r == LABEL_DECLARED)
7097 s->r = LABEL_FORWARD;
7099 if (s->r & LABEL_FORWARD) {
7100 /* start new goto chain for cleanups, linked via label->next */
7101 if (cur_scope->cl.s && !nocode_wanted) {
7102 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7103 pending_gotos->prev_tok = s;
7104 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7105 pending_gotos->next = s;
7107 s->jnext = gjmp(s->jnext);
7108 } else {
7109 try_call_cleanup_goto(s->cleanupstate);
7110 gjmp_addr(s->jnext);
7112 next();
7114 } else {
7115 expect("label identifier");
7117 skip(';');
7119 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7120 asm_instr();
7122 } else {
7123 if (tok == ':' && t >= TOK_UIDENT) {
7124 /* label case */
7125 next();
7126 s = label_find(t);
7127 if (s) {
7128 if (s->r == LABEL_DEFINED)
7129 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7130 s->r = LABEL_DEFINED;
7131 if (s->next) {
7132 Sym *pcl; /* pending cleanup goto */
7133 for (pcl = s->next; pcl; pcl = pcl->prev)
7134 gsym(pcl->jnext);
7135 sym_pop(&s->next, NULL, 0);
7136 } else
7137 gsym(s->jnext);
7138 } else {
7139 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7141 s->jnext = gind();
7142 s->cleanupstate = cur_scope->cl.s;
7144 block_after_label:
7146 /* Accept attributes after labels (e.g. 'unused') */
7147 AttributeDef ad_tmp;
7148 parse_attribute(&ad_tmp);
7150 if (debug_modes)
7151 tcc_tcov_reset_ind(tcc_state);
7152 vla_restore(cur_scope->vla.loc);
7153 if (tok != '}')
7154 goto again;
7155 /* we accept this, but it is a mistake */
7156 tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
7158 } else {
7159 /* expression case */
7160 if (t != ';') {
7161 unget_tok(t);
7162 expr:
7163 if (is_expr) {
7164 vpop();
7165 gexpr();
7166 } else {
7167 gexpr();
7168 vpop();
7170 skip(';');
7175 if (debug_modes)
7176 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
7179 /* This skips over a stream of tokens containing balanced {} and ()
7180 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7181 with a '{'). If STR then allocates and stores the skipped tokens
7182 in *STR. This doesn't check if () and {} are nested correctly,
7183 i.e. "({)}" is accepted. */
7184 static void skip_or_save_block(TokenString **str)
7186 int braces = tok == '{';
7187 int level = 0;
7188 if (str)
7189 *str = tok_str_alloc();
7191 while (1) {
7192 int t = tok;
7193 if (level == 0
7194 && (t == ','
7195 || t == ';'
7196 || t == '}'
7197 || t == ')'
7198 || t == ']'))
7199 break;
7200 if (t == TOK_EOF) {
7201 if (str || level > 0)
7202 tcc_error("unexpected end of file");
7203 else
7204 break;
7206 if (str)
7207 tok_str_add_tok(*str);
7208 next();
7209 if (t == '{' || t == '(' || t == '[') {
7210 level++;
7211 } else if (t == '}' || t == ')' || t == ']') {
7212 level--;
7213 if (level == 0 && braces && t == '}')
7214 break;
7217 if (str) {
7218 tok_str_add(*str, -1);
7219 tok_str_add(*str, 0);
7223 #define EXPR_CONST 1
7224 #define EXPR_ANY 2
7226 static void parse_init_elem(int expr_type)
7228 int saved_global_expr;
7229 switch(expr_type) {
7230 case EXPR_CONST:
7231 /* compound literals must be allocated globally in this case */
7232 saved_global_expr = global_expr;
7233 global_expr = 1;
7234 expr_const1();
7235 global_expr = saved_global_expr;
7236 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7237 (compound literals). */
7238 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7239 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7240 || vtop->sym->v < SYM_FIRST_ANOM))
7241 #ifdef TCC_TARGET_PE
7242 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7243 #endif
7245 tcc_error("initializer element is not constant");
7246 break;
7247 case EXPR_ANY:
7248 expr_eq();
7249 break;
7253 #if 1
7254 static void init_assert(init_params *p, int offset)
7256 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7257 : !nocode_wanted && offset > p->local_offset)
7258 tcc_internal_error("initializer overflow");
7260 #else
7261 #define init_assert(sec, offset)
7262 #endif
7264 /* put zeros for variable based init */
7265 static void init_putz(init_params *p, unsigned long c, int size)
7267 init_assert(p, c + size);
7268 if (p->sec) {
7269 /* nothing to do because globals are already set to zero */
7270 } else {
7271 vpush_helper_func(TOK_memset);
7272 vseti(VT_LOCAL, c);
7273 #ifdef TCC_TARGET_ARM
7274 vpushs(size);
7275 vpushi(0);
7276 #else
7277 vpushi(0);
7278 vpushs(size);
7279 #endif
7280 gfunc_call(3);
7284 #define DIF_FIRST 1
7285 #define DIF_SIZE_ONLY 2
7286 #define DIF_HAVE_ELEM 4
7287 #define DIF_CLEAR 8
7289 /* delete relocations for specified range c ... c + size. Unfortunately
7290 in very special cases, relocations may occur unordered */
7291 static void decl_design_delrels(Section *sec, int c, int size)
7293 ElfW_Rel *rel, *rel2, *rel_end;
7294 if (!sec || !sec->reloc)
7295 return;
7296 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7297 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7298 while (rel < rel_end) {
7299 if (rel->r_offset >= c && rel->r_offset < c + size) {
7300 sec->reloc->data_offset -= sizeof *rel;
7301 } else {
7302 if (rel2 != rel)
7303 memcpy(rel2, rel, sizeof *rel);
7304 ++rel2;
7306 ++rel;
7310 static void decl_design_flex(init_params *p, Sym *ref, int index)
7312 if (ref == p->flex_array_ref) {
7313 if (index >= ref->c)
7314 ref->c = index + 1;
7315 } else if (ref->c < 0)
7316 tcc_error("flexible array has zero size in this context");
7319 /* t is the array or struct type. c is the array or struct
7320 address. cur_field is the pointer to the current
7321 field, for arrays the 'c' member contains the current start
7322 index. 'flags' is as in decl_initializer.
7323 'al' contains the already initialized length of the
7324 current container (starting at c). This returns the new length of that. */
7325 static int decl_designator(init_params *p, CType *type, unsigned long c,
7326 Sym **cur_field, int flags, int al)
7328 Sym *s, *f;
7329 int index, index_last, align, l, nb_elems, elem_size;
7330 unsigned long corig = c;
7332 elem_size = 0;
7333 nb_elems = 1;
7335 if (flags & DIF_HAVE_ELEM)
7336 goto no_designator;
/* GNU obsolete designator syntax: "field: value" */
7338 if (gnu_ext && tok >= TOK_UIDENT) {
7339 l = tok, next();
7340 if (tok == ':')
7341 goto struct_field;
7342 unget_tok(l);
7345 /* NOTE: we only support ranges for last designator */
7346 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7347 if (tok == '[') {
/* array designator: [index] or GNU range [lo ... hi] */
7348 if (!(type->t & VT_ARRAY))
7349 expect("array type");
7350 next();
7351 index = index_last = expr_const();
7352 if (tok == TOK_DOTS && gnu_ext) {
7353 next();
7354 index_last = expr_const();
7356 skip(']');
7357 s = type->ref;
7358 decl_design_flex(p, s, index_last);
7359 if (index < 0 || index_last >= s->c || index_last < index)
7360 tcc_error("index exceeds array bounds or range is empty");
7361 if (cur_field)
7362 (*cur_field)->c = index_last;
7363 type = pointed_type(type);
7364 elem_size = type_size(type, &align);
7365 c += index * elem_size;
7366 nb_elems = index_last - index + 1;
7367 } else {
/* struct/union designator: .member */
7368 int cumofs;
7369 next();
7370 l = tok;
7371 struct_field:
7372 next();
7373 if ((type->t & VT_BTYPE) != VT_STRUCT)
7374 expect("struct/union type");
7375 cumofs = 0;
7376 f = find_field(type, l, &cumofs);
7377 if (cur_field)
7378 *cur_field = f;
7379 type = &f->type;
7380 c += cumofs;
7382 cur_field = NULL;
7384 if (!cur_field) {
7385 if (tok == '=') {
7386 next();
7387 } else if (!gnu_ext) {
7388 expect("=");
7390 } else {
7391 no_designator:
/* no designator: advance to the next array element or struct field */
7392 if (type->t & VT_ARRAY) {
7393 index = (*cur_field)->c;
7394 s = type->ref;
7395 decl_design_flex(p, s, index);
7396 if (index >= s->c)
7397 tcc_error("too many initializers");
7398 type = pointed_type(type);
7399 elem_size = type_size(type, &align);
7400 c += index * elem_size;
7401 } else {
7402 f = *cur_field;
7403 /* Skip bitfield padding. Also with size 32 and 64. */
7404 while (f && (f->v & SYM_FIRST_ANOM) &&
7405 is_integer_btype(f->type.t & VT_BTYPE))
7406 *cur_field = f = f->next;
7407 if (!f)
7408 tcc_error("too many initializers");
7409 type = &f->type;
7410 c += f->c;
7414 if (!elem_size) /* for structs */
7415 elem_size = type_size(type, &align);
7417 /* Using designators the same element can be initialized more
7418 than once. In that case we need to delete possibly already
7419 existing relocations. */
7420 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7421 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7422 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7425 decl_initializer(p, type, c, flags & ~DIF_FIRST);
/* a range designator initializes one element, then replicates it */
7427 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7428 Sym aref = {0};
7429 CType t1;
7430 int i;
7431 if (p->sec || (type->t & VT_ARRAY)) {
7432 /* make init_putv/vstore believe it were a struct */
7433 aref.c = elem_size;
7434 t1.t = VT_STRUCT, t1.ref = &aref;
7435 type = &t1;
7437 if (p->sec)
7438 vpush_ref(type, p->sec, c, elem_size);
7439 else
7440 vset(type, VT_LOCAL|VT_LVAL, c);
7441 for (i = 1; i < nb_elems; i++) {
7442 vdup();
7443 init_putv(p, type, c + elem_size * i);
7445 vpop();
7448 c += nb_elems * elem_size;
7449 if (c - corig > al)
7450 al = c - corig;
7451 return al;
7454 /* store a value or an expression directly in global data or in local array */
7455 static void init_putv(init_params *p, CType *type, unsigned long c)
7457 int bt;
7458 void *ptr;
7459 CType dtype;
7460 int size, align;
7461 Section *sec = p->sec;
7462 uint64_t val;
7464 dtype = *type;
7465 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7467 size = type_size(type, &align);
7468 if (type->t & VT_BITFIELD)
7469 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7470 init_assert(p, c + size);
7472 if (sec) {
7473 /* XXX: not portable */
7474 /* XXX: generate error if incorrect relocation */
7475 gen_assign_cast(&dtype);
7476 bt = type->t & VT_BTYPE;
7478 if ((vtop->r & VT_SYM)
7479 && bt != VT_PTR
7480 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7481 || (type->t & VT_BITFIELD))
7482 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7484 tcc_error("initializer element is not computable at load time");
7486 if (NODATA_WANTED) {
7487 vtop--;
7488 return;
7491 ptr = sec->data + c;
7492 val = vtop->c.i;
7494 /* XXX: make code faster ? */
7495 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7496 vtop->sym->v >= SYM_FIRST_ANOM &&
7497 /* XXX This rejects compound literals like
7498 '(void *){ptr}'. The problem is that '&sym' is
7499 represented the same way, which would be ruled out
7500 by the SYM_FIRST_ANOM check above, but also '"string"'
7501 in 'char *p = "string"' is represented the same
7502 with the type being VT_PTR and the symbol being an
7503 anonymous one. That is, there's no difference in vtop
7504 between '(void *){x}' and '&(void *){x}'. Ignore
7505 pointer typed entities here. Hopefully no real code
7506 will ever use compound literals with scalar type. */
7507 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7508 /* These come from compound literals, memcpy stuff over. */
7509 Section *ssec;
7510 ElfSym *esym;
7511 ElfW_Rel *rel;
7512 esym = elfsym(vtop->sym);
7513 ssec = tcc_state->sections[esym->st_shndx];
7514 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7515 if (ssec->reloc) {
7516 /* We need to copy over all memory contents, and that
7517 includes relocations. Use the fact that relocs are
7518 created it order, so look from the end of relocs
7519 until we hit one before the copied region. */
7520 unsigned long relofs = ssec->reloc->data_offset;
7521 while (relofs >= sizeof(*rel)) {
7522 relofs -= sizeof(*rel);
7523 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7524 if (rel->r_offset >= esym->st_value + size)
7525 continue;
7526 if (rel->r_offset < esym->st_value)
7527 break;
7528 put_elf_reloca(symtab_section, sec,
7529 c + rel->r_offset - esym->st_value,
7530 ELFW(R_TYPE)(rel->r_info),
7531 ELFW(R_SYM)(rel->r_info),
7532 #if PTR_SIZE == 8
7533 rel->r_addend
7534 #else
7536 #endif
7540 } else {
7541 if (type->t & VT_BITFIELD) {
7542 int bit_pos, bit_size, bits, n;
7543 unsigned char *p, v, m;
7544 bit_pos = BIT_POS(vtop->type.t);
7545 bit_size = BIT_SIZE(vtop->type.t);
7546 p = (unsigned char*)ptr + (bit_pos >> 3);
7547 bit_pos &= 7, bits = 0;
7548 while (bit_size) {
7549 n = 8 - bit_pos;
7550 if (n > bit_size)
7551 n = bit_size;
7552 v = val >> bits << bit_pos;
7553 m = ((1 << n) - 1) << bit_pos;
7554 *p = (*p & ~m) | (v & m);
7555 bits += n, bit_size -= n, bit_pos = 0, ++p;
7557 } else
7558 switch(bt) {
7559 case VT_BOOL:
7560 *(char *)ptr = val != 0;
7561 break;
7562 case VT_BYTE:
7563 *(char *)ptr = val;
7564 break;
7565 case VT_SHORT:
7566 write16le(ptr, val);
7567 break;
7568 case VT_FLOAT:
7569 write32le(ptr, val);
7570 break;
7571 case VT_DOUBLE:
7572 write64le(ptr, val);
7573 break;
7574 case VT_LDOUBLE:
7575 #if defined TCC_IS_NATIVE_387
7576 /* Host and target platform may be different but both have x87.
7577 On windows, tcc does not use VT_LDOUBLE, except when it is a
7578 cross compiler. In this case a mingw gcc as host compiler
7579 comes here with 10-byte long doubles, while msvc or tcc won't.
7580 tcc itself can still translate by asm.
7581 In any case we avoid possibly random bytes 11 and 12.
7583 if (sizeof (long double) >= 10)
7584 memcpy(ptr, &vtop->c.ld, 10);
7585 #ifdef __TINYC__
7586 else if (sizeof (long double) == sizeof (double))
7587 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7588 #endif
7589 else if (vtop->c.ld == 0.0)
7591 else
7592 #endif
7593 /* For other platforms it should work natively, but may not work
7594 for cross compilers */
7595 if (sizeof(long double) == LDOUBLE_SIZE)
7596 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7597 else if (sizeof(double) == LDOUBLE_SIZE)
7598 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7599 #ifndef TCC_CROSS_TEST
7600 else
7601 tcc_error("can't cross compile long double constants");
7602 #endif
7603 break;
7605 #if PTR_SIZE == 8
7606 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7607 case VT_LLONG:
7608 case VT_PTR:
7609 if (vtop->r & VT_SYM)
7610 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7611 else
7612 write64le(ptr, val);
7613 break;
7614 case VT_INT:
7615 write32le(ptr, val);
7616 break;
7617 #else
7618 case VT_LLONG:
7619 write64le(ptr, val);
7620 break;
7621 case VT_PTR:
7622 case VT_INT:
7623 if (vtop->r & VT_SYM)
7624 greloc(sec, vtop->sym, c, R_DATA_PTR);
7625 write32le(ptr, val);
7626 break;
7627 #endif
7628 default:
7629 //tcc_internal_error("unexpected type");
7630 break;
7633 vtop--;
7634 } else {
7635 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7636 vswap();
7637 vstore();
7638 vpop();
7642 /* 't' contains the type and storage info. 'c' is the offset of the
7643 object in section 'sec'. If 'sec' is NULL, it means stack based
7644 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7645 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7646 size only evaluation is wanted (only for arrays). */
7647 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7649 int len, n, no_oblock, i;
7650 int size1, align1;
7651 Sym *s, *f;
7652 Sym indexsym;
7653 CType *t1;
7655 /* generate line number info */
7656 if (debug_modes && !p->sec)
7657 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
7659 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7660 /* In case of strings we have special handling for arrays, so
7661 don't consume them as initializer value (which would commit them
7662 to some anonymous symbol). */
7663 tok != TOK_LSTR && tok != TOK_STR &&
7664 (!(flags & DIF_SIZE_ONLY)
7665 /* a struct may be initialized from a struct of same type, as in
7666 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7667 In that case we need to parse the element in order to check
7668 it for compatibility below */
7669 || (type->t & VT_BTYPE) == VT_STRUCT)
7671 int ncw_prev = nocode_wanted;
7672 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7673 ++nocode_wanted;
7674 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7675 nocode_wanted = ncw_prev;
7676 flags |= DIF_HAVE_ELEM;
7679 if (type->t & VT_ARRAY) {
7680 no_oblock = 1;
7681 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7682 tok == '{') {
7683 skip('{');
7684 no_oblock = 0;
7687 s = type->ref;
7688 n = s->c;
7689 t1 = pointed_type(type);
7690 size1 = type_size(t1, &align1);
7692 /* only parse strings here if correct type (otherwise: handle
7693 them as ((w)char *) expressions */
7694 if ((tok == TOK_LSTR &&
7695 #ifdef TCC_TARGET_PE
7696 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7697 #else
7698 (t1->t & VT_BTYPE) == VT_INT
7699 #endif
7700 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7701 len = 0;
7702 cstr_reset(&initstr);
7703 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7704 tcc_error("unhandled string literal merging");
7705 while (tok == TOK_STR || tok == TOK_LSTR) {
7706 if (initstr.size)
7707 initstr.size -= size1;
7708 if (tok == TOK_STR)
7709 len += tokc.str.size;
7710 else
7711 len += tokc.str.size / sizeof(nwchar_t);
7712 len--;
7713 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7714 next();
7716 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7717 && tok != TOK_EOF) {
7718 /* Not a lone literal but part of a bigger expression. */
7719 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7720 tokc.str.size = initstr.size;
7721 tokc.str.data = initstr.data;
7722 goto do_init_array;
7725 decl_design_flex(p, s, len);
7726 if (!(flags & DIF_SIZE_ONLY)) {
7727 int nb = n, ch;
7728 if (len < nb)
7729 nb = len;
7730 if (len > nb)
7731 tcc_warning("initializer-string for array is too long");
7732 /* in order to go faster for common case (char
7733 string in global variable, we handle it
7734 specifically */
7735 if (p->sec && size1 == 1) {
7736 init_assert(p, c + nb);
7737 if (!NODATA_WANTED)
7738 memcpy(p->sec->data + c, initstr.data, nb);
7739 } else {
7740 for(i=0;i<n;i++) {
7741 if (i >= nb) {
7742 /* only add trailing zero if enough storage (no
7743 warning in this case since it is standard) */
7744 if (flags & DIF_CLEAR)
7745 break;
7746 if (n - i >= 4) {
7747 init_putz(p, c + i * size1, (n - i) * size1);
7748 break;
7750 ch = 0;
7751 } else if (size1 == 1)
7752 ch = ((unsigned char *)initstr.data)[i];
7753 else
7754 ch = ((nwchar_t *)initstr.data)[i];
7755 vpushi(ch);
7756 init_putv(p, t1, c + i * size1);
7760 } else {
7762 do_init_array:
7763 indexsym.c = 0;
7764 f = &indexsym;
7766 do_init_list:
7767 /* zero memory once in advance */
7768 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7769 init_putz(p, c, n*size1);
7770 flags |= DIF_CLEAR;
7773 len = 0;
7774 /* GNU extension: if the initializer is empty for a flex array,
7775 it's size is zero. We won't enter the loop, so set the size
7776 now. */
7777 decl_design_flex(p, s, len);
7778 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7779 len = decl_designator(p, type, c, &f, flags, len);
7780 flags &= ~DIF_HAVE_ELEM;
7781 if (type->t & VT_ARRAY) {
7782 ++indexsym.c;
7783 /* special test for multi dimensional arrays (may not
7784 be strictly correct if designators are used at the
7785 same time) */
7786 if (no_oblock && len >= n*size1)
7787 break;
7788 } else {
7789 if (s->type.t == VT_UNION)
7790 f = NULL;
7791 else
7792 f = f->next;
7793 if (no_oblock && f == NULL)
7794 break;
7797 if (tok == '}')
7798 break;
7799 skip(',');
7802 if (!no_oblock)
7803 skip('}');
7805 } else if ((flags & DIF_HAVE_ELEM)
7806 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7807 The source type might have VT_CONSTANT set, which is
7808 of course assignable to non-const elements. */
7809 && is_compatible_unqualified_types(type, &vtop->type)) {
7810 goto one_elem;
7812 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7813 no_oblock = 1;
7814 if ((flags & DIF_FIRST) || tok == '{') {
7815 skip('{');
7816 no_oblock = 0;
7818 s = type->ref;
7819 f = s->next;
7820 n = s->c;
7821 size1 = 1;
7822 goto do_init_list;
7824 } else if (tok == '{') {
7825 if (flags & DIF_HAVE_ELEM)
7826 skip(';');
7827 next();
7828 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7829 skip('}');
7831 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7832 /* If we supported only ISO C we wouldn't have to accept calling
7833 this on anything than an array if DIF_SIZE_ONLY (and even then
7834 only on the outermost level, so no recursion would be needed),
7835 because initializing a flex array member isn't supported.
7836 But GNU C supports it, so we need to recurse even into
7837 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7838 /* just skip expression */
7839 if (flags & DIF_HAVE_ELEM)
7840 vpop();
7841 else
7842 skip_or_save_block(NULL);
7844 } else {
7845 if (!(flags & DIF_HAVE_ELEM)) {
7846 /* This should happen only when we haven't parsed
7847 the init element above for fear of committing a
7848 string constant to memory too early. */
7849 if (tok != TOK_STR && tok != TOK_LSTR)
7850 expect("string constant");
7851 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7853 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7854 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7855 && vtop->c.i == 0
7856 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7858 vpop();
7859 else
7860 init_putv(p, type, c);
7864 /* parse an initializer for type 't' if 'has_init' is non zero, and
7865 allocate space in local or global data space ('r' is either
7866 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7867 variable 'v' of scope 'scope' is declared before initializers
7868 are parsed. If 'v' is zero, then a reference to the new object
7869 is put in the value stack. If 'has_init' is 2, a special parsing
7870 is done to handle string constants. */
7871 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7872 int has_init, int v, int global)
7874 int size, align, addr;
7875 TokenString *init_str = NULL;
7877 Section *sec;
7878 Sym *flexible_array;
7879 Sym *sym;
7880 int saved_nocode_wanted = nocode_wanted;
7881 #ifdef CONFIG_TCC_BCHECK
7882 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7883 #endif
7884 init_params p = {0};
7886 /* Always allocate static or global variables */
7887 if (v && (r & VT_VALMASK) == VT_CONST)
7888 nocode_wanted |= DATA_ONLY_WANTED;
7890 flexible_array = NULL;
7891 size = type_size(type, &align);
7893 /* exactly one flexible array may be initialized, either the
7894 toplevel array or the last member of the toplevel struct */
7896 if (size < 0) {
7897 /* If the base type itself was an array type of unspecified size
7898 (like in 'typedef int arr[]; arr x = {1};') then we will
7899 overwrite the unknown size by the real one for this decl.
7900 We need to unshare the ref symbol holding that size. */
7901 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
7902 p.flex_array_ref = type->ref;
7904 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
7905 Sym *field = type->ref->next;
7906 if (field) {
7907 while (field->next)
7908 field = field->next;
7909 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
7910 flexible_array = field;
7911 p.flex_array_ref = field->type.ref;
7912 size = -1;
7917 if (size < 0) {
7918 /* If unknown size, do a dry-run 1st pass */
7919 if (!has_init)
7920 tcc_error("unknown type size");
7921 if (has_init == 2) {
7922 /* only get strings */
7923 init_str = tok_str_alloc();
7924 while (tok == TOK_STR || tok == TOK_LSTR) {
7925 tok_str_add_tok(init_str);
7926 next();
7928 tok_str_add(init_str, -1);
7929 tok_str_add(init_str, 0);
7930 } else
7931 skip_or_save_block(&init_str);
7932 unget_tok(0);
7934 /* compute size */
7935 begin_macro(init_str, 1);
7936 next();
7937 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
7938 /* prepare second initializer parsing */
7939 macro_ptr = init_str->str;
7940 next();
7942 /* if still unknown size, error */
7943 size = type_size(type, &align);
7944 if (size < 0)
7945 tcc_error("unknown type size");
7947 /* If there's a flex member and it was used in the initializer
7948 adjust size. */
7949 if (flexible_array && flexible_array->type.ref->c > 0)
7950 size += flexible_array->type.ref->c
7951 * pointed_size(&flexible_array->type);
7954 /* take into account specified alignment if bigger */
7955 if (ad->a.aligned) {
7956 int speca = 1 << (ad->a.aligned - 1);
7957 if (speca > align)
7958 align = speca;
7959 } else if (ad->a.packed) {
7960 align = 1;
7963 if (!v && NODATA_WANTED)
7964 size = 0, align = 1;
7966 if ((r & VT_VALMASK) == VT_LOCAL) {
7967 sec = NULL;
7968 #ifdef CONFIG_TCC_BCHECK
7969 if (bcheck && v) {
7970 /* add padding between stack variables for bound checking */
7971 loc -= align;
7973 #endif
7974 loc = (loc - size) & -align;
7975 addr = loc;
7976 p.local_offset = addr + size;
7977 #ifdef CONFIG_TCC_BCHECK
7978 if (bcheck && v) {
7979 /* add padding between stack variables for bound checking */
7980 loc -= align;
7982 #endif
7983 if (v) {
7984 /* local variable */
7985 #ifdef CONFIG_TCC_ASM
7986 if (ad->asm_label) {
7987 int reg = asm_parse_regvar(ad->asm_label);
7988 if (reg >= 0)
7989 r = (r & ~VT_VALMASK) | reg;
7991 #endif
7992 sym = sym_push(v, type, r, addr);
7993 if (ad->cleanup_func) {
7994 Sym *cls = sym_push2(&all_cleanups,
7995 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7996 cls->prev_tok = sym;
7997 cls->next = ad->cleanup_func;
7998 cls->ncl = cur_scope->cl.s;
7999 cur_scope->cl.s = cls;
8002 sym->a = ad->a;
8003 } else {
8004 /* push local reference */
8005 vset(type, r, addr);
8007 } else {
8008 sym = NULL;
8009 if (v && global) {
8010 /* see if the symbol was already defined */
8011 sym = sym_find(v);
8012 if (sym) {
8013 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8014 && sym->type.ref->c > type->ref->c) {
8015 /* flex array was already declared with explicit size
8016 extern int arr[10];
8017 int arr[] = { 1,2,3 }; */
8018 type->ref->c = sym->type.ref->c;
8019 size = type_size(type, &align);
8021 patch_storage(sym, ad, type);
8022 /* we accept several definitions of the same global variable. */
8023 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8024 goto no_alloc;
8028 /* allocate symbol in corresponding section */
8029 sec = ad->section;
8030 if (!sec) {
8031 CType *tp = type;
8032 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8033 tp = &tp->ref->type;
8034 if (tp->t & VT_CONSTANT) {
8035 sec = rodata_section;
8036 } else if (has_init) {
8037 sec = data_section;
8038 /*if (tcc_state->g_debug & 4)
8039 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8040 } else if (tcc_state->nocommon)
8041 sec = bss_section;
8044 if (sec) {
8045 addr = section_add(sec, size, align);
8046 #ifdef CONFIG_TCC_BCHECK
8047 /* add padding if bound check */
8048 if (bcheck)
8049 section_add(sec, 1, 1);
8050 #endif
8051 } else {
8052 addr = align; /* SHN_COMMON is special, symbol value is align */
8053 sec = common_section;
8056 if (v) {
8057 if (!sym) {
8058 sym = sym_push(v, type, r | VT_SYM, 0);
8059 patch_storage(sym, ad, NULL);
8061 /* update symbol definition */
8062 put_extern_sym(sym, sec, addr, size);
8063 } else {
8064 /* push global reference */
8065 vpush_ref(type, sec, addr, size);
8066 sym = vtop->sym;
8067 vtop->r |= r;
8070 #ifdef CONFIG_TCC_BCHECK
8071 /* handles bounds now because the symbol must be defined
8072 before for the relocation */
8073 if (bcheck) {
8074 addr_t *bounds_ptr;
8076 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8077 /* then add global bound info */
8078 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8079 bounds_ptr[0] = 0; /* relocated */
8080 bounds_ptr[1] = size;
8082 #endif
8085 if (type->t & VT_VLA) {
8086 int a;
8088 if (NODATA_WANTED)
8089 goto no_alloc;
8091 /* save before-VLA stack pointer if needed */
8092 if (cur_scope->vla.num == 0) {
8093 if (cur_scope->prev && cur_scope->prev->vla.num) {
8094 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8095 } else {
8096 gen_vla_sp_save(loc -= PTR_SIZE);
8097 cur_scope->vla.locorig = loc;
8101 vpush_type_size(type, &a);
8102 gen_vla_alloc(type, a);
8103 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8104 /* on _WIN64, because of the function args scratch area, the
8105 result of alloca differs from RSP and is returned in RAX. */
8106 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8107 #endif
8108 gen_vla_sp_save(addr);
8109 cur_scope->vla.loc = addr;
8110 cur_scope->vla.num++;
8111 } else if (has_init) {
8112 p.sec = sec;
8113 decl_initializer(&p, type, addr, DIF_FIRST);
8114 /* patch flexible array member size back to -1, */
8115 /* for possible subsequent similar declarations */
8116 if (flexible_array)
8117 flexible_array->type.ref->c = -1;
8120 no_alloc:
8121 /* restore parse state if needed */
8122 if (init_str) {
8123 end_macro();
8124 next();
8127 nocode_wanted = saved_nocode_wanted;
8130 /* generate vla code saved in post_type() */
8131 static void func_vla_arg_code(Sym *arg)
8133 int align;
8134 TokenString *vla_array_tok = NULL;
8136 if (arg->type.ref)
8137 func_vla_arg_code(arg->type.ref);
8139 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
8140 loc -= type_size(&int_type, &align);
8141 loc &= -align;
8142 arg->type.ref->c = loc;
8144 unget_tok(0);
8145 vla_array_tok = tok_str_alloc();
8146 vla_array_tok->str = arg->type.ref->vla_array_str;
8147 begin_macro(vla_array_tok, 1);
8148 next();
8149 gexpr();
8150 end_macro();
8151 next();
8152 vpush_type_size(&arg->type.ref->type, &align);
8153 gen_op('*');
8154 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8155 vswap();
8156 vstore();
8157 vpop();
8161 static void func_vla_arg(Sym *sym)
8163 Sym *arg;
8165 for (arg = sym->type.ref->next; arg; arg = arg->next)
8166 if (arg->type.t & VT_VLA)
8167 func_vla_arg_code(arg);
8170 /* parse a function defined by symbol 'sym' and generate its code in
8171 'cur_text_section' */
8172 static void gen_function(Sym *sym)
8174 struct scope f = { 0 };
8175 cur_scope = root_scope = &f;
8176 nocode_wanted = 0;
8177 ind = cur_text_section->data_offset;
8178 if (sym->a.aligned) {
8179 size_t newoff = section_add(cur_text_section, 0,
8180 1 << (sym->a.aligned - 1));
8181 gen_fill_nops(newoff - ind);
8183 /* NOTE: we patch the symbol size later */
8184 put_extern_sym(sym, cur_text_section, ind, 0);
8185 if (sym->type.ref->f.func_ctor)
8186 add_array (tcc_state, ".init_array", sym->c);
8187 if (sym->type.ref->f.func_dtor)
8188 add_array (tcc_state, ".fini_array", sym->c);
8190 funcname = get_tok_str(sym->v, NULL);
8191 func_ind = ind;
8192 func_vt = sym->type.ref->type;
8193 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8195 /* put debug symbol */
8196 tcc_debug_funcstart(tcc_state, sym);
8197 /* push a dummy symbol to enable local sym storage */
8198 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8199 local_scope = 1; /* for function parameters */
8200 gfunc_prolog(sym);
8201 tcc_debug_prolog_epilog(tcc_state, 0);
8202 local_scope = 0;
8203 rsym = 0;
8204 clear_temp_local_var_list();
8205 func_vla_arg(sym);
8206 block(0);
8207 gsym(rsym);
8208 nocode_wanted = 0;
8209 /* reset local stack */
8210 pop_local_syms(NULL, 0);
8211 tcc_debug_prolog_epilog(tcc_state, 1);
8212 gfunc_epilog();
8213 cur_text_section->data_offset = ind;
8214 local_scope = 0;
8215 label_pop(&global_label_stack, NULL, 0);
8216 sym_pop(&all_cleanups, NULL, 0);
8217 /* patch symbol size */
8218 elfsym(sym)->st_size = ind - func_ind;
8219 /* end of function */
8220 tcc_debug_funcend(tcc_state, ind - func_ind);
8221 /* It's better to crash than to generate wrong code */
8222 cur_text_section = NULL;
8223 funcname = ""; /* for safety */
8224 func_vt.t = VT_VOID; /* for safety */
8225 func_var = 0; /* for safety */
8226 ind = 0; /* for safety */
8227 func_ind = -1;
8228 nocode_wanted = DATA_ONLY_WANTED;
8229 check_vstack();
8230 /* do this after funcend debug info */
8231 next();
8234 static void gen_inline_functions(TCCState *s)
8236 Sym *sym;
8237 int inline_generated, i;
8238 struct InlineFunc *fn;
8240 tcc_open_bf(s, ":inline:", 0);
8241 /* iterate while inline function are referenced */
8242 do {
8243 inline_generated = 0;
8244 for (i = 0; i < s->nb_inline_fns; ++i) {
8245 fn = s->inline_fns[i];
8246 sym = fn->sym;
8247 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8248 /* the function was used or forced (and then not internal):
8249 generate its code and convert it to a normal function */
8250 fn->sym = NULL;
8251 tcc_debug_putfile(s, fn->filename);
8252 begin_macro(fn->func_str, 1);
8253 next();
8254 cur_text_section = text_section;
8255 gen_function(sym);
8256 end_macro();
8258 inline_generated = 1;
8261 } while (inline_generated);
8262 tcc_close();
8265 static void free_inline_functions(TCCState *s)
8267 int i;
8268 /* free tokens of unused inline functions */
8269 for (i = 0; i < s->nb_inline_fns; ++i) {
8270 struct InlineFunc *fn = s->inline_fns[i];
8271 if (fn->sym)
8272 tok_str_free(fn->func_str);
8274 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8277 static void do_Static_assert(void){
8278 CString error_str;
8279 int c;
8281 next();
8282 skip('(');
8283 c = expr_const();
8285 if (tok == ')') {
8286 if (!c)
8287 tcc_error("_Static_assert fail");
8288 next();
8289 goto static_assert_out;
8292 skip(',');
8293 parse_mult_str(&error_str, "string constant");
8294 if (c == 0)
8295 tcc_error("%s", (char *)error_str.data);
8296 cstr_free(&error_str);
8297 skip(')');
8298 static_assert_out:
8299 skip(';');
8302 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8303 or VT_CMP if parsing old style parameter list
8304 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8305 static int decl(int l)
8307 int v, has_init, r, oldint;
8308 CType type, btype;
8309 Sym *sym;
8310 AttributeDef ad, adbase;
8312 while (1) {
8313 if (tok == TOK_STATIC_ASSERT) {
8314 do_Static_assert();
8315 continue;
8318 oldint = 0;
8319 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8320 if (l == VT_JMP)
8321 return 0;
8322 /* skip redundant ';' if not in old parameter decl scope */
8323 if (tok == ';' && l != VT_CMP) {
8324 next();
8325 continue;
8327 if (l != VT_CONST)
8328 break;
8329 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8330 /* global asm block */
8331 asm_global_instr();
8332 continue;
8334 if (tok >= TOK_UIDENT) {
8335 /* special test for old K&R protos without explicit int
8336 type. Only accepted when defining global data */
8337 btype.t = VT_INT;
8338 oldint = 1;
8339 } else {
8340 if (tok != TOK_EOF)
8341 expect("declaration");
8342 break;
8346 if (tok == ';') {
8347 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8348 v = btype.ref->v;
8349 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8350 tcc_warning("unnamed struct/union that defines no instances");
8351 next();
8352 continue;
8354 if (IS_ENUM(btype.t)) {
8355 next();
8356 continue;
8360 while (1) { /* iterate thru each declaration */
8361 type = btype;
8362 ad = adbase;
8363 type_decl(&type, &ad, &v, TYPE_DIRECT);
8364 #if 0
8366 char buf[500];
8367 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8368 printf("type = '%s'\n", buf);
8370 #endif
8371 if ((type.t & VT_BTYPE) == VT_FUNC) {
8372 if ((type.t & VT_STATIC) && (l != VT_CONST))
8373 tcc_error("function without file scope cannot be static");
8374 /* if old style function prototype, we accept a
8375 declaration list */
8376 sym = type.ref;
8377 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8378 func_vt = type;
8379 decl(VT_CMP);
8381 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8382 if (sym->f.func_alwinl
8383 && ((type.t & (VT_EXTERN | VT_INLINE))
8384 == (VT_EXTERN | VT_INLINE))) {
8385 /* always_inline functions must be handled as if they
8386 don't generate multiple global defs, even if extern
8387 inline, i.e. GNU inline semantics for those. Rewrite
8388 them into static inline. */
8389 type.t &= ~VT_EXTERN;
8390 type.t |= VT_STATIC;
8392 #endif
8393 /* always compile 'extern inline' */
8394 if (type.t & VT_EXTERN)
8395 type.t &= ~VT_INLINE;
8397 } else if (oldint) {
8398 tcc_warning("type defaults to int");
8401 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8402 ad.asm_label = asm_label_instr();
8403 /* parse one last attribute list, after asm label */
8404 parse_attribute(&ad);
8405 #if 0
8406 /* gcc does not allow __asm__("label") with function definition,
8407 but why not ... */
8408 if (tok == '{')
8409 expect(";");
8410 #endif
8413 #ifdef TCC_TARGET_PE
8414 if (ad.a.dllimport || ad.a.dllexport) {
8415 if (type.t & VT_STATIC)
8416 tcc_error("cannot have dll linkage with static");
8417 if (type.t & VT_TYPEDEF) {
8418 tcc_warning("'%s' attribute ignored for typedef",
8419 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8420 (ad.a.dllexport = 0, "dllexport"));
8421 } else if (ad.a.dllimport) {
8422 if ((type.t & VT_BTYPE) == VT_FUNC)
8423 ad.a.dllimport = 0;
8424 else
8425 type.t |= VT_EXTERN;
8428 #endif
8429 if (tok == '{') {
8430 if (l != VT_CONST)
8431 tcc_error("cannot use local functions");
8432 if ((type.t & VT_BTYPE) != VT_FUNC)
8433 expect("function definition");
8435 /* reject abstract declarators in function definition
8436 make old style params without decl have int type */
8437 sym = type.ref;
8438 while ((sym = sym->next) != NULL) {
8439 if (!(sym->v & ~SYM_FIELD))
8440 expect("identifier");
8441 if (sym->type.t == VT_VOID)
8442 sym->type = int_type;
8445 /* apply post-declaraton attributes */
8446 merge_funcattr(&type.ref->f, &ad.f);
8448 /* put function symbol */
8449 type.t &= ~VT_EXTERN;
8450 sym = external_sym(v, &type, 0, &ad);
8452 /* static inline functions are just recorded as a kind
8453 of macro. Their code will be emitted at the end of
8454 the compilation unit only if they are used */
8455 if (sym->type.t & VT_INLINE) {
8456 struct InlineFunc *fn;
8457 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8458 strcpy(fn->filename, file->filename);
8459 fn->sym = sym;
8460 skip_or_save_block(&fn->func_str);
8461 dynarray_add(&tcc_state->inline_fns,
8462 &tcc_state->nb_inline_fns, fn);
8463 } else {
8464 /* compute text section */
8465 cur_text_section = ad.section;
8466 if (!cur_text_section)
8467 cur_text_section = text_section;
8468 gen_function(sym);
8470 break;
8471 } else {
8472 if (l == VT_CMP) {
8473 /* find parameter in function parameter list */
8474 for (sym = func_vt.ref->next; sym; sym = sym->next)
8475 if ((sym->v & ~SYM_FIELD) == v)
8476 goto found;
8477 tcc_error("declaration for parameter '%s' but no such parameter",
8478 get_tok_str(v, NULL));
8479 found:
8480 if (type.t & VT_STORAGE) /* 'register' is okay */
8481 tcc_error("storage class specified for '%s'",
8482 get_tok_str(v, NULL));
8483 if (sym->type.t != VT_VOID)
8484 tcc_error("redefinition of parameter '%s'",
8485 get_tok_str(v, NULL));
8486 convert_parameter_type(&type);
8487 sym->type = type;
8488 } else if (type.t & VT_TYPEDEF) {
8489 /* save typedefed type */
8490 /* XXX: test storage specifiers ? */
8491 sym = sym_find(v);
8492 if (sym && sym->sym_scope == local_scope) {
8493 if (!is_compatible_types(&sym->type, &type)
8494 || !(sym->type.t & VT_TYPEDEF))
8495 tcc_error("incompatible redefinition of '%s'",
8496 get_tok_str(v, NULL));
8497 sym->type = type;
8498 } else {
8499 sym = sym_push(v, &type, 0, 0);
8501 sym->a = ad.a;
8502 if ((type.t & VT_BTYPE) == VT_FUNC)
8503 merge_funcattr(&sym->type.ref->f, &ad.f);
8504 if (debug_modes)
8505 tcc_debug_typedef (tcc_state, sym);
8506 } else if ((type.t & VT_BTYPE) == VT_VOID
8507 && !(type.t & VT_EXTERN)) {
8508 tcc_error("declaration of void object");
8509 } else {
8510 r = 0;
8511 if ((type.t & VT_BTYPE) == VT_FUNC) {
8512 /* external function definition */
8513 /* specific case for func_call attribute */
8514 merge_funcattr(&type.ref->f, &ad.f);
8515 } else if (!(type.t & VT_ARRAY)) {
8516 /* not lvalue if array */
8517 r |= VT_LVAL;
8519 has_init = (tok == '=');
8520 if (has_init && (type.t & VT_VLA))
8521 tcc_error("variable length array cannot be initialized");
8522 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8523 || (type.t & VT_BTYPE) == VT_FUNC
8524 /* as with GCC, uninitialized global arrays with no size
8525 are considered extern: */
8526 || ((type.t & VT_ARRAY) && !has_init
8527 && l == VT_CONST && type.ref->c < 0)
8529 /* external variable or function */
8530 type.t |= VT_EXTERN;
8531 sym = external_sym(v, &type, r, &ad);
8532 if (ad.alias_target) {
8533 /* Aliases need to be emitted when their target
8534 symbol is emitted, even if perhaps unreferenced.
8535 We only support the case where the base is
8536 already defined, otherwise we would need
8537 deferring to emit the aliases until the end of
8538 the compile unit. */
8539 Sym *alias_target = sym_find(ad.alias_target);
8540 ElfSym *esym = elfsym(alias_target);
8541 if (!esym)
8542 tcc_error("unsupported forward __alias__ attribute");
8543 put_extern_sym2(sym, esym->st_shndx,
8544 esym->st_value, esym->st_size, 1);
8546 } else {
8547 if (l == VT_CONST || (type.t & VT_STATIC))
8548 r |= VT_CONST;
8549 else
8550 r |= VT_LOCAL;
8551 if (has_init)
8552 next();
8553 else if (l == VT_CONST)
8554 /* uninitialized global variables may be overridden */
8555 type.t |= VT_EXTERN;
8556 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8559 if (tok != ',') {
8560 if (l == VT_JMP)
8561 return 1;
8562 skip(';');
8563 break;
8565 next();
8569 return 0;
8572 /* ------------------------------------------------------------------------- */
8573 #undef gjmp_addr
8574 #undef gjmp
8575 /* ------------------------------------------------------------------------- */