Fix test90 for 32 bits targets
[tinycc.git] / tccgen.c
blobf493abf45f7bd11a31cd09aac7e78eab06a0187b
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int constant_p;
48 ST_DATA char debug_modes;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
59 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= 0x20000000)
60 #define CODE_ON() (nocode_wanted &= ~0x20000000)
62 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
63 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
64 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
65 ST_DATA int func_vc;
66 ST_DATA int func_ind;
67 ST_DATA const char *funcname;
68 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
69 static CString initstr;
71 #if PTR_SIZE == 4
72 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
73 #define VT_PTRDIFF_T VT_INT
74 #elif LONG_SIZE == 4
75 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
76 #define VT_PTRDIFF_T VT_LLONG
77 #else
78 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
79 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
80 #endif
82 static struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int nocode_wanted;
89 int *bsym;
90 struct scope *scope;
91 struct switch_t *prev;
92 SValue sv;
93 } *cur_switch; /* current switch */
95 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
96 /*list of temporary local variables on the stack in current function. */
97 static struct temp_local_variable {
98 int location; //offset on stack. Svalue.c.i
99 short size;
100 short align;
101 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
102 static int nb_temp_local_vars;
104 static struct scope {
105 struct scope *prev;
106 struct { int loc, locorig, num; } vla;
107 struct { Sym *s; int n; } cl;
108 int *bsym, *csym;
109 Sym *lstk, *llstk;
110 } *cur_scope, *loop_scope, *root_scope;
112 typedef struct {
113 Section *sec;
114 int local_offset;
115 Sym *flex_array_ref;
116 } init_params;
118 #if 1
119 #define precedence_parser
120 static void init_prec(void);
121 #endif
123 static void gen_cast(CType *type);
124 static void gen_cast_s(int t);
125 static inline CType *pointed_type(CType *type);
126 static int is_compatible_types(CType *type1, CType *type2);
127 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
128 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
129 static void parse_expr_type(CType *type);
130 static void init_putv(init_params *p, CType *type, unsigned long c);
131 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
132 static void block(int is_expr);
133 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
134 static int decl(int l);
135 static void expr_eq(void);
136 static void vpush_type_size(CType *type, int *a);
137 static int is_compatible_unqualified_types(CType *type1, CType *type2);
138 static inline int64_t expr_const64(void);
139 static void vpush64(int ty, unsigned long long v);
140 static void vpush(CType *type);
141 static int gvtst(int inv, int t);
142 static void gen_inline_functions(TCCState *s);
143 static void free_inline_functions(TCCState *s);
144 static void skip_or_save_block(TokenString **str);
145 static void gv_dup(void);
146 static int get_temp_local_var(int size,int align);
147 static void clear_temp_local_var_list();
148 static void cast_error(CType *st, CType *dt);
150 /* ------------------------------------------------------------------------- */
151 /* Automagical code suppression */
153 /* Clear 'nocode_wanted' at forward label if it was used */
154 ST_FUNC void gsym(int t)
156 if (t) {
157 gsym_addr(t, ind);
158 CODE_ON();
162 /* Clear 'nocode_wanted' if current pc is a label */
163 static int gind()
165 int t = ind;
166 CODE_ON();
167 if (debug_modes)
168 tcc_tcov_block_begin(tcc_state);
169 return t;
/* Emit an unconditional backwards jump to 't'; following code is
   unreachable, so suppress it. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit an unconditional forward jump chained onto 't'; following code
   is unreachable, so suppress it.  Returns the new jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
187 /* These are #undef'd at the end of this file */
188 #define gjmp_addr gjmp_addr_acs
189 #define gjmp gjmp_acs
190 /* ------------------------------------------------------------------------- */
192 ST_INLN int is_float(int t)
194 int bt = t & VT_BTYPE;
195 return bt == VT_LDOUBLE
196 || bt == VT_DOUBLE
197 || bt == VT_FLOAT
198 || bt == VT_QFLOAT;
201 static inline int is_integer_btype(int bt)
203 return bt == VT_BYTE
204 || bt == VT_BOOL
205 || bt == VT_SHORT
206 || bt == VT_INT
207 || bt == VT_LLONG;
210 static int btype_size(int bt)
212 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
213 bt == VT_SHORT ? 2 :
214 bt == VT_INT ? 4 :
215 bt == VT_LLONG ? 8 :
216 bt == VT_PTR ? PTR_SIZE : 0;
219 /* returns function return register from type */
220 static int R_RET(int t)
222 if (!is_float(t))
223 return REG_IRET;
224 #ifdef TCC_TARGET_X86_64
225 if ((t & VT_BTYPE) == VT_LDOUBLE)
226 return TREG_ST0;
227 #elif defined TCC_TARGET_RISCV64
228 if ((t & VT_BTYPE) == VT_LDOUBLE)
229 return REG_IRET;
230 #endif
231 return REG_FRET;
234 /* returns 2nd function return register, if any */
235 static int R2_RET(int t)
237 t &= VT_BTYPE;
238 #if PTR_SIZE == 4
239 if (t == VT_LLONG)
240 return REG_IRE2;
241 #elif defined TCC_TARGET_X86_64
242 if (t == VT_QLONG)
243 return REG_IRE2;
244 if (t == VT_QFLOAT)
245 return REG_FRE2;
246 #elif defined TCC_TARGET_RISCV64
247 if (t == VT_LDOUBLE)
248 return REG_IRE2;
249 #endif
250 return VT_CONST;
253 /* returns true for two-word types */
254 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
256 /* put function return registers to stack value */
257 static void PUT_R_RET(SValue *sv, int t)
259 sv->r = R_RET(t), sv->r2 = R2_RET(t);
262 /* returns function return register class for type t */
263 static int RC_RET(int t)
265 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
268 /* returns generic register class for type t */
269 static int RC_TYPE(int t)
271 if (!is_float(t))
272 return RC_INT;
273 #ifdef TCC_TARGET_X86_64
274 if ((t & VT_BTYPE) == VT_LDOUBLE)
275 return RC_ST0;
276 if ((t & VT_BTYPE) == VT_QFLOAT)
277 return RC_FRET;
278 #elif defined TCC_TARGET_RISCV64
279 if ((t & VT_BTYPE) == VT_LDOUBLE)
280 return RC_INT;
281 #endif
282 return RC_FLOAT;
285 /* returns 2nd register class corresponding to t and rc */
286 static int RC2_TYPE(int t, int rc)
288 if (!USING_TWO_WORDS(t))
289 return 0;
290 #ifdef RC_IRE2
291 if (rc == RC_IRET)
292 return RC_IRE2;
293 #endif
294 #ifdef RC_FRE2
295 if (rc == RC_FRET)
296 return RC_FRE2;
297 #endif
298 if (rc & RC_FLOAT)
299 return RC_FLOAT;
300 return RC_INT;
303 /* we use our own 'finite' function to avoid potential problems with
304 non standard math libs */
305 /* XXX: endianness dependent */
306 ST_FUNC int ieee_finite(double d)
308 int p[4];
309 memcpy(p, &d, sizeof(double));
310 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
313 /* compiling intel long double natively */
314 #if (defined __i386__ || defined __x86_64__) \
315 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
316 # define TCC_IS_NATIVE_387
317 #endif
319 ST_FUNC void test_lvalue(void)
321 if (!(vtop->r & VT_LVAL))
322 expect("lvalue");
325 ST_FUNC void check_vstack(void)
327 if (vtop != vstack - 1)
328 tcc_error("internal compiler error: vstack leak (%d)",
329 (int)(vtop - vstack + 1));
332 /* vstack debugging aid */
333 #if 0
334 void pv (const char *lbl, int a, int b)
336 int i;
337 for (i = a; i < a + b; ++i) {
338 SValue *p = &vtop[-i];
339 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
340 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
343 #endif
345 /* ------------------------------------------------------------------------- */
346 /* initialize vstack and types. This must be done also for tcc -E */
347 ST_FUNC void tccgen_init(TCCState *s1)
349 vtop = vstack - 1;
350 memset(vtop, 0, sizeof *vtop);
352 /* define some often used types */
353 int_type.t = VT_INT;
355 char_type.t = VT_BYTE;
356 if (s1->char_is_unsigned)
357 char_type.t |= VT_UNSIGNED;
358 char_pointer_type = char_type;
359 mk_pointer(&char_pointer_type);
361 func_old_type.t = VT_FUNC;
362 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
363 func_old_type.ref->f.func_call = FUNC_CDECL;
364 func_old_type.ref->f.func_type = FUNC_OLD;
365 #ifdef precedence_parser
366 init_prec();
367 #endif
368 cstr_new(&initstr);
371 ST_FUNC int tccgen_compile(TCCState *s1)
373 cur_text_section = NULL;
374 funcname = "";
375 func_ind = -1;
376 anon_sym = SYM_FIRST_ANOM;
377 const_wanted = 0;
378 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
379 local_scope = 0;
380 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
382 tcc_debug_start(s1);
383 tcc_tcov_start (s1);
384 #ifdef TCC_TARGET_ARM
385 arm_init(s1);
386 #endif
387 #ifdef INC_DEBUG
388 printf("%s: **** new file\n", file->filename);
389 #endif
390 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
391 next();
392 decl(VT_CONST);
393 gen_inline_functions(s1);
394 check_vstack();
395 /* end of translation unit info */
396 tcc_debug_end(s1);
397 tcc_tcov_end(s1);
398 return 0;
401 ST_FUNC void tccgen_finish(TCCState *s1)
403 cstr_free(&initstr);
404 free_inline_functions(s1);
405 sym_pop(&global_stack, NULL, 0);
406 sym_pop(&local_stack, NULL, 0);
407 /* free preprocessor macros */
408 free_defines(NULL);
409 /* free sym_pools */
410 dynarray_reset(&sym_pools, &nb_sym_pools);
411 sym_free_first = NULL;
414 /* ------------------------------------------------------------------------- */
415 ST_FUNC ElfSym *elfsym(Sym *s)
417 if (!s || !s->c)
418 return NULL;
419 return &((ElfSym *)symtab_section->data)[s->c];
422 /* apply storage attributes to Elf symbol */
423 ST_FUNC void update_storage(Sym *sym)
425 ElfSym *esym;
426 int sym_bind, old_sym_bind;
428 esym = elfsym(sym);
429 if (!esym)
430 return;
432 if (sym->a.visibility)
433 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
434 | sym->a.visibility;
436 if (sym->type.t & (VT_STATIC | VT_INLINE))
437 sym_bind = STB_LOCAL;
438 else if (sym->a.weak)
439 sym_bind = STB_WEAK;
440 else
441 sym_bind = STB_GLOBAL;
442 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
443 if (sym_bind != old_sym_bind) {
444 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
447 #ifdef TCC_TARGET_PE
448 if (sym->a.dllimport)
449 esym->st_other |= ST_PE_IMPORT;
450 if (sym->a.dllexport)
451 esym->st_other |= ST_PE_EXPORT;
452 #endif
454 #if 0
455 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
456 get_tok_str(sym->v, NULL),
457 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
458 sym->a.visibility,
459 sym->a.dllexport,
460 sym->a.dllimport
462 #endif
465 /* ------------------------------------------------------------------------- */
466 /* update sym->c so that it points to an external symbol in section
467 'section' with value 'value' */
469 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
470 addr_t value, unsigned long size,
471 int can_add_underscore)
473 int sym_type, sym_bind, info, other, t;
474 ElfSym *esym;
475 const char *name;
476 char buf1[256];
478 if (!sym->c) {
479 name = get_tok_str(sym->v, NULL);
480 t = sym->type.t;
481 if ((t & VT_BTYPE) == VT_FUNC) {
482 sym_type = STT_FUNC;
483 } else if ((t & VT_BTYPE) == VT_VOID) {
484 sym_type = STT_NOTYPE;
485 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
486 sym_type = STT_FUNC;
487 } else {
488 sym_type = STT_OBJECT;
490 if (t & (VT_STATIC | VT_INLINE))
491 sym_bind = STB_LOCAL;
492 else
493 sym_bind = STB_GLOBAL;
494 other = 0;
496 #ifdef TCC_TARGET_PE
497 if (sym_type == STT_FUNC && sym->type.ref) {
498 Sym *ref = sym->type.ref;
499 if (ref->a.nodecorate) {
500 can_add_underscore = 0;
502 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
503 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
504 name = buf1;
505 other |= ST_PE_STDCALL;
506 can_add_underscore = 0;
509 #endif
511 if (sym->asm_label) {
512 name = get_tok_str(sym->asm_label, NULL);
513 can_add_underscore = 0;
516 if (tcc_state->leading_underscore && can_add_underscore) {
517 buf1[0] = '_';
518 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
519 name = buf1;
522 info = ELFW(ST_INFO)(sym_bind, sym_type);
523 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
525 if (debug_modes)
526 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
528 } else {
529 esym = elfsym(sym);
530 esym->st_value = value;
531 esym->st_size = size;
532 esym->st_shndx = sh_num;
534 update_storage(sym);
537 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
539 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
540 return;
541 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
544 /* add a new relocation entry to symbol 'sym' in section 's' */
545 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
546 addr_t addend)
548 int c = 0;
550 if (nocode_wanted && s == cur_text_section)
551 return;
553 if (sym) {
554 if (0 == sym->c)
555 put_extern_sym(sym, NULL, 0, 0);
556 c = sym->c;
559 /* now we can add ELF relocation info */
560 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit targets: relocation without explicit addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
570 /* ------------------------------------------------------------------------- */
571 /* symbol allocator */
572 static Sym *__sym_malloc(void)
574 Sym *sym_pool, *sym, *last_sym;
575 int i;
577 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
578 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
580 last_sym = sym_free_first;
581 sym = sym_pool;
582 for(i = 0; i < SYM_POOL_NB; i++) {
583 sym->next = last_sym;
584 last_sym = sym;
585 sym++;
587 sym_free_first = last_sym;
588 return last_sym;
591 static inline Sym *sym_malloc(void)
593 Sym *sym;
594 #ifndef SYM_DEBUG
595 sym = sym_free_first;
596 if (!sym)
597 sym = __sym_malloc();
598 sym_free_first = sym->next;
599 return sym;
600 #else
601 sym = tcc_malloc(sizeof(Sym));
602 return sym;
603 #endif
606 ST_INLN void sym_free(Sym *sym)
608 #ifndef SYM_DEBUG
609 sym->next = sym_free_first;
610 sym_free_first = sym;
611 #else
612 tcc_free(sym);
613 #endif
616 /* push, without hashing */
617 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
619 Sym *s;
621 s = sym_malloc();
622 memset(s, 0, sizeof *s);
623 s->v = v;
624 s->type.t = t;
625 s->c = c;
626 /* add in stack */
627 s->prev = *ps;
628 *ps = s;
629 return s;
632 /* find a symbol and return its associated structure. 's' is the top
633 of the symbol stack */
634 ST_FUNC Sym *sym_find2(Sym *s, int v)
636 while (s) {
637 if (s->v == v)
638 return s;
639 else if (s->v == -1)
640 return NULL;
641 s = s->prev;
643 return NULL;
646 /* structure lookup */
647 ST_INLN Sym *struct_find(int v)
649 v -= TOK_IDENT;
650 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
651 return NULL;
652 return table_ident[v]->sym_struct;
655 /* find an identifier */
656 ST_INLN Sym *sym_find(int v)
658 v -= TOK_IDENT;
659 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
660 return NULL;
661 return table_ident[v]->sym_identifier;
664 static int sym_scope(Sym *s)
666 if (IS_ENUM_VAL (s->type.t))
667 return s->type.ref->sym_scope;
668 else
669 return s->sym_scope;
672 /* push a given symbol on the symbol stack */
673 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
675 Sym *s, **ps;
676 TokenSym *ts;
678 if (local_stack)
679 ps = &local_stack;
680 else
681 ps = &global_stack;
682 s = sym_push2(ps, v, type->t, c);
683 s->type.ref = type->ref;
684 s->r = r;
685 /* don't record fields or anonymous symbols */
686 /* XXX: simplify */
687 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
688 /* record symbol in token array */
689 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
690 if (v & SYM_STRUCT)
691 ps = &ts->sym_struct;
692 else
693 ps = &ts->sym_identifier;
694 s->prev_tok = *ps;
695 *ps = s;
696 s->sym_scope = local_scope;
697 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
698 tcc_error("redeclaration of '%s'",
699 get_tok_str(v & ~SYM_STRUCT, NULL));
701 return s;
704 /* push a global identifier */
705 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
707 Sym *s, **ps;
708 s = sym_push2(&global_stack, v, t, c);
709 s->r = VT_CONST | VT_SYM;
710 /* don't record anonymous symbol */
711 if (v < SYM_FIRST_ANOM) {
712 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
713 /* modify the top most local identifier, so that sym_identifier will
714 point to 's' when popped; happens when called from inline asm */
715 while (*ps != NULL && (*ps)->sym_scope)
716 ps = &(*ps)->prev_tok;
717 s->prev_tok = *ps;
718 *ps = s;
720 return s;
723 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
724 pop them yet from the list, but do remove them from the token array. */
725 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
727 Sym *s, *ss, **ps;
728 TokenSym *ts;
729 int v;
731 s = *ptop;
732 while(s != b) {
733 ss = s->prev;
734 v = s->v;
735 /* remove symbol in token array */
736 /* XXX: simplify */
737 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
738 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
739 if (v & SYM_STRUCT)
740 ps = &ts->sym_struct;
741 else
742 ps = &ts->sym_identifier;
743 *ps = s->prev_tok;
745 if (!keep)
746 sym_free(s);
747 s = ss;
749 if (!keep)
750 *ptop = b;
753 /* ------------------------------------------------------------------------- */
754 static void vcheck_cmp(void)
756 /* cannot let cpu flags if other instruction are generated. Also
757 avoid leaving VT_JMP anywhere except on the top of the stack
758 because it would complicate the code generator.
760 Don't do this when nocode_wanted. vtop might come from
761 !nocode_wanted regions (see 88_codeopt.c) and transforming
762 it to a register without actually generating code is wrong
763 as their value might still be used for real. All values
764 we push under nocode_wanted will eventually be popped
765 again, so that the VT_CMP/VT_JMP value will be in vtop
766 when code is unsuppressed again. */
768 if (vtop->r == VT_CMP && !nocode_wanted)
769 gv(RC_INT);
772 static void vsetc(CType *type, int r, CValue *vc)
774 if (vtop >= vstack + (VSTACK_SIZE - 1))
775 tcc_error("memory full (vstack)");
776 vcheck_cmp();
777 vtop++;
778 vtop->type = *type;
779 vtop->r = r;
780 vtop->r2 = VT_CONST;
781 vtop->c = *vc;
782 vtop->sym = NULL;
785 ST_FUNC void vswap(void)
787 SValue tmp;
789 vcheck_cmp();
790 tmp = vtop[0];
791 vtop[0] = vtop[-1];
792 vtop[-1] = tmp;
795 /* pop stack value */
796 ST_FUNC void vpop(void)
798 int v;
799 v = vtop->r & VT_VALMASK;
800 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
801 /* for x86, we need to pop the FP stack */
802 if (v == TREG_ST0) {
803 o(0xd8dd); /* fstp %st(0) */
804 } else
805 #endif
806 if (v == VT_CMP) {
807 /* need to put correct jump if && or || without test */
808 gsym(vtop->jtrue);
809 gsym(vtop->jfalse);
811 vtop--;
814 /* push constant of type "type" with useless value */
815 static void vpush(CType *type)
817 vset(type, VT_CONST, 0);
820 /* push arbitrary 64bit constant */
821 static void vpush64(int ty, unsigned long long v)
823 CValue cval;
824 CType ctype;
825 ctype.t = ty;
826 ctype.ref = NULL;
827 cval.i = v;
828 vsetc(&ctype, VT_CONST, &cval);
831 /* push integer constant */
832 ST_FUNC void vpushi(int v)
834 vpush64(VT_INT, v);
837 /* push a pointer sized constant */
838 static void vpushs(addr_t v)
840 vpush64(VT_SIZE_T, v);
843 /* push long long constant */
844 static inline void vpushll(long long v)
846 vpush64(VT_LLONG, v);
849 ST_FUNC void vset(CType *type, int r, int v)
851 CValue cval;
852 cval.i = v;
853 vsetc(type, r, &cval);
856 static void vseti(int r, int v)
858 CType type;
859 type.t = VT_INT;
860 type.ref = NULL;
861 vset(&type, r, v);
864 ST_FUNC void vpushv(SValue *v)
866 if (vtop >= vstack + (VSTACK_SIZE - 1))
867 tcc_error("memory full (vstack)");
868 vtop++;
869 *vtop = *v;
872 static void vdup(void)
874 vpushv(vtop);
877 /* rotate n first stack elements to the bottom
878 I1 ... In -> I2 ... In I1 [top is right]
880 ST_FUNC void vrotb(int n)
882 int i;
883 SValue tmp;
885 vcheck_cmp();
886 tmp = vtop[-n + 1];
887 for(i=-n+1;i!=0;i++)
888 vtop[i] = vtop[i+1];
889 vtop[0] = tmp;
892 /* rotate the n elements before entry e towards the top
893 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
895 ST_FUNC void vrote(SValue *e, int n)
897 int i;
898 SValue tmp;
900 vcheck_cmp();
901 tmp = *e;
902 for(i = 0;i < n - 1; i++)
903 e[-i] = e[-i - 1];
904 e[-n + 1] = tmp;
907 /* rotate n first stack elements to the top
908 I1 ... In -> In I1 ... I(n-1) [top is right]
910 ST_FUNC void vrott(int n)
912 vrote(vtop, n);
915 /* ------------------------------------------------------------------------- */
916 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
918 /* called from generators to set the result from relational ops */
919 ST_FUNC void vset_VT_CMP(int op)
921 vtop->r = VT_CMP;
922 vtop->cmp_op = op;
923 vtop->jfalse = 0;
924 vtop->jtrue = 0;
927 /* called once before asking generators to load VT_CMP to a register */
928 static void vset_VT_JMP(void)
930 int op = vtop->cmp_op;
932 if (vtop->jtrue || vtop->jfalse) {
933 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
934 int inv = op & (op < 2); /* small optimization */
935 vseti(VT_JMP+inv, gvtst(inv, 0));
936 } else {
937 /* otherwise convert flags (rsp. 0/1) to register */
938 vtop->c.i = op;
939 if (op < 2) /* doesn't seem to happen */
940 vtop->r = VT_CONST;
944 /* Set CPU Flags, doesn't yet jump */
945 static void gvtst_set(int inv, int t)
947 int *p;
949 if (vtop->r != VT_CMP) {
950 vpushi(0);
951 gen_op(TOK_NE);
952 if (vtop->r != VT_CMP) /* must be VT_CONST then */
953 vset_VT_CMP(vtop->c.i != 0);
956 p = inv ? &vtop->jfalse : &vtop->jtrue;
957 *p = gjmp_append(*p, t);
960 /* Generate value test
962 * Generate a test for any value (jump, comparison and integers) */
963 static int gvtst(int inv, int t)
965 int op, x, u;
967 gvtst_set(inv, t);
968 t = vtop->jtrue, u = vtop->jfalse;
969 if (inv)
970 x = u, u = t, t = x;
971 op = vtop->cmp_op;
973 /* jump to the wanted target */
974 if (op > 1)
975 t = gjmp_cond(op ^ inv, t);
976 else if (op != inv)
977 t = gjmp(t);
978 /* resolve complementary jumps to here */
979 gsym(u);
981 vtop--;
982 return t;
985 /* generate a zero or nozero test */
986 static void gen_test_zero(int op)
988 if (vtop->r == VT_CMP) {
989 int j;
990 if (op == TOK_EQ) {
991 j = vtop->jfalse;
992 vtop->jfalse = vtop->jtrue;
993 vtop->jtrue = j;
994 vtop->cmp_op ^= 1;
996 } else {
997 vpushi(0);
998 gen_op(op);
1002 /* ------------------------------------------------------------------------- */
1003 /* push a symbol value of TYPE */
1004 ST_FUNC void vpushsym(CType *type, Sym *sym)
1006 CValue cval;
1007 cval.i = 0;
1008 vsetc(type, VT_CONST | VT_SYM, &cval);
1009 vtop->sym = sym;
1012 /* Return a static symbol pointing to a section */
1013 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1015 int v;
1016 Sym *sym;
1018 v = anon_sym++;
1019 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1020 sym->type.t |= VT_STATIC;
1021 put_extern_sym(sym, sec, offset, size);
1022 return sym;
1025 /* push a reference to a section offset by adding a dummy symbol */
1026 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1028 vpushsym(type, get_sym_ref(type, sec, offset, size));
1031 /* define a new external reference to a symbol 'v' of type 'u' */
1032 ST_FUNC Sym *external_global_sym(int v, CType *type)
1034 Sym *s;
1036 s = sym_find(v);
1037 if (!s) {
1038 /* push forward reference */
1039 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1040 s->type.ref = type->ref;
1041 } else if (IS_ASM_SYM(s)) {
1042 s->type.t = type->t | (s->type.t & VT_EXTERN);
1043 s->type.ref = type->ref;
1044 update_storage(s);
1046 return s;
1049 /* create an external reference with no specific type similar to asm labels.
1050 This avoids type conflicts if the symbol is used from C too */
1051 ST_FUNC Sym *external_helper_sym(int v)
1053 CType ct = { VT_ASM_FUNC, NULL };
1054 return external_global_sym(v, &ct);
1057 /* push a reference to an helper function (such as memmove) */
1058 ST_FUNC void vpush_helper_func(int v)
1060 vpushsym(&func_old_type, external_helper_sym(v));
1063 /* Merge symbol attributes. */
1064 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1066 if (sa1->aligned && !sa->aligned)
1067 sa->aligned = sa1->aligned;
1068 sa->packed |= sa1->packed;
1069 sa->weak |= sa1->weak;
1070 if (sa1->visibility != STV_DEFAULT) {
1071 int vis = sa->visibility;
1072 if (vis == STV_DEFAULT
1073 || vis > sa1->visibility)
1074 vis = sa1->visibility;
1075 sa->visibility = vis;
1077 sa->dllexport |= sa1->dllexport;
1078 sa->nodecorate |= sa1->nodecorate;
1079 sa->dllimport |= sa1->dllimport;
1082 /* Merge function attributes. */
1083 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1085 if (fa1->func_call && !fa->func_call)
1086 fa->func_call = fa1->func_call;
1087 if (fa1->func_type && !fa->func_type)
1088 fa->func_type = fa1->func_type;
1089 if (fa1->func_args && !fa->func_args)
1090 fa->func_args = fa1->func_args;
1091 if (fa1->func_noreturn)
1092 fa->func_noreturn = 1;
1093 if (fa1->func_ctor)
1094 fa->func_ctor = 1;
1095 if (fa1->func_dtor)
1096 fa->func_dtor = 1;
1099 /* Merge attributes. */
1100 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1102 merge_symattr(&ad->a, &ad1->a);
1103 merge_funcattr(&ad->f, &ad1->f);
1105 if (ad1->section)
1106 ad->section = ad1->section;
1107 if (ad1->alias_target)
1108 ad->alias_target = ad1->alias_target;
1109 if (ad1->asm_label)
1110 ad->asm_label = ad1->asm_label;
1111 if (ad1->attr_mode)
1112 ad->attr_mode = ad1->attr_mode;
1115 /* Merge some type attributes. */
1116 static void patch_type(Sym *sym, CType *type)
1118 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1119 if (!(sym->type.t & VT_EXTERN))
1120 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1121 sym->type.t &= ~VT_EXTERN;
1124 if (IS_ASM_SYM(sym)) {
1125 /* stay static if both are static */
1126 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1127 sym->type.ref = type->ref;
1130 if (!is_compatible_types(&sym->type, type)) {
1131 tcc_error("incompatible types for redefinition of '%s'",
1132 get_tok_str(sym->v, NULL));
1134 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1135 int static_proto = sym->type.t & VT_STATIC;
1136 /* warn if static follows non-static function declaration */
1137 if ((type->t & VT_STATIC) && !static_proto
1138 /* XXX this test for inline shouldn't be here. Until we
1139 implement gnu-inline mode again it silences a warning for
1140 mingw caused by our workarounds. */
1141 && !((type->t | sym->type.t) & VT_INLINE))
1142 tcc_warning("static storage ignored for redefinition of '%s'",
1143 get_tok_str(sym->v, NULL));
1145 /* set 'inline' if both agree or if one has static */
1146 if ((type->t | sym->type.t) & VT_INLINE) {
1147 if (!((type->t ^ sym->type.t) & VT_INLINE)
1148 || ((type->t | sym->type.t) & VT_STATIC))
1149 static_proto |= VT_INLINE;
1152 if (0 == (type->t & VT_EXTERN)) {
1153 struct FuncAttr f = sym->type.ref->f;
1154 /* put complete type, use static from prototype */
1155 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1156 sym->type.ref = type->ref;
1157 merge_funcattr(&sym->type.ref->f, &f);
1158 } else {
1159 sym->type.t &= ~VT_INLINE | static_proto;
1162 if (sym->type.ref->f.func_type == FUNC_OLD
1163 && type->ref->f.func_type != FUNC_OLD) {
1164 sym->type.ref = type->ref;
1167 } else {
1168 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1169 /* set array size if it was omitted in extern declaration */
1170 sym->type.ref->c = type->ref->c;
1172 if ((type->t ^ sym->type.t) & VT_STATIC)
1173 tcc_warning("storage mismatch for redefinition of '%s'",
1174 get_tok_str(sym->v, NULL));
1178 /* Merge some storage attributes. */
1179 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1181 if (type)
1182 patch_type(sym, type);
1184 #ifdef TCC_TARGET_PE
1185 if (sym->a.dllimport != ad->a.dllimport)
1186 tcc_error("incompatible dll linkage for redefinition of '%s'",
1187 get_tok_str(sym->v, NULL));
1188 #endif
1189 merge_symattr(&sym->a, &ad->a);
1190 if (ad->asm_label)
1191 sym->asm_label = ad->asm_label;
1192 update_storage(sym);
1195 /* copy sym to other stack */
1196 static Sym *sym_copy(Sym *s0, Sym **ps)
1198 Sym *s;
1199 s = sym_malloc(), *s = *s0;
1200 s->prev = *ps, *ps = s;
1201 if (s->v < SYM_FIRST_ANOM) {
1202 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1203 s->prev_tok = *ps, *ps = s;
1205 return s;
1208 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1209 static void sym_copy_ref(Sym *s, Sym **ps)
1211 int bt = s->type.t & VT_BTYPE;
1212 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1213 Sym **sp = &s->type.ref;
1214 for (s = *sp, *sp = NULL; s; s = s->next) {
1215 Sym *s2 = sym_copy(s, ps);
1216 sp = &(*sp = s2)->next;
1217 sym_copy_ref(s2, ps);
1222 /* define a new external reference to a symbol 'v' */
1223 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1225 Sym *s;
1227 /* look for global symbol */
1228 s = sym_find(v);
1229 while (s && s->sym_scope)
1230 s = s->prev_tok;
1232 if (!s) {
1233 /* push forward reference */
1234 s = global_identifier_push(v, type->t, 0);
1235 s->r |= r;
1236 s->a = ad->a;
1237 s->asm_label = ad->asm_label;
1238 s->type.ref = type->ref;
1239 /* copy type to the global stack */
1240 if (local_stack)
1241 sym_copy_ref(s, &global_stack);
1242 } else {
1243 patch_storage(s, ad, type);
1245 /* push variables on local_stack if any */
1246 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1247 s = sym_copy(s, &local_stack);
1248 return s;
1251 /* save registers up to (vtop - n) stack entry */
1252 ST_FUNC void save_regs(int n)
1254 SValue *p, *p1;
1255 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1256 save_reg(p->r);
1259 /* save r to the memory stack, and mark it as being free */
1260 ST_FUNC void save_reg(int r)
1262 save_reg_upstack(r, 0);
1265 /* save r to the memory stack, and mark it as being free,
1266 if seen up to (vtop - n) stack entry */
1267 ST_FUNC void save_reg_upstack(int r, int n)
1269 int l, size, align, bt;
1270 SValue *p, *p1, sv;
1272 if ((r &= VT_VALMASK) >= VT_CONST)
1273 return;
1274 if (nocode_wanted)
1275 return;
1276 l = 0;
1277 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1278 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1279 /* must save value on stack if not already done */
1280 if (!l) {
1281 bt = p->type.t & VT_BTYPE;
1282 if (bt == VT_VOID)
1283 continue;
1284 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1285 bt = VT_PTR;
1286 sv.type.t = bt;
1287 size = type_size(&sv.type, &align);
1288 l = get_temp_local_var(size,align);
1289 sv.r = VT_LOCAL | VT_LVAL;
1290 sv.c.i = l;
1291 store(p->r & VT_VALMASK, &sv);
1292 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1293 /* x86 specific: need to pop fp register ST0 if saved */
1294 if (r == TREG_ST0) {
1295 o(0xd8dd); /* fstp %st(0) */
1297 #endif
1298 /* special long long case */
1299 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1300 sv.c.i += PTR_SIZE;
1301 store(p->r2, &sv);
1304 /* mark that stack entry as being saved on the stack */
1305 if (p->r & VT_LVAL) {
1306 /* also clear the bounded flag because the
1307 relocation address of the function was stored in
1308 p->c.i */
1309 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1310 } else {
1311 p->r = VT_LVAL | VT_LOCAL;
1313 p->sym = NULL;
1314 p->r2 = VT_CONST;
1315 p->c.i = l;
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count value-stack references to register r */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
1345 /* find a free register of class 'rc'. If none, save one register */
1346 ST_FUNC int get_reg(int rc)
1348 int r;
1349 SValue *p;
1351 /* find a free register */
1352 for(r=0;r<NB_REGS;r++) {
1353 if (reg_classes[r] & rc) {
1354 if (nocode_wanted)
1355 return r;
1356 for(p=vstack;p<=vtop;p++) {
1357 if ((p->r & VT_VALMASK) == r ||
1358 p->r2 == r)
1359 goto notfound;
1361 return r;
1363 notfound: ;
1366 /* no register left : free the first one on the stack (VERY
1367 IMPORTANT to start from the bottom to ensure that we don't
1368 spill registers used in gen_opi()) */
1369 for(p=vstack;p<=vtop;p++) {
1370 /* look at second register (if long long) */
1371 r = p->r2;
1372 if (r < VT_CONST && (reg_classes[r] & rc))
1373 goto save_found;
1374 r = p->r & VT_VALMASK;
1375 if (r < VT_CONST && (reg_classes[r] & rc)) {
1376 save_found:
1377 save_reg(r);
1378 return r;
1381 /* Should never comes here */
1382 return -1;
1385 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1386 static int get_temp_local_var(int size,int align){
1387 int i;
1388 struct temp_local_variable *temp_var;
1389 int found_var;
1390 SValue *p;
1391 int r;
1392 char free;
1393 char found;
1394 found=0;
1395 for(i=0;i<nb_temp_local_vars;i++){
1396 temp_var=&arr_temp_local_vars[i];
1397 if(temp_var->size<size||align!=temp_var->align){
1398 continue;
1400 /*check if temp_var is free*/
1401 free=1;
1402 for(p=vstack;p<=vtop;p++) {
1403 r=p->r&VT_VALMASK;
1404 if(r==VT_LOCAL||r==VT_LLOCAL){
1405 if(p->c.i==temp_var->location){
1406 free=0;
1407 break;
1411 if(free){
1412 found_var=temp_var->location;
1413 found=1;
1414 break;
1417 if(!found){
1418 loc = (loc - size) & -align;
1419 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1420 temp_var=&arr_temp_local_vars[i];
1421 temp_var->location=loc;
1422 temp_var->size=size;
1423 temp_var->align=align;
1424 nb_temp_local_vars++;
1426 found_var=loc;
1428 return found_var;
1431 static void clear_temp_local_var_list(){
1432 nb_temp_local_vars=0;
1435 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1436 if needed */
1437 static void move_reg(int r, int s, int t)
1439 SValue sv;
1441 if (r != s) {
1442 save_reg(r);
1443 sv.type.t = t;
1444 sv.type.ref = NULL;
1445 sv.r = s;
1446 sv.c.i = 0;
1447 load(r, &sv);
1451 /* get address of vtop (vtop MUST BE an lvalue) */
1452 ST_FUNC void gaddrof(void)
1454 vtop->r &= ~VT_LVAL;
1455 /* tricky: if saved lvalue, then we can go back to lvalue */
1456 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1457 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1460 #ifdef CONFIG_TCC_BCHECK
1461 /* generate a bounded pointer addition */
1462 static void gen_bounded_ptr_add(void)
1464 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
1465 if (save) {
1466 vpushv(&vtop[-1]);
1467 vrott(3);
1469 vpush_helper_func(TOK___bound_ptr_add);
1470 vrott(3);
1471 gfunc_call(2);
1472 vtop -= save;
1473 vpushi(0);
1474 /* returned pointer is in REG_IRET */
1475 vtop->r = REG_IRET | VT_BOUNDED;
1476 if (nocode_wanted)
1477 return;
1478 /* relocation offset of the bounding function call point */
1479 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
1482 /* patch pointer addition in vtop so that pointer dereferencing is
1483 also tested */
1484 static void gen_bounded_ptr_deref(void)
1486 addr_t func;
1487 int size, align;
1488 ElfW_Rel *rel;
1489 Sym *sym;
1491 if (nocode_wanted)
1492 return;
1494 size = type_size(&vtop->type, &align);
1495 switch(size) {
1496 case 1: func = TOK___bound_ptr_indir1; break;
1497 case 2: func = TOK___bound_ptr_indir2; break;
1498 case 4: func = TOK___bound_ptr_indir4; break;
1499 case 8: func = TOK___bound_ptr_indir8; break;
1500 case 12: func = TOK___bound_ptr_indir12; break;
1501 case 16: func = TOK___bound_ptr_indir16; break;
1502 default:
1503 /* may happen with struct member access */
1504 return;
1506 sym = external_helper_sym(func);
1507 if (!sym->c)
1508 put_extern_sym(sym, NULL, 0, 0);
1509 /* patch relocation */
1510 /* XXX: find a better solution ? */
1511 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
1512 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
1515 /* generate lvalue bound code */
1516 static void gbound(void)
1518 CType type1;
1520 vtop->r &= ~VT_MUSTBOUND;
1521 /* if lvalue, then use checking code before dereferencing */
1522 if (vtop->r & VT_LVAL) {
1523 /* if not VT_BOUNDED value, then make one */
1524 if (!(vtop->r & VT_BOUNDED)) {
1525 /* must save type because we must set it to int to get pointer */
1526 type1 = vtop->type;
1527 vtop->type.t = VT_PTR;
1528 gaddrof();
1529 vpushi(0);
1530 gen_bounded_ptr_add();
1531 vtop->r |= VT_LVAL;
1532 vtop->type = type1;
1534 /* then check for dereferencing */
1535 gen_bounded_ptr_deref();
1539 /* we need to call __bound_ptr_add before we start to load function
1540 args into registers */
1541 ST_FUNC void gbound_args(int nb_args)
1543 int i, v;
1544 SValue *sv;
1546 for (i = 1; i <= nb_args; ++i)
1547 if (vtop[1 - i].r & VT_MUSTBOUND) {
1548 vrotb(i);
1549 gbound();
1550 vrott(i);
1553 sv = vtop - nb_args;
1554 if (sv->r & VT_SYM) {
1555 v = sv->sym->v;
1556 if (v == TOK_setjmp
1557 || v == TOK__setjmp
1558 #ifndef TCC_TARGET_PE
1559 || v == TOK_sigsetjmp
1560 || v == TOK___sigsetjmp
1561 #endif
1563 vpush_helper_func(TOK___bound_setjmp);
1564 vpushv(sv + 1);
1565 gfunc_call(1);
1566 func_bound_add_epilog = 1;
1568 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1569 if (v == TOK_alloca)
1570 func_bound_add_epilog = 1;
1571 #endif
1572 #if TARGETOS_NetBSD
1573 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
1574 sv->sym->asm_label = TOK___bound_longjmp;
1575 #endif
1579 /* Add bounds for local symbols from S to E (via ->prev) */
1580 static void add_local_bounds(Sym *s, Sym *e)
1582 for (; s != e; s = s->prev) {
1583 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1584 continue;
1585 /* Add arrays/structs/unions because we always take address */
1586 if ((s->type.t & VT_ARRAY)
1587 || (s->type.t & VT_BTYPE) == VT_STRUCT
1588 || s->a.addrtaken) {
1589 /* add local bound info */
1590 int align, size = type_size(&s->type, &align);
1591 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1592 2 * sizeof(addr_t));
1593 bounds_ptr[0] = s->c;
1594 bounds_ptr[1] = size;
1598 #endif
1600 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1601 static void pop_local_syms(Sym *b, int keep)
1603 #ifdef CONFIG_TCC_BCHECK
1604 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
1605 add_local_bounds(local_stack, b);
1606 #endif
1607 if (debug_modes)
1608 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
1609 sym_pop(&local_stack, b, keep);
1612 static void incr_bf_adr(int o)
1614 vtop->type = char_pointer_type;
1615 gaddrof();
1616 vpushs(o);
1617 gen_op('+');
1618 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1619 vtop->r |= VT_LVAL;
1622 /* single-byte load mode for packed or otherwise unaligned bitfields */
1623 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1625 int n, o, bits;
1626 save_reg_upstack(vtop->r, 1);
1627 vpush64(type->t & VT_BTYPE, 0); // B X
1628 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1629 do {
1630 vswap(); // X B
1631 incr_bf_adr(o);
1632 vdup(); // X B B
1633 n = 8 - bit_pos;
1634 if (n > bit_size)
1635 n = bit_size;
1636 if (bit_pos)
1637 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1638 if (n < 8)
1639 vpushi((1 << n) - 1), gen_op('&');
1640 gen_cast(type);
1641 if (bits)
1642 vpushi(bits), gen_op(TOK_SHL);
1643 vrotb(3); // B Y X
1644 gen_op('|'); // B X
1645 bits += n, bit_size -= n, o = 1;
1646 } while (bit_size);
1647 vswap(), vpop();
1648 if (!(type->t & VT_UNSIGNED)) {
1649 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1650 vpushi(n), gen_op(TOK_SHL);
1651 vpushi(n), gen_op(TOK_SAR);
1655 /* single-byte store mode for packed or otherwise unaligned bitfields */
1656 static void store_packed_bf(int bit_pos, int bit_size)
1658 int bits, n, o, m, c;
1659 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1660 vswap(); // X B
1661 save_reg_upstack(vtop->r, 1);
1662 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1663 do {
1664 incr_bf_adr(o); // X B
1665 vswap(); //B X
1666 c ? vdup() : gv_dup(); // B V X
1667 vrott(3); // X B V
1668 if (bits)
1669 vpushi(bits), gen_op(TOK_SHR);
1670 if (bit_pos)
1671 vpushi(bit_pos), gen_op(TOK_SHL);
1672 n = 8 - bit_pos;
1673 if (n > bit_size)
1674 n = bit_size;
1675 if (n < 8) {
1676 m = ((1 << n) - 1) << bit_pos;
1677 vpushi(m), gen_op('&'); // X B V1
1678 vpushv(vtop-1); // X B V1 B
1679 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1680 gen_op('&'); // X B V1 B1
1681 gen_op('|'); // X B V2
1683 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1684 vstore(), vpop(); // X B
1685 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1686 } while (bit_size);
1687 vpop(), vpop();
1690 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1692 int t;
1693 if (0 == sv->type.ref)
1694 return 0;
1695 t = sv->type.ref->auxtype;
1696 if (t != -1 && t != VT_STRUCT) {
1697 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1698 sv->r |= VT_LVAL;
1700 return t;
1703 /* store vtop a register belonging to class 'rc'. lvalues are
1704 converted to values. Cannot be used if cannot be converted to
1705 register value (such as structures). */
1706 ST_FUNC int gv(int rc)
1708 int r, r2, r_ok, r2_ok, rc2, bt;
1709 int bit_pos, bit_size, size, align;
1711 /* NOTE: get_reg can modify vstack[] */
1712 if (vtop->type.t & VT_BITFIELD) {
1713 CType type;
1715 bit_pos = BIT_POS(vtop->type.t);
1716 bit_size = BIT_SIZE(vtop->type.t);
1717 /* remove bit field info to avoid loops */
1718 vtop->type.t &= ~VT_STRUCT_MASK;
1720 type.ref = NULL;
1721 type.t = vtop->type.t & VT_UNSIGNED;
1722 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1723 type.t |= VT_UNSIGNED;
1725 r = adjust_bf(vtop, bit_pos, bit_size);
1727 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1728 type.t |= VT_LLONG;
1729 else
1730 type.t |= VT_INT;
1732 if (r == VT_STRUCT) {
1733 load_packed_bf(&type, bit_pos, bit_size);
1734 } else {
1735 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1736 /* cast to int to propagate signedness in following ops */
1737 gen_cast(&type);
1738 /* generate shifts */
1739 vpushi(bits - (bit_pos + bit_size));
1740 gen_op(TOK_SHL);
1741 vpushi(bits - bit_size);
1742 /* NOTE: transformed to SHR if unsigned */
1743 gen_op(TOK_SAR);
1745 r = gv(rc);
1746 } else {
1747 if (is_float(vtop->type.t) &&
1748 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1749 /* CPUs usually cannot use float constants, so we store them
1750 generically in data segment */
1751 init_params p = { rodata_section };
1752 unsigned long offset;
1753 size = type_size(&vtop->type, &align);
1754 if (NODATA_WANTED)
1755 size = 0, align = 1;
1756 offset = section_add(p.sec, size, align);
1757 vpush_ref(&vtop->type, p.sec, offset, size);
1758 vswap();
1759 init_putv(&p, &vtop->type, offset);
1760 vtop->r |= VT_LVAL;
1762 #ifdef CONFIG_TCC_BCHECK
1763 if (vtop->r & VT_MUSTBOUND)
1764 gbound();
1765 #endif
1767 bt = vtop->type.t & VT_BTYPE;
1769 #ifdef TCC_TARGET_RISCV64
1770 /* XXX mega hack */
1771 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1772 rc = RC_INT;
1773 #endif
1774 rc2 = RC2_TYPE(bt, rc);
1776 /* need to reload if:
1777 - constant
1778 - lvalue (need to dereference pointer)
1779 - already a register, but not in the right class */
1780 r = vtop->r & VT_VALMASK;
1781 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1782 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1784 if (!r_ok || !r2_ok) {
1785 if (!r_ok)
1786 r = get_reg(rc);
1787 if (rc2) {
1788 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1789 int original_type = vtop->type.t;
1791 /* two register type load :
1792 expand to two words temporarily */
1793 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1794 /* load constant */
1795 unsigned long long ll = vtop->c.i;
1796 vtop->c.i = ll; /* first word */
1797 load(r, vtop);
1798 vtop->r = r; /* save register value */
1799 vpushi(ll >> 32); /* second word */
1800 } else if (vtop->r & VT_LVAL) {
1801 /* We do not want to modifier the long long pointer here.
1802 So we save any other instances down the stack */
1803 save_reg_upstack(vtop->r, 1);
1804 /* load from memory */
1805 vtop->type.t = load_type;
1806 load(r, vtop);
1807 vdup();
1808 vtop[-1].r = r; /* save register value */
1809 /* increment pointer to get second word */
1810 vtop->type.t = VT_PTRDIFF_T;
1811 gaddrof();
1812 vpushs(PTR_SIZE);
1813 gen_op('+');
1814 vtop->r |= VT_LVAL;
1815 vtop->type.t = load_type;
1816 } else {
1817 /* move registers */
1818 if (!r_ok)
1819 load(r, vtop);
1820 if (r2_ok && vtop->r2 < VT_CONST)
1821 goto done;
1822 vdup();
1823 vtop[-1].r = r; /* save register value */
1824 vtop->r = vtop[-1].r2;
1826 /* Allocate second register. Here we rely on the fact that
1827 get_reg() tries first to free r2 of an SValue. */
1828 r2 = get_reg(rc2);
1829 load(r2, vtop);
1830 vpop();
1831 /* write second register */
1832 vtop->r2 = r2;
1833 done:
1834 vtop->type.t = original_type;
1835 } else {
1836 if (vtop->r == VT_CMP)
1837 vset_VT_JMP();
1838 /* one register type load */
1839 load(r, vtop);
1842 vtop->r = r;
1843 #ifdef TCC_TARGET_C67
1844 /* uses register pairs for doubles */
1845 if (bt == VT_DOUBLE)
1846 vtop->r2 = r+1;
1847 #endif
1849 return r;
1852 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1853 ST_FUNC void gv2(int rc1, int rc2)
1855 /* generate more generic register first. But VT_JMP or VT_CMP
1856 values must be generated first in all cases to avoid possible
1857 reload errors */
1858 if (vtop->r != VT_CMP && rc1 <= rc2) {
1859 vswap();
1860 gv(rc1);
1861 vswap();
1862 gv(rc2);
1863 /* test if reload is needed for first register */
1864 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1865 vswap();
1866 gv(rc1);
1867 vswap();
1869 } else {
1870 gv(rc2);
1871 vswap();
1872 gv(rc1);
1873 vswap();
1874 /* test if reload is needed for first register */
1875 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1876 gv(rc2);
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift to get the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
1915 /* convert stack entry to register and duplicate its value in another
1916 register */
1917 static void gv_dup(void)
1919 int t, rc, r;
1921 t = vtop->type.t;
1922 #if PTR_SIZE == 4
1923 if ((t & VT_BTYPE) == VT_LLONG) {
1924 if (t & VT_BITFIELD) {
1925 gv(RC_INT);
1926 t = vtop->type.t;
1928 lexpand();
1929 gv_dup();
1930 vswap();
1931 vrotb(3);
1932 gv_dup();
1933 vrotb(4);
1934 /* stack: H L L1 H1 */
1935 lbuild(t);
1936 vrotb(3);
1937 vrotb(3);
1938 vswap();
1939 lbuild(t);
1940 vswap();
1941 return;
1943 #endif
1944 /* duplicate value */
1945 rc = RC_TYPE(t);
1946 gv(rc);
1947 r = get_reg(rc);
1948 vdup();
1949 load(r, vtop);
1950 vtop->r = r;
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* signed 64-bit division for constant folding: divides via unsigned
   magnitudes and re-applies the sign, giving C's truncation toward
   zero without relying on host signed-division UB edge cases */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
    return (a ^ b) >> 63 ? -x : x;
}
/* signed 64-bit less-than for constant folding: flipping the sign bit
   biases both operands so an unsigned compare gives the signed order */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
}
2197 /* handle integer constant optimizations and various machine
2198 independent opt */
2199 static void gen_opic(int op)
2201 SValue *v1 = vtop - 1;
2202 SValue *v2 = vtop;
2203 int t1 = v1->type.t & VT_BTYPE;
2204 int t2 = v2->type.t & VT_BTYPE;
2205 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2206 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2207 int nonconst = (v1->r | v2->r) & VT_NONCONST;
2208 uint64_t l1 = c1 ? v1->c.i : 0;
2209 uint64_t l2 = c2 ? v2->c.i : 0;
2210 int shm = (t1 == VT_LLONG) ? 63 : 31;
2212 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2213 l1 = ((uint32_t)l1 |
2214 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2215 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2216 l2 = ((uint32_t)l2 |
2217 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2219 if (c1 && c2) {
2220 switch(op) {
2221 case '+': l1 += l2; break;
2222 case '-': l1 -= l2; break;
2223 case '&': l1 &= l2; break;
2224 case '^': l1 ^= l2; break;
2225 case '|': l1 |= l2; break;
2226 case '*': l1 *= l2; break;
2228 case TOK_PDIV:
2229 case '/':
2230 case '%':
2231 case TOK_UDIV:
2232 case TOK_UMOD:
2233 /* if division by zero, generate explicit division */
2234 if (l2 == 0) {
2235 if (const_wanted && !(nocode_wanted & unevalmask))
2236 tcc_error("division by zero in constant");
2237 goto general_case;
2239 switch(op) {
2240 default: l1 = gen_opic_sdiv(l1, l2); break;
2241 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2242 case TOK_UDIV: l1 = l1 / l2; break;
2243 case TOK_UMOD: l1 = l1 % l2; break;
2245 break;
2246 case TOK_SHL: l1 <<= (l2 & shm); break;
2247 case TOK_SHR: l1 >>= (l2 & shm); break;
2248 case TOK_SAR:
2249 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2250 break;
2251 /* tests */
2252 case TOK_ULT: l1 = l1 < l2; break;
2253 case TOK_UGE: l1 = l1 >= l2; break;
2254 case TOK_EQ: l1 = l1 == l2; break;
2255 case TOK_NE: l1 = l1 != l2; break;
2256 case TOK_ULE: l1 = l1 <= l2; break;
2257 case TOK_UGT: l1 = l1 > l2; break;
2258 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2259 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2260 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2261 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2262 /* logical */
2263 case TOK_LAND: l1 = l1 && l2; break;
2264 case TOK_LOR: l1 = l1 || l2; break;
2265 default:
2266 goto general_case;
2268 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2269 l1 = ((uint32_t)l1 |
2270 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2271 v1->c.i = l1;
2272 vtop--;
2273 } else {
2274 nonconst = VT_NONCONST;
2275 /* if commutative ops, put c2 as constant */
2276 if (c1 && (op == '+' || op == '&' || op == '^' ||
2277 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2278 vswap();
2279 c2 = c1; //c = c1, c1 = c2, c2 = c;
2280 l2 = l1; //l = l1, l1 = l2, l2 = l;
2282 if (!const_wanted &&
2283 c1 && ((l1 == 0 &&
2284 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2285 (l1 == -1 && op == TOK_SAR))) {
2286 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2287 vtop--;
2288 } else if (!const_wanted &&
2289 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2290 (op == '|' &&
2291 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2292 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2293 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2294 if (l2 == 1)
2295 vtop->c.i = 0;
2296 vswap();
2297 vtop--;
2298 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2299 op == TOK_PDIV) &&
2300 l2 == 1) ||
2301 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2302 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2303 l2 == 0) ||
2304 (op == '&' &&
2305 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2306 /* filter out NOP operations like x*1, x-0, x&-1... */
2307 vtop--;
2308 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2309 /* try to use shifts instead of muls or divs */
2310 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2311 int n = -1;
2312 while (l2) {
2313 l2 >>= 1;
2314 n++;
2316 vtop->c.i = n;
2317 if (op == '*')
2318 op = TOK_SHL;
2319 else if (op == TOK_PDIV)
2320 op = TOK_SAR;
2321 else
2322 op = TOK_SHR;
2324 goto general_case;
2325 } else if (c2 && (op == '+' || op == '-') &&
2326 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2327 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2328 /* symbol + constant case */
2329 if (op == '-')
2330 l2 = -l2;
2331 l2 += vtop[-1].c.i;
2332 /* The backends can't always deal with addends to symbols
2333 larger than +-1<<31. Don't construct such. */
2334 if ((int)l2 != l2)
2335 goto general_case;
2336 vtop--;
2337 vtop->c.i = l2;
2338 } else {
2339 general_case:
2340 /* call low level op generator */
2341 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2342 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2343 gen_opl(op);
2344 else
2345 gen_opi(op);
2348 if (vtop->r == VT_CONST)
2349 vtop->r |= nonconst;
2352 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2353 # define gen_negf gen_opf
2354 #elif defined TCC_TARGET_ARM
2355 void gen_negf(int op)
2357 /* arm will detect 0-x and replace by vneg */
2358 vpushi(0), vswap(), gen_op('-');
2360 #else
2361 /* XXX: implement in gen_opf() for other backends too */
2362 void gen_negf(int op)
2364 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2365 subtract(-0, x), but with them it's really a sign flip
2366 operation. We implement this with bit manipulation and have
2367 to do some type reinterpretation for this, which TCC can do
2368 only via memory. */
2370 int align, size, bt;
2372 size = type_size(&vtop->type, &align);
2373 bt = vtop->type.t & VT_BTYPE;
2374 save_reg(gv(RC_TYPE(bt)));
2375 vdup();
2376 incr_bf_adr(size - 1);
2377 vdup();
2378 vpushi(0x80); /* flip sign */
2379 gen_op('^');
2380 vstore();
2381 vpop();
2383 #endif
2385 /* generate a floating point operation with constant propagation */
2386 static void gen_opif(int op)
2388 int c1, c2;
2389 SValue *v1, *v2;
2390 #if defined _MSC_VER && defined __x86_64__
2391 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2392 volatile
2393 #endif
2394 long double f1, f2;
2396 v1 = vtop - 1;
2397 v2 = vtop;
2398 if (op == TOK_NEG)
2399 v1 = v2;
2401 /* currently, we cannot do computations with forward symbols */
2402 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2403 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2404 if (c1 && c2) {
2405 if (v1->type.t == VT_FLOAT) {
2406 f1 = v1->c.f;
2407 f2 = v2->c.f;
2408 } else if (v1->type.t == VT_DOUBLE) {
2409 f1 = v1->c.d;
2410 f2 = v2->c.d;
2411 } else {
2412 f1 = v1->c.ld;
2413 f2 = v2->c.ld;
2415 /* NOTE: we only do constant propagation if finite number (not
2416 NaN or infinity) (ANSI spec) */
2417 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
2418 goto general_case;
2419 switch(op) {
2420 case '+': f1 += f2; break;
2421 case '-': f1 -= f2; break;
2422 case '*': f1 *= f2; break;
2423 case '/':
2424 if (f2 == 0.0) {
2425 union { float f; unsigned u; } x1, x2, y;
2426 /* If not in initializer we need to potentially generate
2427 FP exceptions at runtime, otherwise we want to fold. */
2428 if (!const_wanted)
2429 goto general_case;
2430 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2431 when used to compile the f1 /= f2 below, would be -nan */
2432 x1.f = f1, x2.f = f2;
2433 if (f1 == 0.0)
2434 y.u = 0x7fc00000; /* nan */
2435 else
2436 y.u = 0x7f800000; /* infinity */
2437 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2438 f1 = y.f;
2439 break;
2441 f1 /= f2;
2442 break;
2443 case TOK_NEG:
2444 f1 = -f1;
2445 goto unary_result;
2446 /* XXX: also handles tests ? */
2447 default:
2448 goto general_case;
2450 vtop--;
2451 unary_result:
2452 /* XXX: overflow test ? */
2453 if (v1->type.t == VT_FLOAT) {
2454 v1->c.f = f1;
2455 } else if (v1->type.t == VT_DOUBLE) {
2456 v1->c.d = f1;
2457 } else {
2458 v1->c.ld = f1;
2460 } else {
2461 general_case:
2462 if (op == TOK_NEG) {
2463 gen_negf(op);
2464 } else {
2465 gen_opf(op);
2470 /* print a type. If 'varstr' is not NULL, then the variable is also
2471 printed in the type */
2472 /* XXX: union */
2473 /* XXX: add array and function pointers */
2474 static void type_to_str(char *buf, int buf_size,
2475 CType *type, const char *varstr)
2477 int bt, v, t;
2478 Sym *s, *sa;
2479 char buf1[256];
2480 const char *tstr;
2482 t = type->t;
2483 bt = t & VT_BTYPE;
2484 buf[0] = '\0';
2486 if (t & VT_EXTERN)
2487 pstrcat(buf, buf_size, "extern ");
2488 if (t & VT_STATIC)
2489 pstrcat(buf, buf_size, "static ");
2490 if (t & VT_TYPEDEF)
2491 pstrcat(buf, buf_size, "typedef ");
2492 if (t & VT_INLINE)
2493 pstrcat(buf, buf_size, "inline ");
2494 if (bt != VT_PTR) {
2495 if (t & VT_VOLATILE)
2496 pstrcat(buf, buf_size, "volatile ");
2497 if (t & VT_CONSTANT)
2498 pstrcat(buf, buf_size, "const ");
2500 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2501 || ((t & VT_UNSIGNED)
2502 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2503 && !IS_ENUM(t)
2505 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2507 buf_size -= strlen(buf);
2508 buf += strlen(buf);
2510 switch(bt) {
2511 case VT_VOID:
2512 tstr = "void";
2513 goto add_tstr;
2514 case VT_BOOL:
2515 tstr = "_Bool";
2516 goto add_tstr;
2517 case VT_BYTE:
2518 tstr = "char";
2519 goto add_tstr;
2520 case VT_SHORT:
2521 tstr = "short";
2522 goto add_tstr;
2523 case VT_INT:
2524 tstr = "int";
2525 goto maybe_long;
2526 case VT_LLONG:
2527 tstr = "long long";
2528 maybe_long:
2529 if (t & VT_LONG)
2530 tstr = "long";
2531 if (!IS_ENUM(t))
2532 goto add_tstr;
2533 tstr = "enum ";
2534 goto tstruct;
2535 case VT_FLOAT:
2536 tstr = "float";
2537 goto add_tstr;
2538 case VT_DOUBLE:
2539 tstr = "double";
2540 if (!(t & VT_LONG))
2541 goto add_tstr;
2542 case VT_LDOUBLE:
2543 tstr = "long double";
2544 add_tstr:
2545 pstrcat(buf, buf_size, tstr);
2546 break;
2547 case VT_STRUCT:
2548 tstr = "struct ";
2549 if (IS_UNION(t))
2550 tstr = "union ";
2551 tstruct:
2552 pstrcat(buf, buf_size, tstr);
2553 v = type->ref->v & ~SYM_STRUCT;
2554 if (v >= SYM_FIRST_ANOM)
2555 pstrcat(buf, buf_size, "<anonymous>");
2556 else
2557 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2558 break;
2559 case VT_FUNC:
2560 s = type->ref;
2561 buf1[0]=0;
2562 if (varstr && '*' == *varstr) {
2563 pstrcat(buf1, sizeof(buf1), "(");
2564 pstrcat(buf1, sizeof(buf1), varstr);
2565 pstrcat(buf1, sizeof(buf1), ")");
2567 pstrcat(buf1, buf_size, "(");
2568 sa = s->next;
2569 while (sa != NULL) {
2570 char buf2[256];
2571 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2572 pstrcat(buf1, sizeof(buf1), buf2);
2573 sa = sa->next;
2574 if (sa)
2575 pstrcat(buf1, sizeof(buf1), ", ");
2577 if (s->f.func_type == FUNC_ELLIPSIS)
2578 pstrcat(buf1, sizeof(buf1), ", ...");
2579 pstrcat(buf1, sizeof(buf1), ")");
2580 type_to_str(buf, buf_size, &s->type, buf1);
2581 goto no_var;
2582 case VT_PTR:
2583 s = type->ref;
2584 if (t & (VT_ARRAY|VT_VLA)) {
2585 if (varstr && '*' == *varstr)
2586 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2587 else
2588 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2589 type_to_str(buf, buf_size, &s->type, buf1);
2590 goto no_var;
2592 pstrcpy(buf1, sizeof(buf1), "*");
2593 if (t & VT_CONSTANT)
2594 pstrcat(buf1, buf_size, "const ");
2595 if (t & VT_VOLATILE)
2596 pstrcat(buf1, buf_size, "volatile ");
2597 if (varstr)
2598 pstrcat(buf1, sizeof(buf1), varstr);
2599 type_to_str(buf, buf_size, &s->type, buf1);
2600 goto no_var;
2602 if (varstr) {
2603 pstrcat(buf, buf_size, " ");
2604 pstrcat(buf, buf_size, varstr);
2606 no_var: ;
2609 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2611 char buf1[256], buf2[256];
2612 type_to_str(buf1, sizeof(buf1), st, NULL);
2613 type_to_str(buf2, sizeof(buf2), dt, NULL);
2614 tcc_error(fmt, buf1, buf2);
2617 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2619 char buf1[256], buf2[256];
2620 type_to_str(buf1, sizeof(buf1), st, NULL);
2621 type_to_str(buf2, sizeof(buf2), dt, NULL);
2622 tcc_warning(fmt, buf1, buf2);
2625 static int pointed_size(CType *type)
2627 int align;
2628 return type_size(pointed_type(type), &align);
/* return 1 if 'p' is a compile-time null pointer constant: a plain
   constant (no lvalue/symbol/runtime part) with value 0 of integer
   type, or an unqualified 'void *' constant equal to 0.  On 32-bit
   targets pointer constants are compared on their low 32 bits only. */
static inline int is_null_pointer(SValue *p)
{
    /* must be a pure constant: any lvalue, symbol reference or
       non-constant marker disqualifies it */
    if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
        return 0;
    return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_PTR &&
         (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
         ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
         0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
         );
}
/* compare function types. OLD functions match any new functions */
static int is_compatible_func(CType *type1, CType *type2)
{
    Sym *s1, *s2;

    s1 = type1->ref;
    s2 = type2->ref;
    /* calling conventions must agree */
    if (s1->f.func_call != s2->f.func_call)
        return 0;
    /* prototype kinds must agree, except that an old-style (K&R)
       declaration is compatible with anything */
    if (s1->f.func_type != s2->f.func_type
        && s1->f.func_type != FUNC_OLD
        && s2->f.func_type != FUNC_OLD)
        return 0;
    /* walk both parameter lists in lockstep; s->type of the function
       symbol itself is the return type, then each ->next is a param */
    for (;;) {
        if (!is_compatible_unqualified_types(&s1->type, &s2->type))
            return 0;
        if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
            return 1;
        s1 = s1->next;
        s2 = s2->next;
        if (!s1)
            return !s2; /* compatible only if both lists end together */
        if (!s2)
            return 0;
    }
}
/* return true if type1 and type2 are the same.  If unqualified is
   true, qualifiers on the types are ignored.
 */
static int compare_types(CType *type1, CType *type2, int unqualified)
{
    int bt1, t1, t2;

    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
    if (unqualified) {
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);
    }

    /* Default Vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
        t1 &= ~VT_DEFSIGN;
        t2 &= ~VT_DEFSIGN;
    }
    /* XXX: bitfields ? */
    if (t1 != t2)
        return 0;

    /* arrays must have the same element count, unless one of them is
       an incomplete array (c < 0) */
    if ((t1 & VT_ARRAY)
        && !(type1->ref->c < 0
          || type2->ref->c < 0
          || type1->ref->c == type2->ref->c))
            return 0;

    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        /* pointers: recurse on the pointed-to types (qualifiers there
           always matter, hence is_compatible_types) */
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        /* structs/unions compare by identity of their tag symbol */
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
        /* If both are enums then they must be the same, if only one is then
           t1 and t2 must be equal, which was checked above already.  */
        return type1->ref == type2->ref;
    } else {
        return 1;
    }
}
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) .
   Returns 0 when the combination is invalid (caller reports the error). */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1;

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other.  */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers of the same basic kind */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol so the original
                       type isn't mutated when adding qualifiers */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    /* take the known element count from whichever side
                       has a completed array */
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* pointer comparisons yield an integer result */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversions: widest fp type wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems.  Pops the two top stack
   values, applies binary operation 'op' and pushes the result. */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* functions decay to pointers-to-function, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* (p1 - p2) / sizeof(*p1) */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts: the result type follows the LEFT operand only */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
/* these targets handle unsigned long long -> fp natively */
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case: call a libtcc1 helper
   function since the target backend can't do it directly */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark the result as living in the fp return register */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
/* these targets handle fp -> unsigned long long natively */
#define gen_cvt_ftoi1 gen_cvt_ftoi
#else
/* generic ftoi for unsigned long long case: call a libtcc1 helper
   function since the target backend can't do it directly */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark the result as living in the integer return register */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
#endif
/* special delayed cast for char/short: the value currently lives in a
   register with int (MUSTCAST==1) or long long (MUSTCAST==2) width and
   must be narrowed to its declared char/short type now */
static void force_charshort_cast(void)
{
    int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
    int dbt = vtop->type.t;
    vtop->r &= ~VT_MUSTCAST;
    /* temporarily give the value its register width, then cast down;
       _Bool is narrowed as unsigned char */
    vtop->type.t = sbt;
    gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
    vtop->type.t = dbt;
}
3040 static void gen_cast_s(int t)
3042 CType type;
3043 type.t = t;
3044 type.ref = NULL;
3045 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
        error:
            cast_error(&vtop->type, type);
        }

        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't constant-fold to long double when cross-compiling
           (host long double may differ from target's) */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if (sbt_bt == VT_LLONG) {
                    /* negate-then-negate avoids UB when converting
                       values with the top bit set */
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer -> integer: first canonicalize the source
                   value to 64 bits ... */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                /* ... then truncate / extend to the destination width */
                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (nocode_wanted & DATA_ONLY_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        /* these targets have a direct narrow-register cast */
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* generic narrowing: shift left then arithmetic/logical shift
           right to sign- or zero-extend the low 'ds' bytes */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a'.
   Returns a negative value for incomplete types (VLAs, incomplete
   enums/arrays). */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union: size (c) and alignment (r) were computed at
           declaration time and stored on the tag symbol */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* an incomplete element in an incomplete array: keep the
               magnitude so the multiply below stays meaningful */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is target-ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
/* push type size as known at runtime time on top of value stack. Put
   alignment at 'a'.  For a VLA the (already evaluated) runtime size is
   loaded from its local slot; otherwise the constant size is pushed. */
static void vpush_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* alignment comes from the element type; the size itself was
           stored at local offset type->ref->c when the VLA was declared */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        int size = type_size(type, a);
        if (size < 0)
            tcc_error("unknown type size");
#if PTR_SIZE == 8
        vpushll(size);
#else
        vpushi(size);
#endif
    }
}
3365 /* return the pointed type of t */
3366 static inline CType *pointed_type(CType *type)
3368 return &type->ref->type;
3371 /* modify type so that its it is a pointer to type. */
3372 ST_FUNC void mk_pointer(CType *type)
3374 Sym *s;
3375 s = sym_push(SYM_FIELD, type, 0, -1);
3376 type->t = VT_PTR | (type->t & VT_STORAGE);
3377 type->ref = s;
3380 /* return true if type1 and type2 are exactly the same (including
3381 qualifiers).
3383 static int is_compatible_types(CType *type1, CType *type2)
3385 return compare_types(type1,type2,0);
3388 /* return true if type1 and type2 are the same (ignoring qualifiers).
3390 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3392 return compare_types(type1,type2,1);
3395 static void cast_error(CType *st, CType *dt)
3397 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
/* verify type compatibility to store vtop in 'dt' type.
   Emits warnings for dubious but accepted conversions and a fatal
   error for invalid ones. */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* descend through matching pointer levels, remembering whether
           any level drops const/volatile from the source */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
        error:
            cast_error(st, dt);
        }
        break;
    }
}
3486 static void gen_assign_cast(CType *dt)
3488 verify_assign_cast(dt);
3489 gen_cast(dt);
/* store vtop in lvalue pushed on stack.  On return the stored value
   remains on top of the stack as the expression result. */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            /* the backend can emit an inline copy sequence */
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* normalize to 0/1 first, then store as unsigned char */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* field straddles machine words: byte-wise store */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            /* remember that the in-register value is wider than its
               declared char/short type (1 = int width, 2 = llong) */
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64)  */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
3670 /* post defines POST/PRE add. c is the token ++ or -- */
3671 ST_FUNC void inc(int post, int c)
3673 test_lvalue();
3674 vdup(); /* save lvalue */
3675 if (post) {
3676 gv_dup(); /* duplicate value */
3677 vrotb(3);
3678 vrotb(3);
3680 /* add constant */
/* c - TOK_MID yields +1 for '++' and -1 for '--'
   (NOTE(review): relies on TOK_MID lying midway between the two
   tokens — confirm against the token definitions in tcc.h) */
3681 vpushi(c - TOK_MID);
3682 gen_op('+');
3683 vstore(); /* store value */
3684 if (post)
3685 vpop(); /* if post op, return saved value */
3688 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
/* Parse one or more adjacent string-literal tokens into 'astr' and
   NUL-terminate the result.  'msg' is the error text used when the
   current token is not a string literal. */
3690 /* read the string */
3691 if (tok != TOK_STR)
3692 expect(msg);
3693 cstr_new(astr);
3694 while (tok == TOK_STR) {
3695 /* XXX: add \0 handling too ? */
3696 cstr_cat(astr, tokc.str.data, -1);
3697 next();
3699 cstr_ccat(astr, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.
   (For any other positive I the result is floor(log2(i))+1,
   i.e. the position of the highest set bit, counted from 1.) */
ST_FUNC int exact_log2p1(int i)
{
    int bits;

    if (i == 0)
        return 0;
    /* shift the value down one bit at a time, counting how many
       positions the top set bit is above bit 0 */
    for (bits = 1; i >= 1 << 1; i >>= 1)
        bits++;
    return bits;
}
3720 /* Parse __attribute__((...)) GNUC extension. */
3721 static void parse_attribute(AttributeDef *ad)
/* Consumes one or more consecutive __attribute__((...)) groups (the
   'redo' loop at the end re-enters for a following group) and records
   the recognized attributes into *ad.  Unknown attributes produce a
   warning and their parenthesized arguments, if any, are skipped. */
3723 int t, n;
3724 CString astr;
3726 redo:
3727 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3728 return;
3729 next();
3730 skip('(');
3731 skip('(');
3732 while (tok != ')') {
3733 if (tok < TOK_IDENT)
3734 expect("attribute name");
3735 t = tok;
3736 next();
3737 switch(t) {
3738 case TOK_CLEANUP1:
3739 case TOK_CLEANUP2:
3741 Sym *s;
3743 skip('(');
3744 s = sym_find(tok);
3745 if (!s) {
3746 tcc_warning_c(warn_implicit_function_declaration)(
3747 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
3748 s = external_global_sym(tok, &func_old_type);
3749 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
3750 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
3751 ad->cleanup_func = s;
3752 next();
3753 skip(')');
3754 break;
3756 case TOK_CONSTRUCTOR1:
3757 case TOK_CONSTRUCTOR2:
3758 ad->f.func_ctor = 1;
3759 break;
3760 case TOK_DESTRUCTOR1:
3761 case TOK_DESTRUCTOR2:
3762 ad->f.func_dtor = 1;
3763 break;
3764 case TOK_ALWAYS_INLINE1:
3765 case TOK_ALWAYS_INLINE2:
3766 ad->f.func_alwinl = 1;
3767 break;
3768 case TOK_SECTION1:
3769 case TOK_SECTION2:
3770 skip('(');
3771 parse_mult_str(&astr, "section name");
3772 ad->section = find_section(tcc_state, (char *)astr.data);
3773 skip(')');
3774 cstr_free(&astr);
3775 break;
3776 case TOK_ALIAS1:
3777 case TOK_ALIAS2:
3778 skip('(');
3779 parse_mult_str(&astr, "alias(\"target\")");
3780 ad->alias_target = /* save string as token, for later */
3781 tok_alloc((char*)astr.data, astr.size-1)->tok;
3782 skip(')');
3783 cstr_free(&astr);
3784 break;
3785 case TOK_VISIBILITY1:
3786 case TOK_VISIBILITY2:
3787 skip('(');
3788 parse_mult_str(&astr,
3789 "visibility(\"default|hidden|internal|protected\")");
3790 if (!strcmp (astr.data, "default"))
3791 ad->a.visibility = STV_DEFAULT;
3792 else if (!strcmp (astr.data, "hidden"))
3793 ad->a.visibility = STV_HIDDEN;
3794 else if (!strcmp (astr.data, "internal"))
3795 ad->a.visibility = STV_INTERNAL;
3796 else if (!strcmp (astr.data, "protected"))
3797 ad->a.visibility = STV_PROTECTED;
3798 else
3799 expect("visibility(\"default|hidden|internal|protected\")");
3800 skip(')');
3801 cstr_free(&astr);
3802 break;
3803 case TOK_ALIGNED1:
3804 case TOK_ALIGNED2:
/* aligned(n): n must be a positive power of two; bare 'aligned'
   without an argument means MAX_ALIGN.  Stored as log2(n)+1 so
   that 0 can mean "not specified". */
3805 if (tok == '(') {
3806 next();
3807 n = expr_const();
3808 if (n <= 0 || (n & (n - 1)) != 0)
3809 tcc_error("alignment must be a positive power of two");
3810 skip(')');
3811 } else {
3812 n = MAX_ALIGN;
3814 ad->a.aligned = exact_log2p1(n);
3815 if (n != 1 << (ad->a.aligned - 1))
3816 tcc_error("alignment of %d is larger than implemented", n);
3817 break;
3818 case TOK_PACKED1:
3819 case TOK_PACKED2:
3820 ad->a.packed = 1;
3821 break;
3822 case TOK_WEAK1:
3823 case TOK_WEAK2:
3824 ad->a.weak = 1;
3825 break;
3826 case TOK_UNUSED1:
3827 case TOK_UNUSED2:
3828 /* currently, no need to handle it because tcc does not
3829 track unused objects */
3830 break;
3831 case TOK_NORETURN1:
3832 case TOK_NORETURN2:
3833 ad->f.func_noreturn = 1;
3834 break;
3835 case TOK_CDECL1:
3836 case TOK_CDECL2:
3837 case TOK_CDECL3:
3838 ad->f.func_call = FUNC_CDECL;
3839 break;
3840 case TOK_STDCALL1:
3841 case TOK_STDCALL2:
3842 case TOK_STDCALL3:
3843 ad->f.func_call = FUNC_STDCALL;
3844 break;
3845 #ifdef TCC_TARGET_I386
3846 case TOK_REGPARM1:
3847 case TOK_REGPARM2:
3848 skip('(');
3849 n = expr_const();
/* clamp regparm count to the 0..3 range supported on i386 */
3850 if (n > 3)
3851 n = 3;
3852 else if (n < 0)
3853 n = 0;
3854 if (n > 0)
3855 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3856 skip(')');
3857 break;
3858 case TOK_FASTCALL1:
3859 case TOK_FASTCALL2:
3860 case TOK_FASTCALL3:
3861 ad->f.func_call = FUNC_FASTCALLW;
3862 break;
3863 #endif
3864 case TOK_MODE:
/* __attribute__((mode(...))): stored as basic VT type + 1 so
   that attr_mode == 0 means "no mode attribute seen" */
3865 skip('(');
3866 switch(tok) {
3867 case TOK_MODE_DI:
3868 ad->attr_mode = VT_LLONG + 1;
3869 break;
3870 case TOK_MODE_QI:
3871 ad->attr_mode = VT_BYTE + 1;
3872 break;
3873 case TOK_MODE_HI:
3874 ad->attr_mode = VT_SHORT + 1;
3875 break;
3876 case TOK_MODE_SI:
3877 case TOK_MODE_word:
3878 ad->attr_mode = VT_INT + 1;
3879 break;
3880 default:
3881 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3882 break;
3884 next();
3885 skip(')');
3886 break;
3887 case TOK_DLLEXPORT:
3888 ad->a.dllexport = 1;
3889 break;
3890 case TOK_NODECORATE:
3891 ad->a.nodecorate = 1;
3892 break;
3893 case TOK_DLLIMPORT:
3894 ad->a.dllimport = 1;
3895 break;
3896 default:
3897 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
3898 /* skip parameters */
/* skip a balanced parenthesized argument list of the
   unknown attribute (stops at EOF to avoid looping forever) */
3899 if (tok == '(') {
3900 int parenthesis = 0;
3901 do {
3902 if (tok == '(')
3903 parenthesis++;
3904 else if (tok == ')')
3905 parenthesis--;
3906 next();
3907 } while (parenthesis && tok != -1);
3909 break;
3911 if (tok != ',')
3912 break;
3913 next();
3915 skip(')');
3916 skip(')');
3917 goto redo;
3920 static Sym * find_field (CType *type, int v, int *cumofs)
3922 Sym *s = type->ref;
3923 int v1 = v | SYM_FIELD;
3925 while ((s = s->next) != NULL) {
3926 if (s->v == v1) {
3927 *cumofs += s->c;
3928 return s;
3930 if ((s->type.t & VT_BTYPE) == VT_STRUCT
3931 && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
3932 /* try to find field in anonymous sub-struct/union */
3933 Sym *ret = find_field (&s->type, v1, cumofs);
3934 if (ret) {
3935 *cumofs += s->c;
3936 return ret;
3941 if (!(v & SYM_FIELD)) { /* top-level call */
3942 s = type->ref;
3943 if (s->c < 0)
3944 tcc_error("dereferencing incomplete type '%s'",
3945 get_tok_str(s->v & ~SYM_STRUCT, 0));
3946 else
3947 tcc_error("field not found: %s",
3948 get_tok_str(v, &tokc));
3950 return NULL;
3953 static void check_fields (CType *type, int check)
3955 Sym *s = type->ref;
3957 while ((s = s->next) != NULL) {
3958 int v = s->v & ~SYM_FIELD;
3959 if (v < SYM_FIRST_ANOM) {
3960 TokenSym *ts = table_ident[v - TOK_IDENT];
3961 if (check && (ts->tok & SYM_FIELD))
3962 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
3963 ts->tok ^= SYM_FIELD;
3964 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
3965 check_fields (&s->type, check);
3969 static void struct_layout(CType *type, AttributeDef *ad)
/* Compute the byte offset (f->c) of every member of a completed
   struct/union and the overall size/alignment (type->ref->c / ->r).
   Two bit-field layout models are supported: PCC/GCC-compatible (the
   default) and MS-compatible (tcc_state->ms_bitfields), with 'packed'
   attributes and '#pragma pack' taken into account.  A second pass then
   re-types bit-fields that cannot be accessed through their declared
   base type (f->auxtype). */
3971 int size, align, maxalign, offset, c, bit_pos, bit_size;
3972 int packed, a, bt, prevbt, prev_bit_size;
3973 int pcc = !tcc_state->ms_bitfields;
3974 int pragma_pack = *tcc_state->pack_stack_ptr;
3975 Sym *f;
3977 maxalign = 1;
3978 offset = 0;
3979 c = 0;
3980 bit_pos = 0;
3981 prevbt = VT_STRUCT; /* make it never match */
3982 prev_bit_size = 0;
3984 //#define BF_DEBUG
3986 for (f = type->ref->next; f; f = f->next) {
3987 if (f->type.t & VT_BITFIELD)
3988 bit_size = BIT_SIZE(f->type.t);
3989 else
3990 bit_size = -1;
3991 size = type_size(&f->type, &align);
3992 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3993 packed = 0;
3995 if (pcc && bit_size == 0) {
3996 /* in pcc mode, packing does not affect zero-width bitfields */
3998 } else {
3999 /* in pcc mode, attribute packed overrides if set. */
4000 if (pcc && (f->a.packed || ad->a.packed))
4001 align = packed = 1;
4003 /* pragma pack overrides align if lesser and packs bitfields always */
4004 if (pragma_pack) {
4005 packed = 1;
4006 if (pragma_pack < align)
4007 align = pragma_pack;
4008 /* in pcc mode pragma pack also overrides individual align */
4009 if (pcc && pragma_pack < a)
4010 a = 0;
4013 /* some individual align was specified */
4014 if (a)
4015 align = a;
4017 if (type->ref->type.t == VT_UNION) {
/* union: all members live at offset 0; 'c' tracks the largest
   member size */
4018 if (pcc && bit_size >= 0)
4019 size = (bit_size + 7) >> 3;
4020 offset = 0;
4021 if (size > c)
4022 c = size;
4024 } else if (bit_size < 0) {
/* plain (non bit-field) struct member: close any pending
   bit-field run, then align and append */
4025 if (pcc)
4026 c += (bit_pos + 7) >> 3;
4027 c = (c + align - 1) & -align;
4028 offset = c;
4029 if (size > 0)
4030 c += size;
4031 bit_pos = 0;
4032 prevbt = VT_STRUCT;
4033 prev_bit_size = 0;
4035 } else {
4036 /* A bit-field. Layout is more complicated. There are two
4037 options: PCC (GCC) compatible and MS compatible */
4038 if (pcc) {
4039 /* In PCC layout a bit-field is placed adjacent to the
4040 preceding bit-fields, except if:
4041 - it has zero-width
4042 - an individual alignment was given
4043 - it would overflow its base type container and
4044 there is no packing */
4045 if (bit_size == 0) {
4046 new_field:
4047 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4048 bit_pos = 0;
4049 } else if (f->a.aligned) {
4050 goto new_field;
4051 } else if (!packed) {
4052 int a8 = align * 8;
4053 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4054 if (ofs > size / align)
4055 goto new_field;
4058 /* in pcc mode, long long bitfields have type int if they fit */
4059 if (size == 8 && bit_size <= 32)
4060 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4062 while (bit_pos >= align * 8)
4063 c += align, bit_pos -= align * 8;
4064 offset = c;
4066 /* In PCC layout named bit-fields influence the alignment
4067 of the containing struct using the base types alignment,
4068 except for packed fields (which here have correct align). */
4069 if (f->v & SYM_FIRST_ANOM
4070 // && bit_size // ??? gcc on ARM/rpi does that
4072 align = 1;
4074 } else {
4075 bt = f->type.t & VT_BTYPE;
4076 if ((bit_pos + bit_size > size * 8)
4077 || (bit_size > 0) == (bt != prevbt)
4079 c = (c + align - 1) & -align;
4080 offset = c;
4081 bit_pos = 0;
4082 /* In MS bitfield mode a bit-field run always uses
4083 at least as many bits as the underlying type.
4084 To start a new run it's also required that this
4085 or the last bit-field had non-zero width. */
4086 if (bit_size || prev_bit_size)
4087 c += size;
4089 /* In MS layout the records alignment is normally
4090 influenced by the field, except for a zero-width
4091 field at the start of a run (but by further zero-width
4092 fields it is again). */
4093 if (bit_size == 0 && prevbt != bt)
4094 align = 1;
4095 prevbt = bt;
4096 prev_bit_size = bit_size;
4099 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4100 | (bit_pos << VT_STRUCT_SHIFT);
4101 bit_pos += bit_size;
4103 if (align > maxalign)
4104 maxalign = align;
4106 #ifdef BF_DEBUG
4107 printf("set field %s offset %-2d size %-2d align %-2d",
4108 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4109 if (f->type.t & VT_BITFIELD) {
4110 printf(" pos %-2d bits %-2d",
4111 BIT_POS(f->type.t),
4112 BIT_SIZE(f->type.t)
4115 printf("\n");
4116 #endif
4118 f->c = offset;
4119 f->r = 0;
4122 if (pcc)
4123 c += (bit_pos + 7) >> 3;
4125 /* store size and alignment */
4126 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4127 if (a < maxalign)
4128 a = maxalign;
4129 type->ref->r = a;
4130 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4131 /* can happen if individual align for some member was given. In
4132 this case MSVC ignores maxalign when aligning the size */
4133 a = pragma_pack;
4134 if (a < bt)
4135 a = bt;
4137 c = (c + a - 1) & -a;
4138 type->ref->c = c;
4140 #ifdef BF_DEBUG
4141 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4142 #endif
4144 /* check whether we can access bitfields by their type */
/* Second pass: if a bit-field cannot be loaded/stored through its
   declared base type (it straddles the container or would read past
   the struct), pick a smaller/larger integer type (f->auxtype), or
   fall back to byte-wise access (VT_STRUCT sentinel). */
4145 for (f = type->ref->next; f; f = f->next) {
4146 int s, px, cx, c0;
4147 CType t;
4149 if (0 == (f->type.t & VT_BITFIELD))
4150 continue;
4151 f->type.ref = f;
4152 f->auxtype = -1;
4153 bit_size = BIT_SIZE(f->type.t);
4154 if (bit_size == 0)
4155 continue;
4156 bit_pos = BIT_POS(f->type.t);
4157 size = type_size(&f->type, &align);
4159 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4160 #ifdef TCC_TARGET_ARM
4161 && !(f->c & (align - 1))
4162 #endif
4164 continue;
4166 /* try to access the field using a different type */
4167 c0 = -1, s = align = 1;
4168 t.t = VT_BYTE;
4169 for (;;) {
4170 px = f->c * 8 + bit_pos;
4171 cx = (px >> 3) & -align;
4172 px = px - (cx << 3);
4173 if (c0 == cx)
4174 break;
4175 s = (px + bit_size + 7) >> 3;
4176 if (s > 4) {
4177 t.t = VT_LLONG;
4178 } else if (s > 2) {
4179 t.t = VT_INT;
4180 } else if (s > 1) {
4181 t.t = VT_SHORT;
4182 } else {
4183 t.t = VT_BYTE;
4185 s = type_size(&t, &align);
4186 c0 = cx;
4189 if (px + bit_size <= s * 8 && cx + s <= c
4190 #ifdef TCC_TARGET_ARM
4191 && !(cx & (align - 1))
4192 #endif
4194 /* update offset and bit position */
4195 f->c = cx;
4196 bit_pos = px;
4197 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4198 | (bit_pos << VT_STRUCT_SHIFT);
4199 if (s != size)
4200 f->auxtype = t.t;
4201 #ifdef BF_DEBUG
4202 printf("FIX field %s offset %-2d size %-2d align %-2d "
4203 "pos %-2d bits %-2d\n",
4204 get_tok_str(f->v & ~SYM_FIELD, NULL),
4205 cx, s, align, px, bit_size);
4206 #endif
4207 } else {
4208 /* fall back to load/store single-byte wise */
4209 f->auxtype = VT_STRUCT;
4210 #ifdef BF_DEBUG
4211 printf("FIX field %s : load byte-wise\n",
4212 get_tok_str(f->v & ~SYM_FIELD, NULL));
4213 #endif
4218 static void do_Static_assert(void);
4220 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4221 static void struct_decl(CType *type, int u)
/* Parses an enum/struct/union specifier (tag lookup or definition with
   member list) and fills *type.  For enums the smallest suitable
   integral type is selected after all enumerators are known; for
   struct/union the member list is parsed here and the actual layout is
   delegated to struct_layout(). */
4223 int v, c, size, align, flexible;
4224 int bit_size, bsize, bt;
4225 Sym *s, *ss, **ps;
4226 AttributeDef ad, ad1;
4227 CType type1, btype;
4229 memset(&ad, 0, sizeof ad);
4230 next();
4231 parse_attribute(&ad);
4232 if (tok != '{') {
4233 v = tok;
4234 next();
4235 /* struct already defined ? return it */
4236 if (v < TOK_IDENT)
4237 expect("struct/union/enum name");
4238 s = struct_find(v);
4239 if (s && (s->sym_scope == local_scope || tok != '{')) {
4240 if (u == s->type.t)
4241 goto do_decl;
4242 if (u == VT_ENUM && IS_ENUM(s->type.t))
4243 goto do_decl;
4244 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4246 } else {
/* untagged definition: use a fresh anonymous symbol */
4247 v = anon_sym++;
4249 /* Record the original enum/struct/union token. */
4250 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4251 type1.ref = NULL;
4252 /* we put an undefined size for struct/union */
4253 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4254 s->r = 0; /* default alignment is zero as gcc */
4255 do_decl:
4256 type->t = s->type.t;
4257 type->ref = s;
4259 if (tok == '{') {
4260 next();
4261 if (s->c != -1)
4262 tcc_error("struct/union/enum already defined");
4263 s->c = -2;
4264 /* cannot be empty */
4265 /* non empty enums are not allowed */
4266 ps = &s->next;
4267 if (u == VT_ENUM) {
/* ll: current enumerator value; pl/nl: largest positive and
   most negative values seen, used to size the enum below */
4268 long long ll = 0, pl = 0, nl = 0;
4269 CType t;
4270 t.ref = s;
4271 /* enum symbols have static storage */
4272 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4273 for(;;) {
4274 v = tok;
4275 if (v < TOK_UIDENT)
4276 expect("identifier");
4277 ss = sym_find(v);
4278 if (ss && !local_stack)
4279 tcc_error("redefinition of enumerator '%s'",
4280 get_tok_str(v, NULL));
4281 next();
4282 if (tok == '=') {
4283 next();
4284 ll = expr_const64();
4286 ss = sym_push(v, &t, VT_CONST, 0);
4287 ss->enum_val = ll;
4288 *ps = ss, ps = &ss->next;
4289 if (ll < nl)
4290 nl = ll;
4291 if (ll > pl)
4292 pl = ll;
4293 if (tok != ',')
4294 break;
4295 next();
4296 ll++;
4297 /* NOTE: we accept a trailing comma */
4298 if (tok == '}')
4299 break;
4301 skip('}');
4302 /* set integral type of the enum */
4303 t.t = VT_INT;
4304 if (nl >= 0) {
4305 if (pl != (unsigned)pl)
4306 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4307 t.t |= VT_UNSIGNED;
4308 } else if (pl != (int)pl || nl != (int)nl)
4309 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4310 s->type.t = type->t = t.t | VT_ENUM;
4311 s->c = 0;
4312 /* set type for enum members */
4313 for (ss = s->next; ss; ss = ss->next) {
4314 ll = ss->enum_val;
4315 if (ll == (int)ll) /* default is int if it fits */
4316 continue;
4317 if (t.t & VT_UNSIGNED) {
4318 ss->type.t |= VT_UNSIGNED;
4319 if (ll == (unsigned)ll)
4320 continue;
4322 ss->type.t = (ss->type.t & ~VT_BTYPE)
4323 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4325 } else {
/* struct/union member list.  'c' records that at least one
   real member was seen (needed to validate a trailing
   flexible array member). */
4326 c = 0;
4327 flexible = 0;
4328 while (tok != '}') {
4329 if (tok == TOK_STATIC_ASSERT) {
4330 do_Static_assert();
4331 continue;
4333 if (!parse_btype(&btype, &ad1, 0)) {
4334 skip(';');
4335 continue;
4337 while (1) {
4338 if (flexible)
4339 tcc_error("flexible array member '%s' not at the end of struct",
4340 get_tok_str(v, NULL));
4341 bit_size = -1;
4342 v = 0;
4343 type1 = btype;
4344 if (tok != ':') {
4345 if (tok != ';')
4346 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4347 if (v == 0) {
4348 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4349 expect("identifier");
4350 else {
4351 int v = btype.ref->v;
4352 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4353 if (tcc_state->ms_extensions == 0)
4354 expect("identifier");
4358 if (type_size(&type1, &align) < 0) {
4359 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4360 flexible = 1;
4361 else
4362 tcc_error("field '%s' has incomplete type",
4363 get_tok_str(v, NULL));
4365 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4366 (type1.t & VT_BTYPE) == VT_VOID ||
4367 (type1.t & VT_STORAGE))
4368 tcc_error("invalid type for '%s'",
4369 get_tok_str(v, NULL));
4371 if (tok == ':') {
4372 next();
4373 bit_size = expr_const();
4374 /* XXX: handle v = 0 case for messages */
4375 if (bit_size < 0)
4376 tcc_error("negative width in bit-field '%s'",
4377 get_tok_str(v, NULL));
4378 if (v && bit_size == 0)
4379 tcc_error("zero width for bit-field '%s'",
4380 get_tok_str(v, NULL));
4381 parse_attribute(&ad1);
4383 size = type_size(&type1, &align);
4384 if (bit_size >= 0) {
4385 bt = type1.t & VT_BTYPE;
4386 if (bt != VT_INT &&
4387 bt != VT_BYTE &&
4388 bt != VT_SHORT &&
4389 bt != VT_BOOL &&
4390 bt != VT_LLONG)
4391 tcc_error("bitfields must have scalar type");
4392 bsize = size * 8;
4393 if (bit_size > bsize) {
4394 tcc_error("width of '%s' exceeds its type",
4395 get_tok_str(v, NULL));
4396 } else if (bit_size == bsize
4397 && !ad.a.packed && !ad1.a.packed) {
4398 /* no need for bit fields */
4400 } else if (bit_size == 64) {
4401 tcc_error("field width 64 not implemented");
4402 } else {
4403 type1.t = (type1.t & ~VT_STRUCT_MASK)
4404 | VT_BITFIELD
4405 | (bit_size << (VT_STRUCT_SHIFT + 6));
4408 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4409 /* Remember we've seen a real field to check
4410 for placement of flexible array member. */
4411 c = 1;
4413 /* If member is a struct or bit-field, enforce
4414 placing into the struct (as anonymous). */
4415 if (v == 0 &&
4416 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4417 bit_size >= 0)) {
4418 v = anon_sym++;
4420 if (v) {
4421 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4422 ss->a = ad1.a;
4423 *ps = ss;
4424 ps = &ss->next;
4426 if (tok == ';' || tok == TOK_EOF)
4427 break;
4428 skip(',');
4430 skip(';');
4432 skip('}');
4433 parse_attribute(&ad);
4434 if (ad.cleanup_func) {
4435 tcc_warning("attribute '__cleanup__' ignored on type");
4437 check_fields(type, 1);
4438 check_fields(type, 0);
4439 struct_layout(type, &ad);
4440 if (debug_modes)
4441 tcc_debug_fix_anon(tcc_state, type);
4446 static void sym_to_attr(AttributeDef *ad, Sym *s)
4448 merge_symattr(&ad->a, &s->a);
4449 merge_funcattr(&ad->f, &s->f);
4452 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4453 are added to the element type, copied because it could be a typedef. */
4454 static void parse_btype_qualify(CType *type, int qualifiers)
4456 while (type->t & VT_ARRAY) {
4457 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4458 type = &type->ref->type;
4460 type->t |= qualifiers;
4463 /* return 0 if no type declaration. otherwise, return the basic type
4464 and skip it.
4466 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
/* Parses declaration specifiers (basic type keywords, qualifiers,
   storage classes, attributes, typedef names) into *type / *ad.
   'bt' tracks the base type keyword seen so far and 'st' the
   short/long modifier (-1 = none, -2 = consumed via typedef); they
   are used to reject invalid combinations ("too many basic types").
   With ignore_label set, an identifier followed by ':' is treated as
   a label rather than a typedef name. */
4468 int t, u, bt, st, type_found, typespec_found, g, n;
4469 Sym *s;
4470 CType type1;
4472 memset(ad, 0, sizeof(AttributeDef));
4473 type_found = 0;
4474 typespec_found = 0;
4475 t = VT_INT;
4476 bt = st = -1;
4477 type->ref = NULL;
4479 while(1) {
4480 switch(tok) {
4481 case TOK_EXTENSION:
4482 /* currently, we really ignore extension */
4483 next();
4484 continue;
4486 /* basic types */
4487 case TOK_CHAR:
4488 u = VT_BYTE;
4489 basic_type:
4490 next();
4491 basic_type1:
4492 if (u == VT_SHORT || u == VT_LONG) {
4493 if (st != -1 || (bt != -1 && bt != VT_INT))
4494 tmbt: tcc_error("too many basic types");
4495 st = u;
4496 } else {
4497 if (bt != -1 || (st != -1 && u != VT_INT))
4498 goto tmbt;
4499 bt = u;
4501 if (u != VT_INT)
4502 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4503 typespec_found = 1;
4504 break;
4505 case TOK_VOID:
4506 u = VT_VOID;
4507 goto basic_type;
4508 case TOK_SHORT:
4509 u = VT_SHORT;
4510 goto basic_type;
4511 case TOK_INT:
4512 u = VT_INT;
4513 goto basic_type;
4514 case TOK_ALIGNAS:
/* _Alignas(type) or _Alignas(constant-expression) */
4515 { int n;
4516 AttributeDef ad1;
4517 next();
4518 skip('(');
4519 memset(&ad1, 0, sizeof(AttributeDef));
4520 if (parse_btype(&type1, &ad1, 0)) {
4521 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4522 if (ad1.a.aligned)
4523 n = 1 << (ad1.a.aligned - 1);
4524 else
4525 type_size(&type1, &n);
4526 } else {
4527 n = expr_const();
4528 if (n < 0 || (n & (n - 1)) != 0)
4529 tcc_error("alignment must be a positive power of two");
4531 skip(')');
4532 ad->a.aligned = exact_log2p1(n);
4534 continue;
4535 case TOK_LONG:
4536 if ((t & VT_BTYPE) == VT_DOUBLE) {
4537 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4538 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4539 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4540 } else {
4541 u = VT_LONG;
4542 goto basic_type;
4544 next();
4545 break;
4546 #ifdef TCC_TARGET_ARM64
4547 case TOK_UINT128:
4548 /* GCC's __uint128_t appears in some Linux header files. Make it a
4549 synonym for long double to get the size and alignment right. */
4550 u = VT_LDOUBLE;
4551 goto basic_type;
4552 #endif
4553 case TOK_BOOL:
4554 u = VT_BOOL;
4555 goto basic_type;
4556 case TOK_COMPLEX:
4557 tcc_error("_Complex is not yet supported");
4558 case TOK_FLOAT:
4559 u = VT_FLOAT;
4560 goto basic_type;
4561 case TOK_DOUBLE:
4562 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4563 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4564 } else {
4565 u = VT_DOUBLE;
4566 goto basic_type;
4568 next();
4569 break;
4570 case TOK_ENUM:
4571 struct_decl(&type1, VT_ENUM);
4572 basic_type2:
4573 u = type1.t;
4574 type->ref = type1.ref;
4575 goto basic_type1;
4576 case TOK_STRUCT:
4577 struct_decl(&type1, VT_STRUCT);
4578 goto basic_type2;
4579 case TOK_UNION:
4580 struct_decl(&type1, VT_UNION);
4581 goto basic_type2;
4583 /* type modifiers */
4584 case TOK__Atomic:
4585 next();
4586 type->t = t;
4587 parse_btype_qualify(type, VT_ATOMIC);
4588 t = type->t;
4589 if (tok == '(') {
/* _Atomic(type) specifier form */
4590 parse_expr_type(&type1);
4591 /* remove all storage modifiers except typedef */
4592 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4593 if (type1.ref)
4594 sym_to_attr(ad, type1.ref);
4595 goto basic_type2;
4597 break;
4598 case TOK_CONST1:
4599 case TOK_CONST2:
4600 case TOK_CONST3:
4601 type->t = t;
4602 parse_btype_qualify(type, VT_CONSTANT);
4603 t = type->t;
4604 next();
4605 break;
4606 case TOK_VOLATILE1:
4607 case TOK_VOLATILE2:
4608 case TOK_VOLATILE3:
4609 type->t = t;
4610 parse_btype_qualify(type, VT_VOLATILE);
4611 t = type->t;
4612 next();
4613 break;
4614 case TOK_SIGNED1:
4615 case TOK_SIGNED2:
4616 case TOK_SIGNED3:
4617 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4618 tcc_error("signed and unsigned modifier");
4619 t |= VT_DEFSIGN;
4620 next();
4621 typespec_found = 1;
4622 break;
4623 case TOK_REGISTER:
4624 case TOK_AUTO:
4625 case TOK_RESTRICT1:
4626 case TOK_RESTRICT2:
4627 case TOK_RESTRICT3:
/* these specifiers are accepted but have no effect */
4628 next();
4629 break;
4630 case TOK_UNSIGNED:
4631 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4632 tcc_error("signed and unsigned modifier");
4633 t |= VT_DEFSIGN | VT_UNSIGNED;
4634 next();
4635 typespec_found = 1;
4636 break;
4638 /* storage */
4639 case TOK_EXTERN:
4640 g = VT_EXTERN;
4641 goto storage;
4642 case TOK_STATIC:
4643 g = VT_STATIC;
4644 goto storage;
4645 case TOK_TYPEDEF:
4646 g = VT_TYPEDEF;
4647 goto storage;
4648 storage:
4649 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4650 tcc_error("multiple storage classes");
4651 t |= g;
4652 next();
4653 break;
4654 case TOK_INLINE1:
4655 case TOK_INLINE2:
4656 case TOK_INLINE3:
4657 t |= VT_INLINE;
4658 next();
4659 break;
4660 case TOK_NORETURN3:
4661 next();
4662 ad->f.func_noreturn = 1;
4663 break;
4664 /* GNUC attribute */
4665 case TOK_ATTRIBUTE1:
4666 case TOK_ATTRIBUTE2:
4667 parse_attribute(ad);
4668 if (ad->attr_mode) {
/* attr_mode is stored as VT type + 1 (0 means unset) */
4669 u = ad->attr_mode -1;
4670 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4672 continue;
4673 /* GNUC typeof */
4674 case TOK_TYPEOF1:
4675 case TOK_TYPEOF2:
4676 case TOK_TYPEOF3:
4677 next();
4678 parse_expr_type(&type1);
4679 /* remove all storage modifiers except typedef */
4680 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4681 if (type1.ref)
4682 sym_to_attr(ad, type1.ref);
4683 goto basic_type2;
4684 case TOK_THREAD_LOCAL:
4685 tcc_error("_Thread_local is not implemented");
4686 default:
4687 if (typespec_found)
4688 goto the_end;
4689 s = sym_find(tok);
4690 if (!s || !(s->type.t & VT_TYPEDEF))
4691 goto the_end;
4693 n = tok, next();
4694 if (tok == ':' && ignore_label) {
4695 /* ignore if it's a label */
4696 unget_tok(n);
4697 goto the_end;
/* typedef name: start from the typedef'd type and re-apply any
   qualifiers collected so far */
4700 t &= ~(VT_BTYPE|VT_LONG);
4701 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4702 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4703 type->ref = s->type.ref;
4704 if (t)
4705 parse_btype_qualify(type, t);
4706 t = type->t;
4707 /* get attributes from typedef */
4708 sym_to_attr(ad, s);
4709 typespec_found = 1;
4710 st = bt = -2;
4711 break;
4713 type_found = 1;
4715 the_end:
4716 if (tcc_state->char_is_unsigned) {
4717 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4718 t |= VT_UNSIGNED;
4720 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4721 bt = t & (VT_BTYPE|VT_LONG);
4722 if (bt == VT_LONG)
4723 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4724 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4725 if (bt == VT_LDOUBLE)
4726 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4727 #endif
4728 type->t = t;
4729 return type_found;
4732 /* convert a function parameter type (array to pointer and function to
4733 function pointer) */
4734 static inline void convert_parameter_type(CType *pt)
4736 /* remove const and volatile qualifiers (XXX: const could be used
4737 to indicate a const function parameter */
4738 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4739 /* array must be transformed to pointer according to ANSI C */
4740 pt->t &= ~VT_ARRAY;
4741 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4742 mk_pointer(pt);
4746 ST_FUNC void parse_asm_str(CString *astr)
/* Parse '(' followed by one or more string literals into 'astr';
   the closing ')' is left for the caller to consume. */
4748 skip('(');
4749 parse_mult_str(astr, "string constant");
4752 /* Parse an asm label and return the token */
4753 static int asm_label_instr(void)
4755 int v;
4756 CString astr;
4758 next();
4759 parse_asm_str(&astr);
4760 skip(')');
4761 #ifdef ASM_DEBUG
4762 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4763 #endif
4764 v = tok_alloc(astr.data, astr.size - 1)->tok;
4765 cstr_free(&astr);
4766 return v;
4769 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
/* Parses the part of a declarator that follows the identifier:
   a function parameter list '(...)' or array brackets '[...]'.
   Returns 0 when '(' actually starts a nested (recursive) declarator
   that the caller must handle, 1 otherwise.  For VLAs inside parameter
   declarations the size expression is saved as a token string
   (vla_array_str) to be re-evaluated at function-body start. */
4771 int n, l, t1, arg_size, align;
4772 Sym **plast, *s, *first;
4773 AttributeDef ad1;
4774 CType pt;
4775 TokenString *vla_array_tok = NULL;
4776 int *vla_array_str = NULL;
4778 if (tok == '(') {
4779 /* function type, or recursive declarator (return if so) */
4780 next();
4781 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4782 return 0;
4783 if (tok == ')')
4784 l = 0;
4785 else if (parse_btype(&pt, &ad1, 0))
4786 l = FUNC_NEW;
4787 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4788 merge_attr (ad, &ad1);
4789 return 0;
4790 } else
4791 l = FUNC_OLD;
4793 first = NULL;
4794 plast = &first;
4795 arg_size = 0;
4796 ++local_scope;
4797 if (l) {
4798 for(;;) {
4799 /* read param name and compute offset */
4800 if (l != FUNC_OLD) {
4801 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4802 break;
4803 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4804 if ((pt.t & VT_BTYPE) == VT_VOID)
4805 tcc_error("parameter declared as void");
4806 if (n == 0)
4807 n = SYM_FIELD;
4808 } else {
4809 n = tok;
4810 pt.t = VT_VOID; /* invalid type */
4811 pt.ref = NULL;
4812 next();
4814 if (n < TOK_UIDENT)
4815 expect("identifier");
4816 convert_parameter_type(&pt);
4817 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4818 /* these symbols may be evaluated for VLArrays (see below, under
4819 nocode_wanted) which is why we push them here as normal symbols
4820 temporarily. Example: int func(int a, int b[++a]); */
4821 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4822 *plast = s;
4823 plast = &s->next;
4824 if (tok == ')')
4825 break;
4826 skip(',');
4827 if (l == FUNC_NEW && tok == TOK_DOTS) {
4828 l = FUNC_ELLIPSIS;
4829 next();
4830 break;
4832 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4833 tcc_error("invalid type");
4835 } else
4836 /* if no parameters, then old type prototype */
4837 l = FUNC_OLD;
4838 skip(')');
4839 /* remove parameter symbols from token table, keep on stack */
4840 if (first) {
4841 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4842 for (s = first; s; s = s->next)
4843 s->v |= SYM_FIELD;
4845 --local_scope;
4846 /* NOTE: const is ignored in returned type as it has a special
4847 meaning in gcc / C++ */
4848 type->t &= ~VT_CONSTANT;
4849 /* some ancient pre-K&R C allows a function to return an array
4850 and the array brackets to be put after the arguments, such
4851 that "int c()[]" means something like "int[] c()" */
4852 if (tok == '[') {
4853 next();
4854 skip(']'); /* only handle simple "[]" */
4855 mk_pointer(type);
4857 /* we push a anonymous symbol which will contain the function prototype */
4858 ad->f.func_args = arg_size;
4859 ad->f.func_type = l;
4860 s = sym_push(SYM_FIELD, type, 0, 0);
4861 s->a = ad->a;
4862 s->f = ad->f;
4863 s->next = first;
4864 type->t = VT_FUNC;
4865 type->ref = s;
4866 } else if (tok == '[') {
4867 int saved_nocode_wanted = nocode_wanted;
4868 /* array definition */
4869 next();
4870 n = -1;
4871 t1 = 0;
4872 if (td & TYPE_PARAM) while (1) {
4873 /* XXX The optional type-quals and static should only be accepted
4874 in parameter decls. The '*' as well, and then even only
4875 in prototypes (not function defs). */
4876 switch (tok) {
4877 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4878 case TOK_CONST1:
4879 case TOK_VOLATILE1:
4880 case TOK_STATIC:
4881 case '*':
4882 next();
4883 continue;
4884 default:
4885 break;
4887 if (tok != ']') {
4888 /* Code generation is not done now but has to be done
4889 at start of function. Save code here for later use. */
4890 nocode_wanted = 1;
4891 skip_or_save_block(&vla_array_tok);
4892 unget_tok(0);
4893 vla_array_str = vla_array_tok->str;
4894 begin_macro(vla_array_tok, 2);
4895 next();
4896 gexpr();
4897 end_macro();
4898 next();
4899 goto check;
4901 break;
4903 } else if (tok != ']') {
4904 if (!local_stack || (storage & VT_STATIC))
4905 vpushi(expr_const());
4906 else {
4907 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4908 length must always be evaluated, even under nocode_wanted,
4909 so that its size slot is initialized (e.g. under sizeof
4910 or typeof). */
4911 nocode_wanted = 0;
4912 gexpr();
4914 check:
4915 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4916 n = vtop->c.i;
4917 if (n < 0)
4918 tcc_error("invalid array size");
4919 } else {
4920 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4921 tcc_error("size of variable length array should be an integer");
4922 n = 0;
4923 t1 = VT_VLA;
4926 skip(']');
4927 /* parse next post type */
/* recurse for the element type: inner dimensions / function part */
4928 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
4930 if ((type->t & VT_BTYPE) == VT_FUNC)
4931 tcc_error("declaration of an array of functions");
4932 if ((type->t & VT_BTYPE) == VT_VOID
4933 || type_size(type, &align) < 0)
4934 tcc_error("declaration of an array of incomplete type elements");
4936 t1 |= type->t & VT_VLA;
4938 if (t1 & VT_VLA) {
4939 if (n < 0) {
4940 if (td & TYPE_NEST)
4941 tcc_error("need explicit inner array size in VLAs");
4943 else {
/* reserve a local slot holding the runtime size in bytes */
4944 loc -= type_size(&int_type, &align);
4945 loc &= -align;
4946 n = loc;
4948 vpush_type_size(type, &align);
4949 gen_op('*');
4950 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4951 vswap();
4952 vstore();
4955 if (n != -1)
4956 vpop();
4957 nocode_wanted = saved_nocode_wanted;
4959 /* we push an anonymous symbol which will contain the array
4960 element type */
4961 s = sym_push(SYM_FIELD, type, 0, n);
4962 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4963 type->ref = s;
4965 if (vla_array_str) {
4966 if (t1 & VT_VLA)
4967 s->vla_array_str = vla_array_str;
4968 else
4969 tok_str_free_str(vla_array_str);
4972 return 1;
4975 /* Parse a type declarator (except basic type), and return the type
4976 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4977 expected. 'type' should contain the basic type. 'ad' is the
4978 attribute definition of the basic type. It can be modified by
4979 type_decl(). If this (possibly abstract) declarator is a pointer chain
4980 it returns the innermost pointed to type (equals *type, but is a different
4981 pointer), otherwise returns type itself, that's used for recursive calls. */
4982 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4984 CType *post, *ret;
4985 int qualifiers, storage;
4987 /* recursive type, remove storage bits first, apply them later again */
4988 storage = type->t & VT_STORAGE;
4989 type->t &= ~VT_STORAGE;
4990 post = ret = type;
4992 while (tok == '*') {
4993 qualifiers = 0;
4994 redo:
4995 next();
4996 switch(tok) {
4997 case TOK__Atomic:
4998 qualifiers |= VT_ATOMIC;
4999 goto redo;
5000 case TOK_CONST1:
5001 case TOK_CONST2:
5002 case TOK_CONST3:
5003 qualifiers |= VT_CONSTANT;
5004 goto redo;
5005 case TOK_VOLATILE1:
5006 case TOK_VOLATILE2:
5007 case TOK_VOLATILE3:
5008 qualifiers |= VT_VOLATILE;
5009 goto redo;
5010 case TOK_RESTRICT1:
5011 case TOK_RESTRICT2:
5012 case TOK_RESTRICT3:
5013 goto redo;
5014 /* XXX: clarify attribute handling */
5015 case TOK_ATTRIBUTE1:
5016 case TOK_ATTRIBUTE2:
5017 parse_attribute(ad);
5018 break;
5020 mk_pointer(type);
5021 type->t |= qualifiers;
5022 if (ret == type)
5023 /* innermost pointed to type is the one for the first derivation */
5024 ret = pointed_type(type);
5027 if (tok == '(') {
5028 /* This is possibly a parameter type list for abstract declarators
5029 ('int ()'), use post_type for testing this. */
5030 if (!post_type(type, ad, 0, td)) {
5031 /* It's not, so it's a nested declarator, and the post operations
5032 apply to the innermost pointed to type (if any). */
5033 /* XXX: this is not correct to modify 'ad' at this point, but
5034 the syntax is not clear */
5035 parse_attribute(ad);
5036 post = type_decl(type, ad, v, td);
5037 skip(')');
5038 } else
5039 goto abstract;
5040 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5041 /* type identifier */
5042 *v = tok;
5043 next();
5044 } else {
5045 abstract:
5046 if (!(td & TYPE_ABSTRACT))
5047 expect("identifier");
5048 *v = 0;
5050 post_type(post, ad, post != ret ? 0 : storage,
5051 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5052 parse_attribute(ad);
5053 type->t |= storage;
5054 return ret;
5057 /* indirection with full error checking and bound check */
5058 ST_FUNC void indir(void)
5060 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5061 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5062 return;
5063 expect("pointer");
5065 if (vtop->r & VT_LVAL)
5066 gv(RC_INT);
5067 vtop->type = *pointed_type(&vtop->type);
5068 /* Arrays and functions are never lvalues */
5069 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5070 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5071 vtop->r |= VT_LVAL;
5072 /* if bound checking, the referenced pointer must be checked */
5073 #ifdef CONFIG_TCC_BCHECK
5074 if (tcc_state->do_bounds_check)
5075 vtop->r |= VT_MUSTBOUND;
5076 #endif
5080 /* pass a parameter to a function and do type checking and casting */
5081 static void gfunc_param_typed(Sym *func, Sym *arg)
5083 int func_type;
5084 CType type;
5086 func_type = func->f.func_type;
5087 if (func_type == FUNC_OLD ||
5088 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5089 /* default casting : only need to convert float to double */
5090 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5091 gen_cast_s(VT_DOUBLE);
5092 } else if (vtop->type.t & VT_BITFIELD) {
5093 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5094 type.ref = vtop->type.ref;
5095 gen_cast(&type);
5096 } else if (vtop->r & VT_MUSTCAST) {
5097 force_charshort_cast();
5099 } else if (arg == NULL) {
5100 tcc_error("too many arguments to function");
5101 } else {
5102 type = arg->type;
5103 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5104 gen_assign_cast(&type);
5108 /* parse an expression and return its type without any side effect. */
5109 static void expr_type(CType *type, void (*expr_fn)(void))
5111 nocode_wanted++;
5112 expr_fn();
5113 *type = vtop->type;
5114 vpop();
5115 nocode_wanted--;
5118 /* parse an expression of the form '(type)' or '(expr)' and return its
5119 type */
5120 static void parse_expr_type(CType *type)
5122 int n;
5123 AttributeDef ad;
5125 skip('(');
5126 if (parse_btype(type, &ad, 0)) {
5127 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5128 } else {
5129 expr_type(type, gexpr);
5131 skip(')');
5134 static void parse_type(CType *type)
5136 AttributeDef ad;
5137 int n;
5139 if (!parse_btype(type, &ad, 0)) {
5140 expect("type");
5142 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5145 static void parse_builtin_params(int nc, const char *args)
5147 char c, sep = '(';
5148 CType type;
5149 if (nc)
5150 nocode_wanted++;
5151 next();
5152 if (*args == 0)
5153 skip(sep);
5154 while ((c = *args++)) {
5155 skip(sep);
5156 sep = ',';
5157 if (c == 't') {
5158 parse_type(&type);
5159 vpush(&type);
5160 continue;
5162 expr_eq();
5163 type.ref = NULL;
5164 type.t = 0;
5165 switch (c) {
5166 case 'e':
5167 continue;
5168 case 'V':
5169 type.t = VT_CONSTANT;
5170 case 'v':
5171 type.t |= VT_VOID;
5172 mk_pointer (&type);
5173 break;
5174 case 'S':
5175 type.t = VT_CONSTANT;
5176 case 's':
5177 type.t |= char_type.t;
5178 mk_pointer (&type);
5179 break;
5180 case 'i':
5181 type.t = VT_INT;
5182 break;
5183 case 'l':
5184 type.t = VT_SIZE_T;
5185 break;
5186 default:
5187 break;
5189 gen_assign_cast(&type);
5191 skip(')');
5192 if (nc)
5193 nocode_wanted--;
5196 static void parse_atomic(int atok)
5198 int size, align, arg, t, save = 0;
5199 CType *atom, *atom_ptr, ct = {0};
5200 SValue store;
5201 char buf[40];
5202 static const char *const templates[] = {
5204 * Each entry consists of callback and function template.
5205 * The template represents argument types and return type.
5207 * ? void (return-only)
5208 * b bool
5209 * a atomic
5210 * A read-only atomic
5211 * p pointer to memory
5212 * v value
5213 * l load pointer
5214 * s save pointer
5215 * m memory model
5218 /* keep in order of appearance in tcctok.h: */
5219 /* __atomic_store */ "alm.?",
5220 /* __atomic_load */ "Asm.v",
5221 /* __atomic_exchange */ "alsm.v",
5222 /* __atomic_compare_exchange */ "aplbmm.b",
5223 /* __atomic_fetch_add */ "avm.v",
5224 /* __atomic_fetch_sub */ "avm.v",
5225 /* __atomic_fetch_or */ "avm.v",
5226 /* __atomic_fetch_xor */ "avm.v",
5227 /* __atomic_fetch_and */ "avm.v",
5228 /* __atomic_fetch_nand */ "avm.v",
5229 /* __atomic_and_fetch */ "avm.v",
5230 /* __atomic_sub_fetch */ "avm.v",
5231 /* __atomic_or_fetch */ "avm.v",
5232 /* __atomic_xor_fetch */ "avm.v",
5233 /* __atomic_and_fetch */ "avm.v",
5234 /* __atomic_nand_fetch */ "avm.v"
5236 const char *template = templates[(atok - TOK___atomic_store)];
5238 atom = atom_ptr = NULL;
5239 size = 0; /* pacify compiler */
5240 next();
5241 skip('(');
5242 for (arg = 0;;) {
5243 expr_eq();
5244 switch (template[arg]) {
5245 case 'a':
5246 case 'A':
5247 atom_ptr = &vtop->type;
5248 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5249 expect("pointer");
5250 atom = pointed_type(atom_ptr);
5251 size = type_size(atom, &align);
5252 if (size > 8
5253 || (size & (size - 1))
5254 || (atok > TOK___atomic_compare_exchange
5255 && (0 == btype_size(atom->t & VT_BTYPE)
5256 || (atom->t & VT_BTYPE) == VT_PTR)))
5257 expect("integral or integer-sized pointer target type");
5258 /* GCC does not care either: */
5259 /* if (!(atom->t & VT_ATOMIC))
5260 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5261 break;
5263 case 'p':
5264 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5265 || type_size(pointed_type(&vtop->type), &align) != size)
5266 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5267 gen_assign_cast(atom_ptr);
5268 break;
5269 case 'v':
5270 gen_assign_cast(atom);
5271 break;
5272 case 'l':
5273 indir();
5274 gen_assign_cast(atom);
5275 break;
5276 case 's':
5277 save = 1;
5278 indir();
5279 store = *vtop;
5280 vpop();
5281 break;
5282 case 'm':
5283 gen_assign_cast(&int_type);
5284 break;
5285 case 'b':
5286 ct.t = VT_BOOL;
5287 gen_assign_cast(&ct);
5288 break;
5290 if ('.' == template[++arg])
5291 break;
5292 skip(',');
5294 skip(')');
5296 ct.t = VT_VOID;
5297 switch (template[arg + 1]) {
5298 case 'b':
5299 ct.t = VT_BOOL;
5300 break;
5301 case 'v':
5302 ct = *atom;
5303 break;
5306 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5307 vpush_helper_func(tok_alloc_const(buf));
5308 vrott(arg - save + 1);
5309 gfunc_call(arg - save);
5311 vpush(&ct);
5312 PUT_R_RET(vtop, ct.t);
5313 t = ct.t & VT_BTYPE;
5314 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5315 #ifdef PROMOTE_RET
5316 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5317 #else
5318 vtop->type.t = VT_INT;
5319 #endif
5321 gen_cast(&ct);
5322 if (save) {
5323 vpush(&ct);
5324 *vtop = store;
5325 vswap();
5326 vstore();
5330 ST_FUNC void unary(void)
5332 int n, t, align, size, r, sizeof_caller;
5333 CType type;
5334 Sym *s;
5335 AttributeDef ad;
5337 /* generate line number info */
5338 if (debug_modes)
5339 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
5341 sizeof_caller = in_sizeof;
5342 in_sizeof = 0;
5343 type.ref = NULL;
5344 /* XXX: GCC 2.95.3 does not generate a table although it should be
5345 better here */
5346 tok_next:
5347 switch(tok) {
5348 case TOK_EXTENSION:
5349 next();
5350 goto tok_next;
5351 case TOK_LCHAR:
5352 #ifdef TCC_TARGET_PE
5353 t = VT_SHORT|VT_UNSIGNED;
5354 goto push_tokc;
5355 #endif
5356 case TOK_CINT:
5357 case TOK_CCHAR:
5358 t = VT_INT;
5359 push_tokc:
5360 type.t = t;
5361 vsetc(&type, VT_CONST, &tokc);
5362 next();
5363 break;
5364 case TOK_CUINT:
5365 t = VT_INT | VT_UNSIGNED;
5366 goto push_tokc;
5367 case TOK_CLLONG:
5368 t = VT_LLONG;
5369 goto push_tokc;
5370 case TOK_CULLONG:
5371 t = VT_LLONG | VT_UNSIGNED;
5372 goto push_tokc;
5373 case TOK_CFLOAT:
5374 t = VT_FLOAT;
5375 goto push_tokc;
5376 case TOK_CDOUBLE:
5377 t = VT_DOUBLE;
5378 goto push_tokc;
5379 case TOK_CLDOUBLE:
5380 t = VT_LDOUBLE;
5381 goto push_tokc;
5382 case TOK_CLONG:
5383 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5384 goto push_tokc;
5385 case TOK_CULONG:
5386 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5387 goto push_tokc;
5388 case TOK___FUNCTION__:
5389 if (!gnu_ext)
5390 goto tok_identifier;
5391 /* fall thru */
5392 case TOK___FUNC__:
5394 Section *sec;
5395 int len;
5396 /* special function name identifier */
5397 len = strlen(funcname) + 1;
5398 /* generate char[len] type */
5399 type.t = char_type.t;
5400 if (tcc_state->warn_write_strings & WARN_ON)
5401 type.t |= VT_CONSTANT;
5402 mk_pointer(&type);
5403 type.t |= VT_ARRAY;
5404 type.ref->c = len;
5405 sec = rodata_section;
5406 vpush_ref(&type, sec, sec->data_offset, len);
5407 if (!NODATA_WANTED)
5408 memcpy(section_ptr_add(sec, len), funcname, len);
5409 next();
5411 break;
5412 case TOK_LSTR:
5413 #ifdef TCC_TARGET_PE
5414 t = VT_SHORT | VT_UNSIGNED;
5415 #else
5416 t = VT_INT;
5417 #endif
5418 goto str_init;
5419 case TOK_STR:
5420 /* string parsing */
5421 t = char_type.t;
5422 str_init:
5423 if (tcc_state->warn_write_strings & WARN_ON)
5424 t |= VT_CONSTANT;
5425 type.t = t;
5426 mk_pointer(&type);
5427 type.t |= VT_ARRAY;
5428 memset(&ad, 0, sizeof(AttributeDef));
5429 ad.section = rodata_section;
5430 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5431 break;
5432 case '(':
5433 next();
5434 /* cast ? */
5435 if (parse_btype(&type, &ad, 0)) {
5436 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5437 skip(')');
5438 /* check ISOC99 compound literal */
5439 if (tok == '{') {
5440 /* data is allocated locally by default */
5441 if (global_expr)
5442 r = VT_CONST;
5443 else
5444 r = VT_LOCAL;
5445 /* all except arrays are lvalues */
5446 if (!(type.t & VT_ARRAY))
5447 r |= VT_LVAL;
5448 memset(&ad, 0, sizeof(AttributeDef));
5449 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5450 } else {
5451 if (sizeof_caller) {
5452 vpush(&type);
5453 return;
5455 unary();
5456 gen_cast(&type);
5458 } else if (tok == '{') {
5459 int saved_nocode_wanted = nocode_wanted;
5460 if (const_wanted && !(nocode_wanted & unevalmask))
5461 expect("constant");
5462 if (0 == local_scope)
5463 tcc_error("statement expression outside of function");
5464 /* save all registers */
5465 save_regs(0);
5466 /* statement expression : we do not accept break/continue
5467 inside as GCC does. We do retain the nocode_wanted state,
5468 as statement expressions can't ever be entered from the
5469 outside, so any reactivation of code emission (from labels
5470 or loop heads) can be disabled again after the end of it. */
5471 block(1);
5472 /* If the statement expr can be entered, then we retain the current
5473 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5474 If it can't be entered then the state is that from before the
5475 statement expression. */
5476 if (saved_nocode_wanted)
5477 nocode_wanted = saved_nocode_wanted;
5478 skip(')');
5479 } else {
5480 gexpr();
5481 skip(')');
5483 break;
5484 case '*':
5485 next();
5486 unary();
5487 indir();
5488 break;
5489 case '&':
5490 next();
5491 unary();
5492 /* functions names must be treated as function pointers,
5493 except for unary '&' and sizeof. Since we consider that
5494 functions are not lvalues, we only have to handle it
5495 there and in function calls. */
5496 /* arrays can also be used although they are not lvalues */
5497 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5498 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5499 test_lvalue();
5500 if (vtop->sym)
5501 vtop->sym->a.addrtaken = 1;
5502 mk_pointer(&vtop->type);
5503 gaddrof();
5504 break;
5505 case '!':
5506 next();
5507 unary();
5508 gen_test_zero(TOK_EQ);
5509 break;
5510 case '~':
5511 next();
5512 unary();
5513 vpushi(-1);
5514 gen_op('^');
5515 break;
5516 case '+':
5517 next();
5518 unary();
5519 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5520 tcc_error("pointer not accepted for unary plus");
5521 /* In order to force cast, we add zero, except for floating point
5522 where we really need an noop (otherwise -0.0 will be transformed
5523 into +0.0). */
5524 if (!is_float(vtop->type.t)) {
5525 vpushi(0);
5526 gen_op('+');
5528 break;
5529 case TOK_SIZEOF:
5530 case TOK_ALIGNOF1:
5531 case TOK_ALIGNOF2:
5532 case TOK_ALIGNOF3:
5533 t = tok;
5534 next();
5535 in_sizeof++;
5536 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5537 if (t == TOK_SIZEOF) {
5538 vpush_type_size(&type, &align);
5539 gen_cast_s(VT_SIZE_T);
5540 } else {
5541 type_size(&type, &align);
5542 s = NULL;
5543 if (vtop[1].r & VT_SYM)
5544 s = vtop[1].sym; /* hack: accessing previous vtop */
5545 if (s && s->a.aligned)
5546 align = 1 << (s->a.aligned - 1);
5547 vpushs(align);
5549 break;
5551 case TOK_builtin_expect:
5552 /* __builtin_expect is a no-op for now */
5553 parse_builtin_params(0, "ee");
5554 vpop();
5555 break;
5556 case TOK_builtin_types_compatible_p:
5557 parse_builtin_params(0, "tt");
5558 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5559 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5560 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5561 vtop -= 2;
5562 vpushi(n);
5563 break;
5564 case TOK_builtin_choose_expr:
5566 int64_t c;
5567 next();
5568 skip('(');
5569 c = expr_const64();
5570 skip(',');
5571 if (!c) {
5572 nocode_wanted++;
5574 expr_eq();
5575 if (!c) {
5576 vpop();
5577 nocode_wanted--;
5579 skip(',');
5580 if (c) {
5581 nocode_wanted++;
5583 expr_eq();
5584 if (c) {
5585 vpop();
5586 nocode_wanted--;
5588 skip(')');
5590 break;
5591 case TOK_builtin_constant_p:
5592 constant_p = 1;
5593 parse_builtin_params(1, "e");
5594 n = constant_p &&
5595 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5596 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5597 vtop--;
5598 vpushi(n);
5599 break;
5600 case TOK_builtin_frame_address:
5601 case TOK_builtin_return_address:
5603 int tok1 = tok;
5604 int64_t level;
5605 next();
5606 skip('(');
5607 level = expr_const64();
5608 if (level < 0) {
5609 tcc_error("%s only takes positive integers",
5610 tok1 == TOK_builtin_return_address ?
5611 "__builtin_return_address" :
5612 "__builtin_frame_address");
5614 skip(')');
5615 type.t = VT_VOID;
5616 mk_pointer(&type);
5617 vset(&type, VT_LOCAL, 0); /* local frame */
5618 while (level--) {
5619 #ifdef TCC_TARGET_RISCV64
5620 vpushi(2*PTR_SIZE);
5621 gen_op('-');
5622 #endif
5623 mk_pointer(&vtop->type);
5624 indir(); /* -> parent frame */
5626 if (tok1 == TOK_builtin_return_address) {
5627 // assume return address is just above frame pointer on stack
5628 #ifdef TCC_TARGET_ARM
5629 vpushi(2*PTR_SIZE);
5630 gen_op('+');
5631 #elif defined TCC_TARGET_RISCV64
5632 vpushi(PTR_SIZE);
5633 gen_op('-');
5634 #else
5635 vpushi(PTR_SIZE);
5636 gen_op('+');
5637 #endif
5638 mk_pointer(&vtop->type);
5639 indir();
5642 break;
5643 #ifdef TCC_TARGET_RISCV64
5644 case TOK_builtin_va_start:
5645 parse_builtin_params(0, "ee");
5646 r = vtop->r & VT_VALMASK;
5647 if (r == VT_LLOCAL)
5648 r = VT_LOCAL;
5649 if (r != VT_LOCAL)
5650 tcc_error("__builtin_va_start expects a local variable");
5651 gen_va_start();
5652 vstore();
5653 break;
5654 #endif
5655 #ifdef TCC_TARGET_X86_64
5656 #ifdef TCC_TARGET_PE
5657 case TOK_builtin_va_start:
5658 parse_builtin_params(0, "ee");
5659 r = vtop->r & VT_VALMASK;
5660 if (r == VT_LLOCAL)
5661 r = VT_LOCAL;
5662 if (r != VT_LOCAL)
5663 tcc_error("__builtin_va_start expects a local variable");
5664 vtop->r = r;
5665 vtop->type = char_pointer_type;
5666 vtop->c.i += 8;
5667 vstore();
5668 break;
5669 #else
5670 case TOK_builtin_va_arg_types:
5671 parse_builtin_params(0, "t");
5672 vpushi(classify_x86_64_va_arg(&vtop->type));
5673 vswap();
5674 vpop();
5675 break;
5676 #endif
5677 #endif
5679 #ifdef TCC_TARGET_ARM64
5680 case TOK_builtin_va_start: {
5681 parse_builtin_params(0, "ee");
5682 //xx check types
5683 gen_va_start();
5684 vpushi(0);
5685 vtop->type.t = VT_VOID;
5686 break;
5688 case TOK_builtin_va_arg: {
5689 parse_builtin_params(0, "et");
5690 type = vtop->type;
5691 vpop();
5692 //xx check types
5693 gen_va_arg(&type);
5694 vtop->type = type;
5695 break;
5697 case TOK___arm64_clear_cache: {
5698 parse_builtin_params(0, "ee");
5699 gen_clear_cache();
5700 vpushi(0);
5701 vtop->type.t = VT_VOID;
5702 break;
5704 #endif
5706 /* atomic operations */
5707 case TOK___atomic_store:
5708 case TOK___atomic_load:
5709 case TOK___atomic_exchange:
5710 case TOK___atomic_compare_exchange:
5711 case TOK___atomic_fetch_add:
5712 case TOK___atomic_fetch_sub:
5713 case TOK___atomic_fetch_or:
5714 case TOK___atomic_fetch_xor:
5715 case TOK___atomic_fetch_and:
5716 case TOK___atomic_fetch_nand:
5717 case TOK___atomic_add_fetch:
5718 case TOK___atomic_sub_fetch:
5719 case TOK___atomic_or_fetch:
5720 case TOK___atomic_xor_fetch:
5721 case TOK___atomic_and_fetch:
5722 case TOK___atomic_nand_fetch:
5723 parse_atomic(tok);
5724 break;
5726 /* pre operations */
5727 case TOK_INC:
5728 case TOK_DEC:
5729 t = tok;
5730 next();
5731 unary();
5732 inc(0, t);
5733 break;
5734 case '-':
5735 next();
5736 unary();
5737 if (is_float(vtop->type.t)) {
5738 gen_opif(TOK_NEG);
5739 } else {
5740 vpushi(0);
5741 vswap();
5742 gen_op('-');
5744 break;
5745 case TOK_LAND:
5746 if (!gnu_ext)
5747 goto tok_identifier;
5748 next();
5749 /* allow to take the address of a label */
5750 if (tok < TOK_UIDENT)
5751 expect("label identifier");
5752 s = label_find(tok);
5753 if (!s) {
5754 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5755 } else {
5756 if (s->r == LABEL_DECLARED)
5757 s->r = LABEL_FORWARD;
5759 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5760 s->type.t = VT_VOID;
5761 mk_pointer(&s->type);
5762 s->type.t |= VT_STATIC;
5764 vpushsym(&s->type, s);
5765 next();
5766 break;
5768 case TOK_GENERIC:
5770 CType controlling_type;
5771 int has_default = 0;
5772 int has_match = 0;
5773 int learn = 0;
5774 TokenString *str = NULL;
5775 int saved_const_wanted = const_wanted;
5777 next();
5778 skip('(');
5779 const_wanted = 0;
5780 expr_type(&controlling_type, expr_eq);
5781 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5782 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5783 mk_pointer(&controlling_type);
5784 const_wanted = saved_const_wanted;
5785 for (;;) {
5786 learn = 0;
5787 skip(',');
5788 if (tok == TOK_DEFAULT) {
5789 if (has_default)
5790 tcc_error("too many 'default'");
5791 has_default = 1;
5792 if (!has_match)
5793 learn = 1;
5794 next();
5795 } else {
5796 AttributeDef ad_tmp;
5797 int itmp;
5798 CType cur_type;
5800 parse_btype(&cur_type, &ad_tmp, 0);
5801 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5802 if (compare_types(&controlling_type, &cur_type, 0)) {
5803 if (has_match) {
5804 tcc_error("type match twice");
5806 has_match = 1;
5807 learn = 1;
5810 skip(':');
5811 if (learn) {
5812 if (str)
5813 tok_str_free(str);
5814 skip_or_save_block(&str);
5815 } else {
5816 skip_or_save_block(NULL);
5818 if (tok == ')')
5819 break;
5821 if (!str) {
5822 char buf[60];
5823 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5824 tcc_error("type '%s' does not match any association", buf);
5826 begin_macro(str, 1);
5827 next();
5828 expr_eq();
5829 if (tok != TOK_EOF)
5830 expect(",");
5831 end_macro();
5832 next();
5833 break;
5835 // special qnan , snan and infinity values
5836 case TOK___NAN__:
5837 n = 0x7fc00000;
5838 special_math_val:
5839 vpushi(n);
5840 vtop->type.t = VT_FLOAT;
5841 next();
5842 break;
5843 case TOK___SNAN__:
5844 n = 0x7f800001;
5845 goto special_math_val;
5846 case TOK___INF__:
5847 n = 0x7f800000;
5848 goto special_math_val;
5850 default:
5851 tok_identifier:
5852 t = tok;
5853 next();
5854 if (t < TOK_UIDENT)
5855 expect("identifier");
5856 s = sym_find(t);
5857 if (!s || IS_ASM_SYM(s)) {
5858 const char *name = get_tok_str(t, NULL);
5859 if (tok != '(')
5860 tcc_error("'%s' undeclared", name);
5861 /* for simple function calls, we tolerate undeclared
5862 external reference to int() function */
5863 tcc_warning_c(warn_implicit_function_declaration)(
5864 "implicit declaration of function '%s'", name);
5865 s = external_global_sym(t, &func_old_type);
5868 r = s->r;
5869 /* A symbol that has a register is a local register variable,
5870 which starts out as VT_LOCAL value. */
5871 if ((r & VT_VALMASK) < VT_CONST)
5872 r = (r & ~VT_VALMASK) | VT_LOCAL;
5874 vset(&s->type, r, s->c);
5875 /* Point to s as backpointer (even without r&VT_SYM).
5876 Will be used by at least the x86 inline asm parser for
5877 regvars. */
5878 vtop->sym = s;
5880 if (r & VT_SYM) {
5881 vtop->c.i = 0;
5882 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5883 vtop->c.i = s->enum_val;
5885 break;
5888 /* post operations */
5889 while (1) {
5890 if (tok == TOK_INC || tok == TOK_DEC) {
5891 inc(1, tok);
5892 next();
5893 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5894 int qualifiers, cumofs = 0;
5895 /* field */
5896 if (tok == TOK_ARROW)
5897 indir();
5898 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5899 test_lvalue();
5900 gaddrof();
5901 /* expect pointer on structure */
5902 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5903 expect("struct or union");
5904 if (tok == TOK_CDOUBLE)
5905 expect("field name");
5906 next();
5907 if (tok == TOK_CINT || tok == TOK_CUINT)
5908 expect("field name");
5909 s = find_field(&vtop->type, tok, &cumofs);
5910 /* add field offset to pointer */
5911 vtop->type = char_pointer_type; /* change type to 'char *' */
5912 vpushi(cumofs);
5913 gen_op('+');
5914 /* change type to field type, and set to lvalue */
5915 vtop->type = s->type;
5916 vtop->type.t |= qualifiers;
5917 /* an array is never an lvalue */
5918 if (!(vtop->type.t & VT_ARRAY)) {
5919 vtop->r |= VT_LVAL;
5920 #ifdef CONFIG_TCC_BCHECK
5921 /* if bound checking, the referenced pointer must be checked */
5922 if (tcc_state->do_bounds_check)
5923 vtop->r |= VT_MUSTBOUND;
5924 #endif
5926 next();
5927 } else if (tok == '[') {
5928 next();
5929 gexpr();
5930 gen_op('+');
5931 indir();
5932 skip(']');
5933 } else if (tok == '(') {
5934 SValue ret;
5935 Sym *sa;
5936 int nb_args, ret_nregs, ret_align, regsize, variadic;
5938 /* function call */
5939 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5940 /* pointer test (no array accepted) */
5941 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5942 vtop->type = *pointed_type(&vtop->type);
5943 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5944 goto error_func;
5945 } else {
5946 error_func:
5947 expect("function pointer");
5949 } else {
5950 vtop->r &= ~VT_LVAL; /* no lvalue */
5952 /* get return type */
5953 s = vtop->type.ref;
5954 next();
5955 sa = s->next; /* first parameter */
5956 nb_args = regsize = 0;
5957 ret.r2 = VT_CONST;
5958 /* compute first implicit argument if a structure is returned */
5959 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5960 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5961 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5962 &ret_align, &regsize);
5963 if (ret_nregs <= 0) {
5964 /* get some space for the returned structure */
5965 size = type_size(&s->type, &align);
5966 #ifdef TCC_TARGET_ARM64
5967 /* On arm64, a small struct is return in registers.
5968 It is much easier to write it to memory if we know
5969 that we are allowed to write some extra bytes, so
5970 round the allocated space up to a power of 2: */
5971 if (size < 16)
5972 while (size & (size - 1))
5973 size = (size | (size - 1)) + 1;
5974 #endif
5975 loc = (loc - size) & -align;
5976 ret.type = s->type;
5977 ret.r = VT_LOCAL | VT_LVAL;
5978 /* pass it as 'int' to avoid structure arg passing
5979 problems */
5980 vseti(VT_LOCAL, loc);
5981 #ifdef CONFIG_TCC_BCHECK
5982 if (tcc_state->do_bounds_check)
5983 --loc;
5984 #endif
5985 ret.c = vtop->c;
5986 if (ret_nregs < 0)
5987 vtop--;
5988 else
5989 nb_args++;
5991 } else {
5992 ret_nregs = 1;
5993 ret.type = s->type;
5996 if (ret_nregs > 0) {
5997 /* return in register */
5998 ret.c.i = 0;
5999 PUT_R_RET(&ret, ret.type.t);
6001 if (tok != ')') {
6002 for(;;) {
6003 expr_eq();
6004 gfunc_param_typed(s, sa);
6005 nb_args++;
6006 if (sa)
6007 sa = sa->next;
6008 if (tok == ')')
6009 break;
6010 skip(',');
6013 if (sa)
6014 tcc_error("too few arguments to function");
6015 skip(')');
6016 gfunc_call(nb_args);
6018 if (ret_nregs < 0) {
6019 vsetc(&ret.type, ret.r, &ret.c);
6020 #ifdef TCC_TARGET_RISCV64
6021 arch_transfer_ret_regs(1);
6022 #endif
6023 } else {
6024 /* return value */
6025 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6026 vsetc(&ret.type, r, &ret.c);
6027 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6030 /* handle packed struct return */
6031 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6032 int addr, offset;
6034 size = type_size(&s->type, &align);
6035 /* We're writing whole regs often, make sure there's enough
6036 space. Assume register size is power of 2. */
6037 if (regsize > align)
6038 align = regsize;
6039 loc = (loc - size) & -align;
6040 addr = loc;
6041 offset = 0;
6042 for (;;) {
6043 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6044 vswap();
6045 vstore();
6046 vtop--;
6047 if (--ret_nregs == 0)
6048 break;
6049 offset += regsize;
6051 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6054 /* Promote char/short return values. This is matters only
6055 for calling function that were not compiled by TCC and
6056 only on some architectures. For those where it doesn't
6057 matter we expect things to be already promoted to int,
6058 but not larger. */
6059 t = s->type.t & VT_BTYPE;
6060 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6061 #ifdef PROMOTE_RET
6062 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6063 #else
6064 vtop->type.t = VT_INT;
6065 #endif
6068 if (s->f.func_noreturn) {
6069 if (debug_modes)
6070 tcc_tcov_block_end(tcc_state, -1);
6071 CODE_OFF();
6073 } else {
6074 break;
6079 #ifndef precedence_parser /* original top-down parser */
6081 static void expr_prod(void)
6083 int t;
6085 unary();
6086 while ((t = tok) == '*' || t == '/' || t == '%') {
6087 next();
6088 unary();
6089 gen_op(t);
6093 static void expr_sum(void)
6095 int t;
6097 expr_prod();
6098 while ((t = tok) == '+' || t == '-') {
6099 next();
6100 expr_prod();
6101 gen_op(t);
6105 static void expr_shift(void)
6107 int t;
6109 expr_sum();
6110 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6111 next();
6112 expr_sum();
6113 gen_op(t);
6117 static void expr_cmp(void)
6119 int t;
6121 expr_shift();
6122 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6123 t == TOK_ULT || t == TOK_UGE) {
6124 next();
6125 expr_shift();
6126 gen_op(t);
6130 static void expr_cmpeq(void)
6132 int t;
6134 expr_cmp();
6135 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6136 next();
6137 expr_cmp();
6138 gen_op(t);
6142 static void expr_and(void)
6144 expr_cmpeq();
6145 while (tok == '&') {
6146 next();
6147 expr_cmpeq();
6148 gen_op('&');
6152 static void expr_xor(void)
6154 expr_and();
6155 while (tok == '^') {
6156 next();
6157 expr_and();
6158 gen_op('^');
6162 static void expr_or(void)
6164 expr_xor();
6165 while (tok == '|') {
6166 next();
6167 expr_xor();
6168 gen_op('|');
6172 static void expr_landor(int op);
6174 static void expr_land(void)
6176 expr_or();
6177 if (tok == TOK_LAND)
6178 expr_landor(tok);
6181 static void expr_lor(void)
6183 expr_land();
6184 if (tok == TOK_LOR)
6185 expr_landor(tok);
6188 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6189 #else /* defined precedence_parser */
6190 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6191 # define expr_lor() unary(), expr_infix(1)
6193 static int precedence(int tok)
6195 switch (tok) {
6196 case TOK_LOR: return 1;
6197 case TOK_LAND: return 2;
6198 case '|': return 3;
6199 case '^': return 4;
6200 case '&': return 5;
6201 case TOK_EQ: case TOK_NE: return 6;
6202 relat: case TOK_ULT: case TOK_UGE: return 7;
6203 case TOK_SHL: case TOK_SAR: return 8;
6204 case '+': case '-': return 9;
6205 case '*': case '/': case '%': return 10;
6206 default:
6207 if (tok >= TOK_ULE && tok <= TOK_GT)
6208 goto relat;
6209 return 0;
/* cached precedence for single-byte tokens, filled by init_prec() so
   the hot path can use a table lookup instead of the switch */
static unsigned char prec[256];
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
6219 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
static void expr_landor(int op);

/* precedence-climbing parser for binary operators: consume operators
   whose precedence is at least 'p', recursing for tighter-binding
   right-hand sides.  '&&'/'||' are delegated to expr_landor. */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            expr_landor(t);
        } else {
            next();
            unary();
            /* right operand binds tighter: parse it fully first */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
#endif
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
{
    int c = -1;
    /* only plain constants qualify; a weak symbol's address may still
       be null at run time, so it is not statically decidable */
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        vdup();
        gen_cast_s(VT_BOOL);
        c = vtop->c.i;
        vpop();
    }
    return c;
}
/* compile a chain of '&&' (op == TOK_LAND) or '||' (op == TOK_LOR).
   i is the short-circuiting value (1 for &&, 0 for ||), t accumulates
   the jump chain, cc stays set while all operands are compile-time
   constants, f is set once the chain's outcome is statically known. */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0;       /* run-time test needed */
        else if (c != i)
            nocode_wanted++, f = 1;     /* outcome decided: rest is dead */
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t);
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* fully constant result: i^f is the chain's value */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f;             /* undo the dead-code marker */
    } else {
        /* leave a VT_CMP value with the accumulated jump chain */
        gvtst_set(i, t);
    }
}
6285 static int is_cond_bool(SValue *sv)
6287 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6288 && (sv->type.t & VT_BTYPE) == VT_INT)
6289 return (unsigned)sv->c.i < 2;
6290 if (sv->r == VT_CMP)
6291 return 1;
6292 return 0;
/* parse and compile a conditional expression 'a ? b : c', including the
   GNU 'a ?: c' extension (g != 0).  c is the statically known truth of
   the condition (-1 when unknown); nocode_wanted suppresses the dead
   branch when the condition is constant. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;
    int ncw_prev;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0);   /* jump taken when condition false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup();               /* condition doubles as the 'then' value */
            tt = gvtst(0, 0);
        }

        ncw_prev = nocode_wanted;
        if (c == 0)
            nocode_wanted++;        /* 'then' branch statically dead */
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0);            /* skip the 'else' branch */
            gsym(tt);
        } else
            u = 0;

        nocode_wanted = ncw_prev;
        if (c == 1)
            nocode_wanted++;        /* 'else' branch statically dead */
        skip(':');
        expr_cond();

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            nocode_wanted = ncw_prev;
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");
        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        nocode_wanted = ncw_prev;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* force both branches into the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6423 static void expr_eq(void)
6425 int t;
6427 expr_cond();
6428 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6429 test_lvalue();
6430 next();
6431 if (t == '=') {
6432 expr_eq();
6433 } else {
6434 vdup();
6435 expr_eq();
6436 gen_op(TOK_ASSIGN_OP(t));
6438 vstore();
/* parse a full expression (comma operator); the value of the last
   sub-expression is left on the value stack */
ST_FUNC void gexpr(void)
{
    while (1) {
        expr_eq();
        if (tok != ',')
            break;
        /* discarded operands keep constant_p only if they are plain
           constants whose address was never taken */
        constant_p &= (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
            !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
        vpop();
        next();
    }
}
/* parse a constant expression and return value in vtop.  */
static void expr_const1(void)
{
    const_wanted++;
    /* unevalmask+1 marks an unevaluated subexpression so nested code
       generation stays disabled; must be undone symmetrically below */
    nocode_wanted += unevalmask + 1;
    expr_cond();
    nocode_wanted -= unevalmask + 1;
    const_wanted--;
}
/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
{
    int64_t c;
    expr_const1();
    /* must be a plain constant: no lvalue, no symbol reference */
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");
    c = vtop->c.i;
    vpop();
    return c;
}
/* parse an integer constant and return its value.
   Complain if it doesn't fit 32bit (signed or unsigned).  */
ST_FUNC int expr_const(void)
{
    int c;
    int64_t wc = expr_const64();
    c = wc;
    /* accept values representable as either int32 or uint32: the second
       test keeps e.g. 0x80000000..0xffffffff valid on 32-bit targets */
    if (c != wc && (unsigned)c != wc)
        tcc_error("constant exceeds 32 bit");
    return c;
}
6489 /* ------------------------------------------------------------------------- */
6490 /* return from function */
#ifndef TCC_TARGET_ARM64
/* generate the code returning the value on top of the value stack from
   the current function, honoring the target ABI for struct returns */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        /* ask the target backend how this struct is returned */
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned for whole-register
               loads, spill it to a properly aligned stack slot first */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
#endif
/* called at the end of a function body when control can fall off the
   end: synthesize 'return 0' for main(), warn otherwise */
static void check_func_return(void)
{
    if ((func_vt.t & VT_BTYPE) == VT_VOID)
        return;
    if (!strcmp (funcname, "main")
        && (func_vt.t & VT_BTYPE) == VT_INT) {
        /* main returns 0 by default */
        vpushi(0);
        gen_assign_cast(&func_vt);
        gfunc_return(&func_vt);
    } else {
        tcc_warning("function might return no value: '%s'", funcname);
    }
}
6571 /* ------------------------------------------------------------------------- */
6572 /* switch/case */
6574 static int case_cmpi(const void *pa, const void *pb)
6576 int64_t a = (*(struct case_t**) pa)->v1;
6577 int64_t b = (*(struct case_t**) pb)->v1;
6578 return a < b ? -1 : a > b;
6581 static int case_cmpu(const void *pa, const void *pb)
6583 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6584 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6585 return a < b ? -1 : a > b;
/* emit a test of vtop and bind the resulting jump to known address 'a'
   (branch taken when the condition holds; see usage in gcase) */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
/* generate the dispatch code for a sorted array of switch cases:
   binary search down to 8 entries, then a linear scan.  *bsym collects
   the jump emitted when no case matches. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test suffices */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* case range: x <= v2 && x >= v1 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6649 /* ------------------------------------------------------------------------- */
6650 /* __attribute__((cleanup(fn))) */
/* emit calls to the cleanup functions registered on the current scope's
   cleanup chain, stopping (exclusive) at 'stop' */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;      /* the cleanup function */
        Sym *vs = cls->prev_tok;  /* the variable it applies to */

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        /* the cleanup function receives a pointer to the variable */
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* run cleanups for a backward goto: call cleanups for all scopes left
   between the goto and the label (i.e. down to the nearest common
   ancestor of the two cleanup chains) */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* equalize depths, then walk both chains in lockstep until they meet */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
      ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
      ;
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
      ;

    try_call_scope_cleanup(cc);
}
6689 /* call 'func' for each __attribute__((cleanup(func))) */
6690 static void block_cleanup(struct scope *o)
6692 int jmp = 0;
6693 Sym *g, **pg;
6694 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6695 if (g->prev_tok->r & LABEL_FORWARD) {
6696 Sym *pcl = g->next;
6697 if (!jmp)
6698 jmp = gjmp(0);
6699 gsym(pcl->jnext);
6700 try_call_scope_cleanup(o->cl.s);
6701 pcl->jnext = gjmp(0);
6702 if (!o->cl.n)
6703 goto remove_pending;
6704 g->c = o->cl.n;
6705 pg = &g->prev;
6706 } else {
6707 remove_pending:
6708 *pg = g->prev;
6709 sym_free(g);
6712 gsym(jmp);
6713 try_call_scope_cleanup(o->cl.s);
6716 /* ------------------------------------------------------------------------- */
6717 /* VLA */
/* restore the stack pointer from the slot saved at 'loc' (0 = nothing
   to restore) after leaving VLA scopes */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
6725 static void vla_leave(struct scope *o)
6727 struct scope *c = cur_scope, *v = NULL;
6728 for (; c != o && c; c = c->prev)
6729 if (c->vla.num)
6730 v = c;
6731 if (v)
6732 vla_restore(v->vla.locorig);
6735 /* ------------------------------------------------------------------------- */
6736 /* local scopes */
/* enter a new local scope: 'o' becomes the current scope, inheriting
   everything from its parent except VLA state */
static void new_scope(struct scope *o)
{
    /* copy and link previous scope */
    *o = *cur_scope;
    o->prev = cur_scope;
    cur_scope = o;
    cur_scope->vla.num = 0;

    /* record local declaration stack position */
    o->lstk = local_stack;
    o->llstk = local_label_stack;
    ++local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
}
/* leave scope 'o': run VLA restores and cleanups, then pop locally
   declared labels and symbols and make the parent scope current */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
}
6782 /* leave a scope via break/continue(/goto) */
/* leave a scope via break/continue(/goto): run its cleanups and VLA
   restores without popping any declarations */
static void leave_scope(struct scope *o)
{
    if (!o)
        return;
    try_call_scope_cleanup(o->cl.s);
    vla_leave(o);
}
6791 /* ------------------------------------------------------------------------- */
6792 /* call block from 'for do while' loops */
/* call block from 'for do while' loops: temporarily install the loop's
   break (bsym) and continue (csym) jump collectors on the current scope */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    /* restore the enclosing loop's targets */
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* compile one statement (or statement expression when is_expr != 0).
   This is the central statement dispatcher: each 'else if' arm handles
   one statement kind and generates code for it directly. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        //new_scope(&o); //?? breaks tests2.122
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);   /* jump over 'then' when condition is false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }
        //prev_scope(&o,0); //?? breaks tests2.122

    } else if (t == TOK_WHILE) {
        new_scope(&o);
        d = gind();
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);
        b = 0;
        lblock(&a, &b);    /* a: break chain, b: continue chain */
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o,0);
    } else if (t == '{') {
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (is_expr)
                    vpop();
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return();

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl(VT_JMP)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind();
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted out of line, 'continue'
               (and the loop back-edge) target it via d */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        new_scope(&o);
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        prev_scope(&o,0);
        c = gvtst(0, 0);   /* loop back while condition true */
        gsym_addr(c, d);
        gsym(a);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        new_scope(&o);
        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);

        if (sw->nocode_wanted)
            goto skip_switch;
        /* sort cases so gcase can binary-search, and detect overlaps */
        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");
        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
    skip_switch:
        /* break label */
        gsym(a);

        dynarray_reset(&sw->p, &sw->n);
        cur_switch = sw->prev;
        tcc_free(sw);
        prev_scope(&o,0);

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: 'case v1 ... v2:' */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        /* case and default are unreachable from a switch under nocode_wanted */
        if (!cur_switch->nocode_wanted)
            cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        if (cur_scope->vla.num)
            vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
              {
                /* Accept attributes after labels (e.g. 'unused') */
                AttributeDef ad_tmp;
                parse_attribute(&ad_tmp);
              }
            if (debug_modes)
                tcc_tcov_reset_ind(tcc_state);
            vla_restore(cur_scope->vla.loc);
            if (tok != '}')
                goto again;
            /* we accept this, but it is a mistake */
            tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (is_expr) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
7171 /* This skips over a stream of tokens containing balanced {} and ()
7172 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7173 with a '{'). If STR then allocates and stores the skipped tokens
7174 in *STR. This doesn't check if () and {} are nested correctly,
7175 i.e. "({)}" is accepted. */
/* skip over a stream of tokens containing balanced {} and () pairs,
   stopping at outer ',' ';' '}' ')' ']' (or the matching '}' when we
   started on '{').  When 'str' is non-NULL the skipped tokens are saved
   into a newly allocated token string.  No nesting check: "({)}" passes. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while (1) {
        int t = tok;
        if (level == 0
            && (t == ','
                || t == ';'
                || t == '}'
                || t == ')'
                || t == ']'))
            break;
        if (t == TOK_EOF) {
            /* EOF only tolerated at top level when not saving */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        next();
        if (t == '{' || t == '(' || t == '[') {
            level++;
        } else if (t == '}' || t == ')' || t == ']') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the saved token string */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
#define EXPR_CONST 1
#define EXPR_ANY 2

/* parse one initializer element; EXPR_CONST restricts it to values
   computable at load time, EXPR_ANY allows any assignment expression */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
#if 1
/* sanity check: an initializer store at 'offset' must not run past the
   space reserved for the object (section data or local stack frame) */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
#else
#define init_assert(sec, offset)
#endif
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local object: emit a memset(addr, 0, size) call */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
#ifdef TCC_TARGET_ARM
        /* ARM helper takes the arguments in the other order */
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
#define DIF_FIRST 1      /* first initializer for the object */
#define DIF_SIZE_ONLY 2  /* only compute the size, emit nothing */
#define DIF_HAVE_ELEM 4  /* an element value is already parsed */
#define DIF_CLEAR 8      /* the target area is known to be zeroed */

/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    /* compact the array in place, dropping relocs inside the range */
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
7302 static void decl_design_flex(init_params *p, Sym *ref, int index)
7304 if (ref == p->flex_array_ref) {
7305 if (index >= ref->c)
7306 ref->c = index + 1;
7307 } else if (ref->c < 0)
7308 tcc_error("flexible array has zero size in this context");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c). This returns the new length of that.  */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    /* GNU 'field:' designator syntax (old-style) */
    if (gnu_ext && tok >= TOK_UIDENT) {
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                /* GNU range designator: [v1 ... v2] */
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* positional initialization: advance through the container */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* Skip bitfield padding. Also with size 32 and 64.  */
            while (f && (f->v & SYM_FIRST_ANOM) &&
                   is_integer_btype(f->type.t & VT_BTYPE))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    /* range designator: replicate the parsed value into every element */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7446 /* store a value or an expression directly in global data or in local array */
7447 static void init_putv(init_params *p, CType *type, unsigned long c)
7449 int bt;
7450 void *ptr;
7451 CType dtype;
7452 int size, align;
7453 Section *sec = p->sec;
7454 uint64_t val;
7456 dtype = *type;
7457 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7459 size = type_size(type, &align);
7460 if (type->t & VT_BITFIELD)
7461 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7462 init_assert(p, c + size);
7464 if (sec) {
7465 /* XXX: not portable */
7466 /* XXX: generate error if incorrect relocation */
7467 gen_assign_cast(&dtype);
7468 bt = type->t & VT_BTYPE;
7470 if ((vtop->r & VT_SYM)
7471 && bt != VT_PTR
7472 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7473 || (type->t & VT_BITFIELD))
7474 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7476 tcc_error("initializer element is not computable at load time");
7478 if (NODATA_WANTED) {
7479 vtop--;
7480 return;
7483 ptr = sec->data + c;
7484 val = vtop->c.i;
7486 /* XXX: make code faster ? */
7487 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7488 vtop->sym->v >= SYM_FIRST_ANOM &&
7489 /* XXX This rejects compound literals like
7490 '(void *){ptr}'. The problem is that '&sym' is
7491 represented the same way, which would be ruled out
7492 by the SYM_FIRST_ANOM check above, but also '"string"'
7493 in 'char *p = "string"' is represented the same
7494 with the type being VT_PTR and the symbol being an
7495 anonymous one. That is, there's no difference in vtop
7496 between '(void *){x}' and '&(void *){x}'. Ignore
7497 pointer typed entities here. Hopefully no real code
7498 will ever use compound literals with scalar type. */
7499 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7500 /* These come from compound literals, memcpy stuff over. */
7501 Section *ssec;
7502 ElfSym *esym;
7503 ElfW_Rel *rel;
7504 esym = elfsym(vtop->sym);
7505 ssec = tcc_state->sections[esym->st_shndx];
7506 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7507 if (ssec->reloc) {
7508 /* We need to copy over all memory contents, and that
7509 includes relocations. Use the fact that relocs are
7510 created it order, so look from the end of relocs
7511 until we hit one before the copied region. */
7512 unsigned long relofs = ssec->reloc->data_offset;
7513 while (relofs >= sizeof(*rel)) {
7514 relofs -= sizeof(*rel);
7515 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7516 if (rel->r_offset >= esym->st_value + size)
7517 continue;
7518 if (rel->r_offset < esym->st_value)
7519 break;
7520 put_elf_reloca(symtab_section, sec,
7521 c + rel->r_offset - esym->st_value,
7522 ELFW(R_TYPE)(rel->r_info),
7523 ELFW(R_SYM)(rel->r_info),
7524 #if PTR_SIZE == 8
7525 rel->r_addend
7526 #else
7528 #endif
7532 } else {
7533 if (type->t & VT_BITFIELD) {
7534 int bit_pos, bit_size, bits, n;
7535 unsigned char *p, v, m;
7536 bit_pos = BIT_POS(vtop->type.t);
7537 bit_size = BIT_SIZE(vtop->type.t);
7538 p = (unsigned char*)ptr + (bit_pos >> 3);
7539 bit_pos &= 7, bits = 0;
7540 while (bit_size) {
7541 n = 8 - bit_pos;
7542 if (n > bit_size)
7543 n = bit_size;
7544 v = val >> bits << bit_pos;
7545 m = ((1 << n) - 1) << bit_pos;
7546 *p = (*p & ~m) | (v & m);
7547 bits += n, bit_size -= n, bit_pos = 0, ++p;
7549 } else
7550 switch(bt) {
7551 case VT_BOOL:
7552 *(char *)ptr = val != 0;
7553 break;
7554 case VT_BYTE:
7555 *(char *)ptr = val;
7556 break;
7557 case VT_SHORT:
7558 write16le(ptr, val);
7559 break;
7560 case VT_FLOAT:
7561 write32le(ptr, val);
7562 break;
7563 case VT_DOUBLE:
7564 write64le(ptr, val);
7565 break;
7566 case VT_LDOUBLE:
7567 #if defined TCC_IS_NATIVE_387
7568 /* Host and target platform may be different but both have x87.
7569 On windows, tcc does not use VT_LDOUBLE, except when it is a
7570 cross compiler. In this case a mingw gcc as host compiler
7571 comes here with 10-byte long doubles, while msvc or tcc won't.
7572 tcc itself can still translate by asm.
7573 In any case we avoid possibly random bytes 11 and 12.
7575 if (sizeof (long double) >= 10)
7576 memcpy(ptr, &vtop->c.ld, 10);
7577 #ifdef __TINYC__
7578 else if (sizeof (long double) == sizeof (double))
7579 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7580 #endif
7581 else if (vtop->c.ld == 0.0)
7583 else
7584 #endif
7585 /* For other platforms it should work natively, but may not work
7586 for cross compilers */
7587 if (sizeof(long double) == LDOUBLE_SIZE)
7588 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7589 else if (sizeof(double) == LDOUBLE_SIZE)
7590 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7591 #ifndef TCC_CROSS_TEST
7592 else
7593 tcc_error("can't cross compile long double constants");
7594 #endif
7595 break;
7597 #if PTR_SIZE == 8
7598 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7599 case VT_LLONG:
7600 case VT_PTR:
7601 if (vtop->r & VT_SYM)
7602 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7603 else
7604 write64le(ptr, val);
7605 break;
7606 case VT_INT:
7607 write32le(ptr, val);
7608 break;
7609 #else
7610 case VT_LLONG:
7611 write64le(ptr, val);
7612 break;
7613 case VT_PTR:
7614 case VT_INT:
7615 if (vtop->r & VT_SYM)
7616 greloc(sec, vtop->sym, c, R_DATA_PTR);
7617 write32le(ptr, val);
7618 break;
7619 #endif
7620 default:
7621 //tcc_internal_error("unexpected type");
7622 break;
7625 vtop--;
7626 } else {
7627 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7628 vswap();
7629 vstore();
7630 vpop();
7634 /* 't' contains the type and storage info. 'c' is the offset of the
7635 object in section 'sec'. If 'sec' is NULL, it means stack based
7636 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7637 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7638 size only evaluation is wanted (only for arrays). */
7639 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7641 int len, n, no_oblock, i;
7642 int size1, align1;
7643 Sym *s, *f;
7644 Sym indexsym;
7645 CType *t1;
7647 /* generate line number info */
7648 if (debug_modes && !p->sec)
7649 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* Pre-parse a single initializer element unless it is a brace list or a
   string literal (strings get array-specific handling below).  During a
   size-only pass on the stack, suppress code generation while parsing. */
7651 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7652 /* In case of strings we have special handling for arrays, so
7653 don't consume them as initializer value (which would commit them
7654 to some anonymous symbol). */
7655 tok != TOK_LSTR && tok != TOK_STR &&
7656 (!(flags & DIF_SIZE_ONLY)
7657 /* a struct may be initialized from a struct of same type, as in
7658 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7659 In that case we need to parse the element in order to check
7660 it for compatibility below */
7661 || (type->t & VT_BTYPE) == VT_STRUCT)
7663 int ncw_prev = nocode_wanted;
7664 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7665 ++nocode_wanted;
7666 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7667 nocode_wanted = ncw_prev;
7668 flags |= DIF_HAVE_ELEM;
/* --- array initializer --- */
7671 if (type->t & VT_ARRAY) {
7672 no_oblock = 1;
7673 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7674 tok == '{') {
7675 skip('{');
7676 no_oblock = 0;
7679 s = type->ref;
7680 n = s->c;
7681 t1 = pointed_type(type);
7682 size1 = type_size(t1, &align1);
7684 /* only parse strings here if correct type (otherwise: handle
7685 them as ((w)char *) expressions */
7686 if ((tok == TOK_LSTR &&
7687 #ifdef TCC_TARGET_PE
7688 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7689 #else
7690 (t1->t & VT_BTYPE) == VT_INT
7691 #endif
7692 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
/* Concatenate adjacent (wide) string literals into 'initstr',
   dropping the intermediate NUL terminators. */
7693 len = 0;
7694 cstr_reset(&initstr);
7695 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7696 tcc_error("unhandled string literal merging");
7697 while (tok == TOK_STR || tok == TOK_LSTR) {
7698 if (initstr.size)
7699 initstr.size -= size1;
7700 if (tok == TOK_STR)
7701 len += tokc.str.size;
7702 else
7703 len += tokc.str.size / sizeof(nwchar_t);
7704 len--;
7705 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7706 next();
7708 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7709 && tok != TOK_EOF) {
7710 /* Not a lone literal but part of a bigger expression. */
7711 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7712 tokc.str.size = initstr.size;
7713 tokc.str.data = initstr.data;
7714 goto do_init_array;
7717 decl_design_flex(p, s, len);
7718 if (!(flags & DIF_SIZE_ONLY)) {
7719 int nb = n, ch;
7720 if (len < nb)
7721 nb = len;
7722 if (len > nb)
7723 tcc_warning("initializer-string for array is too long");
7724 /* in order to go faster for common case (char
7725 string in global variable, we handle it
7726 specifically */
7727 if (p->sec && size1 == 1) {
7728 init_assert(p, c + nb);
7729 if (!NODATA_WANTED)
7730 memcpy(p->sec->data + c, initstr.data, nb);
7731 } else {
7732 for(i=0;i<n;i++) {
7733 if (i >= nb) {
7734 /* only add trailing zero if enough storage (no
7735 warning in this case since it is standard) */
7736 if (flags & DIF_CLEAR)
7737 break;
7738 if (n - i >= 4) {
7739 init_putz(p, c + i * size1, (n - i) * size1);
7740 break;
7742 ch = 0;
7743 } else if (size1 == 1)
7744 ch = ((unsigned char *)initstr.data)[i];
7745 else
7746 ch = ((nwchar_t *)initstr.data)[i];
7747 vpushi(ch);
7748 init_putv(p, t1, c + i * size1);
/* Non-string array initializer: iterate elements via designators. */
7752 } else {
7754 do_init_array:
7755 indexsym.c = 0;
7756 f = &indexsym;
7758 do_init_list:
7759 /* zero memory once in advance */
7760 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7761 init_putz(p, c, n*size1);
7762 flags |= DIF_CLEAR;
7765 len = 0;
7766 /* GNU extension: if the initializer is empty for a flex array,
7767 its size is zero. We won't enter the loop, so set the size
7768 now. */
7769 decl_design_flex(p, s, len);
7770 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7771 len = decl_designator(p, type, c, &f, flags, len);
7772 flags &= ~DIF_HAVE_ELEM;
7773 if (type->t & VT_ARRAY) {
7774 ++indexsym.c;
7775 /* special test for multi dimensional arrays (may not
7776 be strictly correct if designators are used at the
7777 same time) */
7778 if (no_oblock && len >= n*size1)
7779 break;
7780 } else {
7781 if (s->type.t == VT_UNION)
7782 f = NULL;
7783 else
7784 f = f->next;
7785 if (no_oblock && f == NULL)
7786 break;
7789 if (tok == '}')
7790 break;
7791 skip(',');
7794 if (!no_oblock)
7795 skip('}');
/* whole struct assigned from a compatible struct value */
7797 } else if ((flags & DIF_HAVE_ELEM)
7798 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7799 The source type might have VT_CONSTANT set, which is
7800 of course assignable to non-const elements. */
7801 && is_compatible_unqualified_types(type, &vtop->type)) {
7802 goto one_elem;
/* --- struct/union initializer: member list via do_init_list --- */
7804 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7805 no_oblock = 1;
7806 if ((flags & DIF_FIRST) || tok == '{') {
7807 skip('{');
7808 no_oblock = 0;
7810 s = type->ref;
7811 f = s->next;
7812 n = s->c;
7813 size1 = 1;
7814 goto do_init_list;
/* braced scalar initializer, e.g. "int x = {1};" */
7816 } else if (tok == '{') {
7817 if (flags & DIF_HAVE_ELEM)
7818 skip(';');
7819 next();
7820 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7821 skip('}');
7823 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7824 /* If we supported only ISO C we wouldn't have to accept calling
7825 this on anything than an array if DIF_SIZE_ONLY (and even then
7826 only on the outermost level, so no recursion would be needed),
7827 because initializing a flex array member isn't supported.
7828 But GNU C supports it, so we need to recurse even into
7829 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7830 /* just skip expression */
7831 if (flags & DIF_HAVE_ELEM)
7832 vpop();
7833 else
7834 skip_or_save_block(NULL);
/* plain single-element initializer: store the parsed value */
7836 } else {
7837 if (!(flags & DIF_HAVE_ELEM)) {
7838 /* This should happen only when we haven't parsed
7839 the init element above for fear of committing a
7840 string constant to memory too early. */
7841 if (tok != TOK_STR && tok != TOK_LSTR)
7842 expect("string constant");
7843 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7845 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7846 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7847 && vtop->c.i == 0
7848 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7850 vpop();
7851 else
7852 init_putv(p, type, c);
7856 /* parse an initializer for type 't' if 'has_init' is non zero, and
7857 allocate space in local or global data space ('r' is either
7858 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7859 variable 'v' of scope 'scope' is declared before initializers
7860 are parsed. If 'v' is zero, then a reference to the new object
7861 is put in the value stack. If 'has_init' is 2, a special parsing
7862 is done to handle string constants. */
7863 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7864 int has_init, int v, int global)
7866 int size, align, addr;
7867 TokenString *init_str = NULL;
7869 Section *sec;
7870 Sym *flexible_array;
7871 Sym *sym;
7872 int saved_nocode_wanted = nocode_wanted;
7873 #ifdef CONFIG_TCC_BCHECK
7874 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7875 #endif
7876 init_params p = {0};
7878 /* Always allocate static or global variables */
7879 if (v && (r & VT_VALMASK) == VT_CONST)
7880 nocode_wanted |= DATA_ONLY_WANTED;
7882 flexible_array = NULL;
7883 size = type_size(type, &align);
7885 /* exactly one flexible array may be initialized, either the
7886 toplevel array or the last member of the toplevel struct */
7888 if (size < 0) {
7889 /* If the base type itself was an array type of unspecified size
7890 (like in 'typedef int arr[]; arr x = {1};') then we will
7891 overwrite the unknown size by the real one for this decl.
7892 We need to unshare the ref symbol holding that size. */
7893 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
7894 p.flex_array_ref = type->ref;
7896 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
7897 Sym *field = type->ref->next;
7898 if (field) {
7899 while (field->next)
7900 field = field->next;
7901 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
7902 flexible_array = field;
7903 p.flex_array_ref = field->type.ref;
7904 size = -1;
/* Unknown total size: parse the initializer once in size-only mode,
   then rewind (via the macro/token-string machinery) and parse again
   for real once the size is known. */
7909 if (size < 0) {
7910 /* If unknown size, do a dry-run 1st pass */
7911 if (!has_init)
7912 tcc_error("unknown type size");
7913 if (has_init == 2) {
7914 /* only get strings */
7915 init_str = tok_str_alloc();
7916 while (tok == TOK_STR || tok == TOK_LSTR) {
7917 tok_str_add_tok(init_str);
7918 next();
7920 tok_str_add(init_str, -1);
7921 tok_str_add(init_str, 0);
7922 } else
7923 skip_or_save_block(&init_str);
7924 unget_tok(0);
7926 /* compute size */
7927 begin_macro(init_str, 1);
7928 next();
7929 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
7930 /* prepare second initializer parsing */
7931 macro_ptr = init_str->str;
7932 next();
7934 /* if still unknown size, error */
7935 size = type_size(type, &align);
7936 if (size < 0)
7937 tcc_error("unknown type size");
7939 /* If there's a flex member and it was used in the initializer
7940 adjust size. */
7941 if (flexible_array && flexible_array->type.ref->c > 0)
7942 size += flexible_array->type.ref->c
7943 * pointed_size(&flexible_array->type);
7946 /* take into account specified alignment if bigger */
7947 if (ad->a.aligned) {
7948 int speca = 1 << (ad->a.aligned - 1);
7949 if (speca > align)
7950 align = speca;
7951 } else if (ad->a.packed) {
7952 align = 1;
7955 if (!v && NODATA_WANTED)
7956 size = 0, align = 1;
/* --- local (stack based) allocation --- */
7958 if ((r & VT_VALMASK) == VT_LOCAL) {
7959 sec = NULL;
7960 #ifdef CONFIG_TCC_BCHECK
7961 if (bcheck && v) {
7962 /* add padding between stack variables for bound checking */
7963 loc -= align;
7965 #endif
7966 loc = (loc - size) & -align;
7967 addr = loc;
7968 p.local_offset = addr + size;
7969 #ifdef CONFIG_TCC_BCHECK
7970 if (bcheck && v) {
7971 /* add padding between stack variables for bound checking */
7972 loc -= align;
7974 #endif
7975 if (v) {
7976 /* local variable */
7977 #ifdef CONFIG_TCC_ASM
7978 if (ad->asm_label) {
7979 int reg = asm_parse_regvar(ad->asm_label);
7980 if (reg >= 0)
7981 r = (r & ~VT_VALMASK) | reg;
7983 #endif
7984 sym = sym_push(v, type, r, addr);
7985 if (ad->cleanup_func) {
7986 Sym *cls = sym_push2(&all_cleanups,
7987 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7988 cls->prev_tok = sym;
7989 cls->next = ad->cleanup_func;
7990 cls->ncl = cur_scope->cl.s;
7991 cur_scope->cl.s = cls;
7994 sym->a = ad->a;
7995 } else {
7996 /* push local reference */
7997 vset(type, r, addr);
/* --- static storage: pick or allocate an output section --- */
7999 } else {
8000 sym = NULL;
8001 if (v && global) {
8002 /* see if the symbol was already defined */
8003 sym = sym_find(v);
8004 if (sym) {
8005 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8006 && sym->type.ref->c > type->ref->c) {
8007 /* flex array was already declared with explicit size
8008 extern int arr[10];
8009 int arr[] = { 1,2,3 }; */
8010 type->ref->c = sym->type.ref->c;
8011 size = type_size(type, &align);
8013 patch_storage(sym, ad, type);
8014 /* we accept several definitions of the same global variable. */
8015 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8016 goto no_alloc;
8020 /* allocate symbol in corresponding section */
8021 sec = ad->section;
8022 if (!sec) {
8023 CType *tp = type;
8024 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8025 tp = &tp->ref->type;
8026 if (tp->t & VT_CONSTANT) {
8027 sec = rodata_section;
8028 } else if (has_init) {
8029 sec = data_section;
8030 /*if (tcc_state->g_debug & 4)
8031 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8032 } else if (tcc_state->nocommon)
8033 sec = bss_section;
8036 if (sec) {
8037 addr = section_add(sec, size, align);
8038 #ifdef CONFIG_TCC_BCHECK
8039 /* add padding if bound check */
8040 if (bcheck)
8041 section_add(sec, 1, 1);
8042 #endif
8043 } else {
8044 addr = align; /* SHN_COMMON is special, symbol value is align */
8045 sec = common_section;
8048 if (v) {
8049 if (!sym) {
8050 sym = sym_push(v, type, r | VT_SYM, 0);
8051 patch_storage(sym, ad, NULL);
8053 /* update symbol definition */
8054 put_extern_sym(sym, sec, addr, size);
8055 } else {
8056 /* push global reference */
8057 vpush_ref(type, sec, addr, size);
8058 sym = vtop->sym;
8059 vtop->r |= r;
8062 #ifdef CONFIG_TCC_BCHECK
8063 /* handles bounds now because the symbol must be defined
8064 before for the relocation */
8065 if (bcheck) {
8066 addr_t *bounds_ptr;
8068 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8069 /* then add global bound info */
8070 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8071 bounds_ptr[0] = 0; /* relocated */
8072 bounds_ptr[1] = size;
8074 #endif
/* --- VLA: emit runtime stack allocation code --- */
8077 if (type->t & VT_VLA) {
8078 int a;
8080 if (NODATA_WANTED)
8081 goto no_alloc;
8083 /* save before-VLA stack pointer if needed */
8084 if (cur_scope->vla.num == 0) {
8085 if (cur_scope->prev && cur_scope->prev->vla.num) {
8086 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8087 } else {
8088 gen_vla_sp_save(loc -= PTR_SIZE);
8089 cur_scope->vla.locorig = loc;
8093 vpush_type_size(type, &a);
8094 gen_vla_alloc(type, a);
8095 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8096 /* on _WIN64, because of the function args scratch area, the
8097 result of alloca differs from RSP and is returned in RAX. */
8098 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8099 #endif
8100 gen_vla_sp_save(addr);
8101 cur_scope->vla.loc = addr;
8102 cur_scope->vla.num++;
8103 } else if (has_init) {
8104 p.sec = sec;
8105 decl_initializer(&p, type, addr, DIF_FIRST);
8106 /* patch flexible array member size back to -1, */
8107 /* for possible subsequent similar declarations */
8108 if (flexible_array)
8109 flexible_array->type.ref->c = -1;
8112 no_alloc:
8113 /* restore parse state if needed */
8114 if (init_str) {
8115 end_macro();
8116 next();
8119 nocode_wanted = saved_nocode_wanted;
8122 /* generate vla code saved in post_type() */
/* Emit code that computes the byte size of a VLA function parameter:
   the size-expression tokens saved in vla_array_str are replayed through
   the macro machinery, evaluated with gexpr(), multiplied by the element
   size, and the result is stored in a freshly reserved int-sized stack
   slot whose offset is recorded in arg->type.ref->c.  Recurses into the
   referenced type first so inner dimensions are computed before outer
   ones. */
8123 static void func_vla_arg_code(Sym *arg)
8125 int align;
8126 TokenString *vla_array_tok = NULL;
8128 if (arg->type.ref)
8129 func_vla_arg_code(arg->type.ref);
8131 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* reserve an aligned int-sized stack slot for the computed size */
8132 loc -= type_size(&int_type, &align);
8133 loc &= -align;
8134 arg->type.ref->c = loc;
/* replay the saved size expression as a token stream and evaluate it */
8136 unget_tok(0);
8137 vla_array_tok = tok_str_alloc();
8138 vla_array_tok->str = arg->type.ref->vla_array_str;
8139 begin_macro(vla_array_tok, 1);
8140 next();
8141 gexpr();
8142 end_macro();
8143 next();
/* multiply element count by element size, store into the slot */
8144 vpush_type_size(&arg->type.ref->type, &align);
8145 gen_op('*');
8146 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8147 vswap();
8148 vstore();
8149 vpop();
/* Walk the parameter list of function 'sym' and emit the runtime
   size-computation code for every VLA-typed parameter. */
8153 static void func_vla_arg(Sym *sym)
8155 Sym *arg;
8157 for (arg = sym->type.ref->next; arg; arg = arg->next)
8158 if (arg->type.t & VT_VLA)
8159 func_vla_arg_code(arg);
8162 /* parse a function defined by symbol 'sym' and generate its code in
8163 'cur_text_section' */
8164 static void gen_function(Sym *sym)
8166 struct scope f = { 0 };
8167 cur_scope = root_scope = &f;
8168 nocode_wanted = 0;
8169 ind = cur_text_section->data_offset;
/* honor an explicit alignment attribute by padding with nops */
8170 if (sym->a.aligned) {
8171 size_t newoff = section_add(cur_text_section, 0,
8172 1 << (sym->a.aligned - 1));
8173 gen_fill_nops(newoff - ind);
8175 /* NOTE: we patch the symbol size later */
8176 put_extern_sym(sym, cur_text_section, ind, 0);
/* register constructors/destructors in their ELF init/fini arrays */
8177 if (sym->type.ref->f.func_ctor)
8178 add_array (tcc_state, ".init_array", sym->c);
8179 if (sym->type.ref->f.func_dtor)
8180 add_array (tcc_state, ".fini_array", sym->c);
8182 funcname = get_tok_str(sym->v, NULL);
8183 func_ind = ind;
8184 func_vt = sym->type.ref->type;
8185 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8187 /* put debug symbol */
8188 tcc_debug_funcstart(tcc_state, sym);
8189 /* push a dummy symbol to enable local sym storage */
8190 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8191 local_scope = 1; /* for function parameters */
8192 gfunc_prolog(sym);
8193 tcc_debug_prolog_epilog(tcc_state, 0);
8194 local_scope = 0;
8195 rsym = 0;
8196 clear_temp_local_var_list();
8197 func_vla_arg(sym);
/* compile the function body, then resolve the pending return jumps */
8198 block(0);
8199 gsym(rsym);
8200 nocode_wanted = 0;
8201 /* reset local stack */
8202 pop_local_syms(NULL, 0);
8203 tcc_debug_prolog_epilog(tcc_state, 1);
8204 gfunc_epilog();
8205 cur_text_section->data_offset = ind;
8206 local_scope = 0;
8207 label_pop(&global_label_stack, NULL, 0);
8208 sym_pop(&all_cleanups, NULL, 0);
8209 /* patch symbol size */
8210 elfsym(sym)->st_size = ind - func_ind;
8211 /* end of function */
8212 tcc_debug_funcend(tcc_state, ind - func_ind);
8213 /* It's better to crash than to generate wrong code */
8214 cur_text_section = NULL;
8215 funcname = ""; /* for safety */
8216 func_vt.t = VT_VOID; /* for safety */
8217 func_var = 0; /* for safety */
8218 ind = 0; /* for safety */
8219 func_ind = -1;
8220 nocode_wanted = DATA_ONLY_WANTED;
8221 check_vstack();
8222 /* do this after funcend debug info */
8223 next();
/* Emit code for all referenced 'static inline' functions that were
   recorded as token strings during parsing.  Repeats until a fixpoint,
   because compiling one inline function may reference another. */
8226 static void gen_inline_functions(TCCState *s)
8228 Sym *sym;
8229 int inline_generated, i;
8230 struct InlineFunc *fn;
8232 tcc_open_bf(s, ":inline:", 0);
8233 /* iterate while inline function are referenced */
8234 do {
8235 inline_generated = 0;
8236 for (i = 0; i < s->nb_inline_fns; ++i) {
8237 fn = s->inline_fns[i];
8238 sym = fn->sym;
8239 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8240 /* the function was used or forced (and then not internal):
8241 generate its code and convert it to a normal function */
8242 fn->sym = NULL;
8243 tcc_debug_putfile(s, fn->filename);
8244 begin_macro(fn->func_str, 1);
8245 next();
8246 cur_text_section = text_section;
8247 gen_function(sym);
8248 end_macro();
8250 inline_generated = 1;
8253 } while (inline_generated);
8254 tcc_close();
/* Release the saved token strings of inline functions that were never
   generated (fn->sym still set means unused), then drop the list. */
8257 static void free_inline_functions(TCCState *s)
8259 int i;
8260 /* free tokens of unused inline functions */
8261 for (i = 0; i < s->nb_inline_fns; ++i) {
8262 struct InlineFunc *fn = s->inline_fns[i];
8263 if (fn->sym)
8264 tok_str_free(fn->func_str);
8266 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* Parse and check a _Static_assert declaration.  Handles both the
   two-argument form '_Static_assert(expr, "msg")' and the
   single-argument form '_Static_assert(expr)' (C23). */
8269 static void do_Static_assert(void){
8270 CString error_str;
8271 int c;
8273 next();
8274 skip('(');
8275 c = expr_const();
/* ')' right after the expression: single-argument form */
8277 if (tok == ')') {
8278 if (!c)
8279 tcc_error("_Static_assert fail");
8280 next();
8281 goto static_assert_out;
/* two-argument form: user-provided message string(s) */
8284 skip(',');
8285 parse_mult_str(&error_str, "string constant");
8286 if (c == 0)
8287 tcc_error("%s", (char *)error_str.data);
8288 cstr_free(&error_str);
8289 skip(')');
8290 static_assert_out:
8291 skip(';');
8294 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8295 or VT_CMP if parsing old style parameter list
8296 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Top-level declaration parser.  Returns 1 only in VT_JMP mode when a
   declaration was parsed (so the for-statement parser knows), 0
   otherwise. */
8297 static int decl(int l)
8299 int v, has_init, r, oldint;
8300 CType type, btype;
8301 Sym *sym;
8302 AttributeDef ad, adbase;
8304 while (1) {
8305 if (tok == TOK_STATIC_ASSERT) {
8306 do_Static_assert();
8307 continue;
8310 oldint = 0;
/* no type specifier: handle global asm, K&R implicit int, or stop */
8311 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8312 if (l == VT_JMP)
8313 return 0;
8314 /* skip redundant ';' if not in old parameter decl scope */
8315 if (tok == ';' && l != VT_CMP) {
8316 next();
8317 continue;
8319 if (l != VT_CONST)
8320 break;
8321 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8322 /* global asm block */
8323 asm_global_instr();
8324 continue;
8326 if (tok >= TOK_UIDENT) {
8327 /* special test for old K&R protos without explicit int
8328 type. Only accepted when defining global data */
8329 btype.t = VT_INT;
8330 oldint = 1;
8331 } else {
8332 if (tok != TOK_EOF)
8333 expect("declaration");
8334 break;
/* bare 'struct ...;' or 'enum ...;' declares only the tag */
8338 if (tok == ';') {
8339 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8340 v = btype.ref->v;
8341 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8342 tcc_warning("unnamed struct/union that defines no instances");
8343 next();
8344 continue;
8346 if (IS_ENUM(btype.t)) {
8347 next();
8348 continue;
8352 while (1) { /* iterate thru each declaration */
8353 type = btype;
8354 ad = adbase;
8355 type_decl(&type, &ad, &v, TYPE_DIRECT);
8356 #if 0
8358 char buf[500];
8359 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8360 printf("type = '%s'\n", buf);
8362 #endif
8363 if ((type.t & VT_BTYPE) == VT_FUNC) {
8364 if ((type.t & VT_STATIC) && (l != VT_CONST))
8365 tcc_error("function without file scope cannot be static");
8366 /* if old style function prototype, we accept a
8367 declaration list */
8368 sym = type.ref;
8369 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8370 func_vt = type;
8371 decl(VT_CMP);
8373 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8374 if (sym->f.func_alwinl
8375 && ((type.t & (VT_EXTERN | VT_INLINE))
8376 == (VT_EXTERN | VT_INLINE))) {
8377 /* always_inline functions must be handled as if they
8378 don't generate multiple global defs, even if extern
8379 inline, i.e. GNU inline semantics for those. Rewrite
8380 them into static inline. */
8381 type.t &= ~VT_EXTERN;
8382 type.t |= VT_STATIC;
8384 #endif
8385 /* always compile 'extern inline' */
8386 if (type.t & VT_EXTERN)
8387 type.t &= ~VT_INLINE;
8389 } else if (oldint) {
8390 tcc_warning("type defaults to int");
8393 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8394 ad.asm_label = asm_label_instr();
8395 /* parse one last attribute list, after asm label */
8396 parse_attribute(&ad);
8397 #if 0
8398 /* gcc does not allow __asm__("label") with function definition,
8399 but why not ... */
8400 if (tok == '{')
8401 expect(";");
8402 #endif
8405 #ifdef TCC_TARGET_PE
8406 if (ad.a.dllimport || ad.a.dllexport) {
8407 if (type.t & VT_STATIC)
8408 tcc_error("cannot have dll linkage with static");
8409 if (type.t & VT_TYPEDEF) {
8410 tcc_warning("'%s' attribute ignored for typedef",
8411 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8412 (ad.a.dllexport = 0, "dllexport"));
8413 } else if (ad.a.dllimport) {
8414 if ((type.t & VT_BTYPE) == VT_FUNC)
8415 ad.a.dllimport = 0;
8416 else
8417 type.t |= VT_EXTERN;
8420 #endif
/* '{' here begins a function definition body */
8421 if (tok == '{') {
8422 if (l != VT_CONST)
8423 tcc_error("cannot use local functions");
8424 if ((type.t & VT_BTYPE) != VT_FUNC)
8425 expect("function definition");
8427 /* reject abstract declarators in function definition
8428 make old style params without decl have int type */
8429 sym = type.ref;
8430 while ((sym = sym->next) != NULL) {
8431 if (!(sym->v & ~SYM_FIELD))
8432 expect("identifier");
8433 if (sym->type.t == VT_VOID)
8434 sym->type = int_type;
8437 /* apply post-declaration attributes */
8438 merge_funcattr(&type.ref->f, &ad.f);
8440 /* put function symbol */
8441 type.t &= ~VT_EXTERN;
8442 sym = external_sym(v, &type, 0, &ad);
8444 /* static inline functions are just recorded as a kind
8445 of macro. Their code will be emitted at the end of
8446 the compilation unit only if they are used */
8447 if (sym->type.t & VT_INLINE) {
8448 struct InlineFunc *fn;
8449 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8450 strcpy(fn->filename, file->filename);
8451 fn->sym = sym;
8452 skip_or_save_block(&fn->func_str);
8453 dynarray_add(&tcc_state->inline_fns,
8454 &tcc_state->nb_inline_fns, fn);
8455 } else {
8456 /* compute text section */
8457 cur_text_section = ad.section;
8458 if (!cur_text_section)
8459 cur_text_section = text_section;
8460 gen_function(sym);
8462 break;
8463 } else {
8464 if (l == VT_CMP) {
8465 /* find parameter in function parameter list */
8466 for (sym = func_vt.ref->next; sym; sym = sym->next)
8467 if ((sym->v & ~SYM_FIELD) == v)
8468 goto found;
8469 tcc_error("declaration for parameter '%s' but no such parameter",
8470 get_tok_str(v, NULL));
8471 found:
8472 if (type.t & VT_STORAGE) /* 'register' is okay */
8473 tcc_error("storage class specified for '%s'",
8474 get_tok_str(v, NULL));
8475 if (sym->type.t != VT_VOID)
8476 tcc_error("redefinition of parameter '%s'",
8477 get_tok_str(v, NULL));
8478 convert_parameter_type(&type);
8479 sym->type = type;
8480 } else if (type.t & VT_TYPEDEF) {
8481 /* save typedefed type */
8482 /* XXX: test storage specifiers ? */
8483 sym = sym_find(v);
8484 if (sym && sym->sym_scope == local_scope) {
8485 if (!is_compatible_types(&sym->type, &type)
8486 || !(sym->type.t & VT_TYPEDEF))
8487 tcc_error("incompatible redefinition of '%s'",
8488 get_tok_str(v, NULL));
8489 sym->type = type;
8490 } else {
8491 sym = sym_push(v, &type, 0, 0);
8493 sym->a = ad.a;
8494 if ((type.t & VT_BTYPE) == VT_FUNC)
8495 merge_funcattr(&sym->type.ref->f, &ad.f);
8496 if (debug_modes)
8497 tcc_debug_typedef (tcc_state, sym);
8498 } else if ((type.t & VT_BTYPE) == VT_VOID
8499 && !(type.t & VT_EXTERN)) {
8500 tcc_error("declaration of void object");
8501 } else {
/* ordinary object or function declaration */
8502 r = 0;
8503 if ((type.t & VT_BTYPE) == VT_FUNC) {
8504 /* external function definition */
8505 /* specific case for func_call attribute */
8506 merge_funcattr(&type.ref->f, &ad.f);
8507 } else if (!(type.t & VT_ARRAY)) {
8508 /* not lvalue if array */
8509 r |= VT_LVAL;
8511 has_init = (tok == '=');
8512 if (has_init && (type.t & VT_VLA))
8513 tcc_error("variable length array cannot be initialized");
8514 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8515 || (type.t & VT_BTYPE) == VT_FUNC
8516 /* as with GCC, uninitialized global arrays with no size
8517 are considered extern: */
8518 || ((type.t & VT_ARRAY) && !has_init
8519 && l == VT_CONST && type.ref->c < 0)
8521 /* external variable or function */
8522 type.t |= VT_EXTERN;
8523 sym = external_sym(v, &type, r, &ad);
8524 if (ad.alias_target) {
8525 /* Aliases need to be emitted when their target
8526 symbol is emitted, even if perhaps unreferenced.
8527 We only support the case where the base is
8528 already defined, otherwise we would need
8529 deferring to emit the aliases until the end of
8530 the compile unit. */
8531 Sym *alias_target = sym_find(ad.alias_target);
8532 ElfSym *esym = elfsym(alias_target);
8533 if (!esym)
8534 tcc_error("unsupported forward __alias__ attribute");
8535 put_extern_sym2(sym, esym->st_shndx,
8536 esym->st_value, esym->st_size, 1);
8538 } else {
8539 if (l == VT_CONST || (type.t & VT_STATIC))
8540 r |= VT_CONST;
8541 else
8542 r |= VT_LOCAL;
8543 if (has_init)
8544 next();
8545 else if (l == VT_CONST)
8546 /* uninitialized global variables may be overridden */
8547 type.t |= VT_EXTERN;
8548 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8551 if (tok != ',') {
8552 if (l == VT_JMP)
8553 return 1;
8554 skip(';');
8555 break;
8557 next();
8561 return 0;
8564 /* ------------------------------------------------------------------------- */
8565 #undef gjmp_addr
8566 #undef gjmp
8567 /* ------------------------------------------------------------------------- */