Revert ""Fix" nocode_wanted in expr_landor"
[tinycc.git] / tccgen.c
blobda5c81b164491615a5dedaecade93f13eb3f7e2e
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index */
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Sym *global_stack;        /* file-scope symbols */
ST_DATA Sym *local_stack;         /* block-scope symbols of the current function */
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

static Sym *sym_free_first;       /* free list of recycled Sym objects */
static void **sym_pools;          /* dynarray of allocated Sym pools */
static int nb_sym_pools;

static Sym *all_cleanups, *pending_gotos;
static int local_scope;
static int in_sizeof;
static int constant_p;
ST_DATA char debug_modes;         /* bit 0: debug info, bit 1: test coverage */

ST_DATA SValue *vtop;             /* top of the value stack */
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)      /* _vstack[0] acts as a guard slot below the stack */
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
59 #define CODE_OFF_BIT 0x20000000
60 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
61 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int func_vc;
ST_DATA int func_ind;              /* code index of the current function's entry */
ST_DATA const char *funcname;      /* name of the function being compiled */
ST_DATA CType int_type, func_old_type, char_type, char_pointer_type; /* predefined types, set up in tccgen_init() */
static CString initstr;            /* scratch string used while parsing initializers */
/* size_t / ptrdiff_t equivalents, picked to match the target's pointer width */
#if PTR_SIZE == 4
#define VT_SIZE_T (VT_INT | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_INT
#elif LONG_SIZE == 4
#define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_LLONG
#else
#define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
#endif
/* state of the innermost switch statement being compiled */
static struct switch_t {
    struct case_t {
        int64_t v1, v2;   /* case value range [v1, v2] */
        int sym;          /* label of the case's code */
    } **p; int n; /* list of case ranges */
    int def_sym; /* default symbol */
    int nocode_wanted;    /* saved suppression state at switch entry */
    int *bsym;            /* break jump list */
    struct scope *scope;  /* scope the switch was opened in */
    struct switch_t *prev; /* enclosing switch, if any */
    SValue sv;            /* switch value */
} *cur_switch; /* current switch */
#define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
/*list of temporary local variables on the stack in current function. */
static struct temp_local_variable {
    int location; //offset on stack. Svalue.c.i
    short size;
    short align;
} arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
static int nb_temp_local_vars;  /* number of slots of arr_temp_local_vars in use */
/* lexical scope chain for blocks inside the current function */
static struct scope {
    struct scope *prev;                     /* enclosing scope */
    struct { int loc, locorig, num; } vla;  /* VLA stack state for this scope */
    struct { Sym *s; int n; } cl;           /* cleanup list — presumably attribute((cleanup)); verify at use sites */
    int *bsym, *csym;                       /* break / continue jump lists */
    Sym *lstk, *llstk;                      /* local symbol / local label stack marks */
} *cur_scope, *loop_scope, *root_scope;
/* parameters threaded through the initializer-parsing routines */
typedef struct {
    Section *sec;          /* target section for static data */
    int local_offset;
    Sym *flex_array_ref;   /* flexible array member being initialized, if any */
} init_params;
/* enable the precedence-climbing expression parser (see init_prec) */
#if 1
#define precedence_parser
static void init_prec(void);
#endif
/* forward declarations */
static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(init_params *p, CType *type, unsigned long c);
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
static void block(int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static int decl(int l);
static void expr_eq(void);
static void vpush_type_size(CType *type, int *a);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void free_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
static int get_temp_local_var(int size,int align);
static void clear_temp_local_var_list();
static void cast_error(CType *st, CType *dt);
151 /* ------------------------------------------------------------------------- */
152 /* Automagical code suppression */
154 /* Clear 'nocode_wanted' at forward label if it was used */
155 ST_FUNC void gsym(int t)
157 if (t) {
158 gsym_addr(t, ind);
159 CODE_ON();
163 /* Clear 'nocode_wanted' if current pc is a label */
164 static int gind()
166 int t = ind;
167 CODE_ON();
168 if (debug_modes)
169 tcc_tcov_block_begin(tcc_state);
170 return t;
/* Set 'nocode_wanted' after unconditional (backwards) jump */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Set 'nocode_wanted' after unconditional (forwards) jump */
static int gjmp_acs(int t)
{
    int jmp = gjmp(t);
    CODE_OFF();
    return jmp;
}
/* Route all jump emission through the code-suppression wrappers above.
   These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
/* ------------------------------------------------------------------------- */
193 ST_INLN int is_float(int t)
195 int bt = t & VT_BTYPE;
196 return bt == VT_LDOUBLE
197 || bt == VT_DOUBLE
198 || bt == VT_FLOAT
199 || bt == VT_QFLOAT;
202 static inline int is_integer_btype(int bt)
204 return bt == VT_BYTE
205 || bt == VT_BOOL
206 || bt == VT_SHORT
207 || bt == VT_INT
208 || bt == VT_LLONG;
211 static int btype_size(int bt)
213 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
214 bt == VT_SHORT ? 2 :
215 bt == VT_INT ? 4 :
216 bt == VT_LLONG ? 8 :
217 bt == VT_PTR ? PTR_SIZE : 0;
220 /* returns function return register from type */
221 static int R_RET(int t)
223 if (!is_float(t))
224 return REG_IRET;
225 #ifdef TCC_TARGET_X86_64
226 if ((t & VT_BTYPE) == VT_LDOUBLE)
227 return TREG_ST0;
228 #elif defined TCC_TARGET_RISCV64
229 if ((t & VT_BTYPE) == VT_LDOUBLE)
230 return REG_IRET;
231 #endif
232 return REG_FRET;
235 /* returns 2nd function return register, if any */
236 static int R2_RET(int t)
238 t &= VT_BTYPE;
239 #if PTR_SIZE == 4
240 if (t == VT_LLONG)
241 return REG_IRE2;
242 #elif defined TCC_TARGET_X86_64
243 if (t == VT_QLONG)
244 return REG_IRE2;
245 if (t == VT_QFLOAT)
246 return REG_FRE2;
247 #elif defined TCC_TARGET_RISCV64
248 if (t == VT_LDOUBLE)
249 return REG_IRE2;
250 #endif
251 return VT_CONST;
254 /* returns true for two-word types */
255 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
257 /* put function return registers to stack value */
258 static void PUT_R_RET(SValue *sv, int t)
260 sv->r = R_RET(t), sv->r2 = R2_RET(t);
263 /* returns function return register class for type t */
264 static int RC_RET(int t)
266 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
269 /* returns generic register class for type t */
270 static int RC_TYPE(int t)
272 if (!is_float(t))
273 return RC_INT;
274 #ifdef TCC_TARGET_X86_64
275 if ((t & VT_BTYPE) == VT_LDOUBLE)
276 return RC_ST0;
277 if ((t & VT_BTYPE) == VT_QFLOAT)
278 return RC_FRET;
279 #elif defined TCC_TARGET_RISCV64
280 if ((t & VT_BTYPE) == VT_LDOUBLE)
281 return RC_INT;
282 #endif
283 return RC_FLOAT;
286 /* returns 2nd register class corresponding to t and rc */
287 static int RC2_TYPE(int t, int rc)
289 if (!USING_TWO_WORDS(t))
290 return 0;
291 #ifdef RC_IRE2
292 if (rc == RC_IRET)
293 return RC_IRE2;
294 #endif
295 #ifdef RC_FRE2
296 if (rc == RC_FRET)
297 return RC_FRE2;
298 #endif
299 if (rc & RC_FLOAT)
300 return RC_FLOAT;
301 return RC_INT;
304 /* we use our own 'finite' function to avoid potential problems with
305 non standard math libs */
306 /* XXX: endianness dependent */
307 ST_FUNC int ieee_finite(double d)
309 int p[4];
310 memcpy(p, &d, sizeof(double));
311 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
/* compiling intel long double natively (host and target both x87) */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif
320 ST_FUNC void test_lvalue(void)
322 if (!(vtop->r & VT_LVAL))
323 expect("lvalue");
326 ST_FUNC void check_vstack(void)
328 if (vtop != vstack - 1)
329 tcc_error("internal compiler error: vstack leak (%d)",
330 (int)(vtop - vstack + 1));
/* vstack debugging aid */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
346 /* ------------------------------------------------------------------------- */
347 /* initialize vstack and types. This must be done also for tcc -E */
348 ST_FUNC void tccgen_init(TCCState *s1)
350 vtop = vstack - 1;
351 memset(vtop, 0, sizeof *vtop);
353 /* define some often used types */
354 int_type.t = VT_INT;
356 char_type.t = VT_BYTE;
357 if (s1->char_is_unsigned)
358 char_type.t |= VT_UNSIGNED;
359 char_pointer_type = char_type;
360 mk_pointer(&char_pointer_type);
362 func_old_type.t = VT_FUNC;
363 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
364 func_old_type.ref->f.func_call = FUNC_CDECL;
365 func_old_type.ref->f.func_type = FUNC_OLD;
366 #ifdef precedence_parser
367 init_prec();
368 #endif
369 cstr_new(&initstr);
372 ST_FUNC int tccgen_compile(TCCState *s1)
374 cur_text_section = NULL;
375 funcname = "";
376 func_ind = -1;
377 anon_sym = SYM_FIRST_ANOM;
378 const_wanted = 0;
379 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
380 local_scope = 0;
381 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
383 tcc_debug_start(s1);
384 tcc_tcov_start (s1);
385 #ifdef TCC_TARGET_ARM
386 arm_init(s1);
387 #endif
388 #ifdef INC_DEBUG
389 printf("%s: **** new file\n", file->filename);
390 #endif
391 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
392 next();
393 decl(VT_CONST);
394 gen_inline_functions(s1);
395 check_vstack();
396 /* end of translation unit info */
397 tcc_debug_end(s1);
398 tcc_tcov_end(s1);
399 return 0;
402 ST_FUNC void tccgen_finish(TCCState *s1)
404 cstr_free(&initstr);
405 free_inline_functions(s1);
406 sym_pop(&global_stack, NULL, 0);
407 sym_pop(&local_stack, NULL, 0);
408 /* free preprocessor macros */
409 free_defines(NULL);
410 /* free sym_pools */
411 dynarray_reset(&sym_pools, &nb_sym_pools);
412 sym_free_first = NULL;
415 /* ------------------------------------------------------------------------- */
416 ST_FUNC ElfSym *elfsym(Sym *s)
418 if (!s || !s->c)
419 return NULL;
420 return &((ElfSym *)symtab_section->data)[s->c];
423 /* apply storage attributes to Elf symbol */
424 ST_FUNC void update_storage(Sym *sym)
426 ElfSym *esym;
427 int sym_bind, old_sym_bind;
429 esym = elfsym(sym);
430 if (!esym)
431 return;
433 if (sym->a.visibility)
434 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
435 | sym->a.visibility;
437 if (sym->type.t & (VT_STATIC | VT_INLINE))
438 sym_bind = STB_LOCAL;
439 else if (sym->a.weak)
440 sym_bind = STB_WEAK;
441 else
442 sym_bind = STB_GLOBAL;
443 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
444 if (sym_bind != old_sym_bind) {
445 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
448 #ifdef TCC_TARGET_PE
449 if (sym->a.dllimport)
450 esym->st_other |= ST_PE_IMPORT;
451 if (sym->a.dllexport)
452 esym->st_other |= ST_PE_EXPORT;
453 #endif
455 #if 0
456 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
457 get_tok_str(sym->v, NULL),
458 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
459 sym->a.visibility,
460 sym->a.dllexport,
461 sym->a.dllimport
463 #endif
466 /* ------------------------------------------------------------------------- */
467 /* update sym->c so that it points to an external symbol in section
468 'section' with value 'value' */
470 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
471 addr_t value, unsigned long size,
472 int can_add_underscore)
474 int sym_type, sym_bind, info, other, t;
475 ElfSym *esym;
476 const char *name;
477 char buf1[256];
479 if (!sym->c) {
480 name = get_tok_str(sym->v, NULL);
481 t = sym->type.t;
482 if ((t & VT_BTYPE) == VT_FUNC) {
483 sym_type = STT_FUNC;
484 } else if ((t & VT_BTYPE) == VT_VOID) {
485 sym_type = STT_NOTYPE;
486 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
487 sym_type = STT_FUNC;
488 } else {
489 sym_type = STT_OBJECT;
491 if (t & (VT_STATIC | VT_INLINE))
492 sym_bind = STB_LOCAL;
493 else
494 sym_bind = STB_GLOBAL;
495 other = 0;
497 #ifdef TCC_TARGET_PE
498 if (sym_type == STT_FUNC && sym->type.ref) {
499 Sym *ref = sym->type.ref;
500 if (ref->a.nodecorate) {
501 can_add_underscore = 0;
503 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
504 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
505 name = buf1;
506 other |= ST_PE_STDCALL;
507 can_add_underscore = 0;
510 #endif
512 if (sym->asm_label) {
513 name = get_tok_str(sym->asm_label, NULL);
514 can_add_underscore = 0;
517 if (tcc_state->leading_underscore && can_add_underscore) {
518 buf1[0] = '_';
519 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
520 name = buf1;
523 info = ELFW(ST_INFO)(sym_bind, sym_type);
524 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
526 if (debug_modes)
527 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
529 } else {
530 esym = elfsym(sym);
531 esym->st_value = value;
532 esym->st_size = size;
533 esym->st_shndx = sh_num;
535 update_storage(sym);
538 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
540 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
541 return;
542 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
545 /* add a new relocation entry to symbol 'sym' in section 's' */
546 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
547 addr_t addend)
549 int c = 0;
551 if (nocode_wanted && s == cur_text_section)
552 return;
554 if (sym) {
555 if (0 == sym->c)
556 put_extern_sym(sym, NULL, 0, 0);
557 c = sym->c;
560 /* now we can add ELF relocation info */
561 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit targets: relocation without addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
571 /* ------------------------------------------------------------------------- */
572 /* symbol allocator */
573 static Sym *__sym_malloc(void)
575 Sym *sym_pool, *sym, *last_sym;
576 int i;
578 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
579 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
581 last_sym = sym_free_first;
582 sym = sym_pool;
583 for(i = 0; i < SYM_POOL_NB; i++) {
584 sym->next = last_sym;
585 last_sym = sym;
586 sym++;
588 sym_free_first = last_sym;
589 return last_sym;
592 static inline Sym *sym_malloc(void)
594 Sym *sym;
595 #ifndef SYM_DEBUG
596 sym = sym_free_first;
597 if (!sym)
598 sym = __sym_malloc();
599 sym_free_first = sym->next;
600 return sym;
601 #else
602 sym = tcc_malloc(sizeof(Sym));
603 return sym;
604 #endif
607 ST_INLN void sym_free(Sym *sym)
609 #ifndef SYM_DEBUG
610 sym->next = sym_free_first;
611 sym_free_first = sym;
612 #else
613 tcc_free(sym);
614 #endif
617 /* push, without hashing */
618 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
620 Sym *s;
622 s = sym_malloc();
623 memset(s, 0, sizeof *s);
624 s->v = v;
625 s->type.t = t;
626 s->c = c;
627 /* add in stack */
628 s->prev = *ps;
629 *ps = s;
630 return s;
633 /* find a symbol and return its associated structure. 's' is the top
634 of the symbol stack */
635 ST_FUNC Sym *sym_find2(Sym *s, int v)
637 while (s) {
638 if (s->v == v)
639 return s;
640 else if (s->v == -1)
641 return NULL;
642 s = s->prev;
644 return NULL;
647 /* structure lookup */
648 ST_INLN Sym *struct_find(int v)
650 v -= TOK_IDENT;
651 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
652 return NULL;
653 return table_ident[v]->sym_struct;
656 /* find an identifier */
657 ST_INLN Sym *sym_find(int v)
659 v -= TOK_IDENT;
660 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
661 return NULL;
662 return table_ident[v]->sym_identifier;
665 static int sym_scope(Sym *s)
667 if (IS_ENUM_VAL (s->type.t))
668 return s->type.ref->sym_scope;
669 else
670 return s->sym_scope;
673 /* push a given symbol on the symbol stack */
674 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
676 Sym *s, **ps;
677 TokenSym *ts;
679 if (local_stack)
680 ps = &local_stack;
681 else
682 ps = &global_stack;
683 s = sym_push2(ps, v, type->t, c);
684 s->type.ref = type->ref;
685 s->r = r;
686 /* don't record fields or anonymous symbols */
687 /* XXX: simplify */
688 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
689 /* record symbol in token array */
690 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
691 if (v & SYM_STRUCT)
692 ps = &ts->sym_struct;
693 else
694 ps = &ts->sym_identifier;
695 s->prev_tok = *ps;
696 *ps = s;
697 s->sym_scope = local_scope;
698 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
699 tcc_error("redeclaration of '%s'",
700 get_tok_str(v & ~SYM_STRUCT, NULL));
702 return s;
705 /* push a global identifier */
706 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
708 Sym *s, **ps;
709 s = sym_push2(&global_stack, v, t, c);
710 s->r = VT_CONST | VT_SYM;
711 /* don't record anonymous symbol */
712 if (v < SYM_FIRST_ANOM) {
713 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
714 /* modify the top most local identifier, so that sym_identifier will
715 point to 's' when popped; happens when called from inline asm */
716 while (*ps != NULL && (*ps)->sym_scope)
717 ps = &(*ps)->prev_tok;
718 s->prev_tok = *ps;
719 *ps = s;
721 return s;
724 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
725 pop them yet from the list, but do remove them from the token array. */
726 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
728 Sym *s, *ss, **ps;
729 TokenSym *ts;
730 int v;
732 s = *ptop;
733 while(s != b) {
734 ss = s->prev;
735 v = s->v;
736 /* remove symbol in token array */
737 /* XXX: simplify */
738 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
739 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
740 if (v & SYM_STRUCT)
741 ps = &ts->sym_struct;
742 else
743 ps = &ts->sym_identifier;
744 *ps = s->prev_tok;
746 if (!keep)
747 sym_free(s);
748 s = ss;
750 if (!keep)
751 *ptop = b;
754 /* ------------------------------------------------------------------------- */
755 static void vcheck_cmp(void)
757 /* cannot let cpu flags if other instruction are generated. Also
758 avoid leaving VT_JMP anywhere except on the top of the stack
759 because it would complicate the code generator.
761 Don't do this when nocode_wanted. vtop might come from
762 !nocode_wanted regions (see 88_codeopt.c) and transforming
763 it to a register without actually generating code is wrong
764 as their value might still be used for real. All values
765 we push under nocode_wanted will eventually be popped
766 again, so that the VT_CMP/VT_JMP value will be in vtop
767 when code is unsuppressed again. */
769 /* However if it's just automatic suppression via CODE_OFF/ON()
770 then it seems that we better let things work undisturbed.
771 How can it work at all under nocode_wanted? Well, gv() will
772 actually clear it at the gsym() in load()/VT_JMP in the
773 generator backends */
775 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
776 gv(RC_INT);
779 static void vsetc(CType *type, int r, CValue *vc)
781 if (vtop >= vstack + (VSTACK_SIZE - 1))
782 tcc_error("memory full (vstack)");
783 vcheck_cmp();
784 vtop++;
785 vtop->type = *type;
786 vtop->r = r;
787 vtop->r2 = VT_CONST;
788 vtop->c = *vc;
789 vtop->sym = NULL;
792 ST_FUNC void vswap(void)
794 SValue tmp;
796 vcheck_cmp();
797 tmp = vtop[0];
798 vtop[0] = vtop[-1];
799 vtop[-1] = tmp;
802 /* pop stack value */
803 ST_FUNC void vpop(void)
805 int v;
806 v = vtop->r & VT_VALMASK;
807 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
808 /* for x86, we need to pop the FP stack */
809 if (v == TREG_ST0) {
810 o(0xd8dd); /* fstp %st(0) */
811 } else
812 #endif
813 if (v == VT_CMP) {
814 /* need to put correct jump if && or || without test */
815 gsym(vtop->jtrue);
816 gsym(vtop->jfalse);
818 vtop--;
821 /* push constant of type "type" with useless value */
822 static void vpush(CType *type)
824 vset(type, VT_CONST, 0);
827 /* push arbitrary 64bit constant */
828 static void vpush64(int ty, unsigned long long v)
830 CValue cval;
831 CType ctype;
832 ctype.t = ty;
833 ctype.ref = NULL;
834 cval.i = v;
835 vsetc(&ctype, VT_CONST, &cval);
838 /* push integer constant */
839 ST_FUNC void vpushi(int v)
841 vpush64(VT_INT, v);
844 /* push a pointer sized constant */
845 static void vpushs(addr_t v)
847 vpush64(VT_SIZE_T, v);
850 /* push long long constant */
851 static inline void vpushll(long long v)
853 vpush64(VT_LLONG, v);
856 ST_FUNC void vset(CType *type, int r, int v)
858 CValue cval;
859 cval.i = v;
860 vsetc(type, r, &cval);
863 static void vseti(int r, int v)
865 CType type;
866 type.t = VT_INT;
867 type.ref = NULL;
868 vset(&type, r, v);
871 ST_FUNC void vpushv(SValue *v)
873 if (vtop >= vstack + (VSTACK_SIZE - 1))
874 tcc_error("memory full (vstack)");
875 vtop++;
876 *vtop = *v;
879 static void vdup(void)
881 vpushv(vtop);
884 /* rotate n first stack elements to the bottom
885 I1 ... In -> I2 ... In I1 [top is right]
887 ST_FUNC void vrotb(int n)
889 int i;
890 SValue tmp;
892 vcheck_cmp();
893 tmp = vtop[-n + 1];
894 for(i=-n+1;i!=0;i++)
895 vtop[i] = vtop[i+1];
896 vtop[0] = tmp;
899 /* rotate the n elements before entry e towards the top
900 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
902 ST_FUNC void vrote(SValue *e, int n)
904 int i;
905 SValue tmp;
907 vcheck_cmp();
908 tmp = *e;
909 for(i = 0;i < n - 1; i++)
910 e[-i] = e[-i - 1];
911 e[-n + 1] = tmp;
914 /* rotate n first stack elements to the top
915 I1 ... In -> In I1 ... I(n-1) [top is right]
917 ST_FUNC void vrott(int n)
919 vrote(vtop, n);
922 /* ------------------------------------------------------------------------- */
923 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
925 /* called from generators to set the result from relational ops */
926 ST_FUNC void vset_VT_CMP(int op)
928 vtop->r = VT_CMP;
929 vtop->cmp_op = op;
930 vtop->jfalse = 0;
931 vtop->jtrue = 0;
934 /* called once before asking generators to load VT_CMP to a register */
935 static void vset_VT_JMP(void)
937 int op = vtop->cmp_op;
939 if (vtop->jtrue || vtop->jfalse) {
940 int origt = vtop->type.t;
941 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
942 int inv = op & (op < 2); /* small optimization */
943 vseti(VT_JMP+inv, gvtst(inv, 0));
944 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
945 } else {
946 /* otherwise convert flags (rsp. 0/1) to register */
947 vtop->c.i = op;
948 if (op < 2) /* doesn't seem to happen */
949 vtop->r = VT_CONST;
953 /* Set CPU Flags, doesn't yet jump */
954 static void gvtst_set(int inv, int t)
956 int *p;
958 if (vtop->r != VT_CMP) {
959 vpushi(0);
960 gen_op(TOK_NE);
961 if (vtop->r != VT_CMP) /* must be VT_CONST then */
962 vset_VT_CMP(vtop->c.i != 0);
965 p = inv ? &vtop->jfalse : &vtop->jtrue;
966 *p = gjmp_append(*p, t);
969 /* Generate value test
971 * Generate a test for any value (jump, comparison and integers) */
972 static int gvtst(int inv, int t)
974 int op, x, u;
976 gvtst_set(inv, t);
977 t = vtop->jtrue, u = vtop->jfalse;
978 if (inv)
979 x = u, u = t, t = x;
980 op = vtop->cmp_op;
982 /* jump to the wanted target */
983 if (op > 1)
984 t = gjmp_cond(op ^ inv, t);
985 else if (op != inv)
986 t = gjmp(t);
987 /* resolve complementary jumps to here */
988 gsym(u);
990 vtop--;
991 return t;
994 /* generate a zero or nozero test */
995 static void gen_test_zero(int op)
997 if (vtop->r == VT_CMP) {
998 int j;
999 if (op == TOK_EQ) {
1000 j = vtop->jfalse;
1001 vtop->jfalse = vtop->jtrue;
1002 vtop->jtrue = j;
1003 vtop->cmp_op ^= 1;
1005 } else {
1006 vpushi(0);
1007 gen_op(op);
1011 /* ------------------------------------------------------------------------- */
1012 /* push a symbol value of TYPE */
1013 ST_FUNC void vpushsym(CType *type, Sym *sym)
1015 CValue cval;
1016 cval.i = 0;
1017 vsetc(type, VT_CONST | VT_SYM, &cval);
1018 vtop->sym = sym;
1021 /* Return a static symbol pointing to a section */
1022 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1024 int v;
1025 Sym *sym;
1027 v = anon_sym++;
1028 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1029 sym->type.t |= VT_STATIC;
1030 put_extern_sym(sym, sec, offset, size);
1031 return sym;
1034 /* push a reference to a section offset by adding a dummy symbol */
1035 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1037 vpushsym(type, get_sym_ref(type, sec, offset, size));
1040 /* define a new external reference to a symbol 'v' of type 'u' */
1041 ST_FUNC Sym *external_global_sym(int v, CType *type)
1043 Sym *s;
1045 s = sym_find(v);
1046 if (!s) {
1047 /* push forward reference */
1048 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1049 s->type.ref = type->ref;
1050 } else if (IS_ASM_SYM(s)) {
1051 s->type.t = type->t | (s->type.t & VT_EXTERN);
1052 s->type.ref = type->ref;
1053 update_storage(s);
1055 return s;
1058 /* create an external reference with no specific type similar to asm labels.
1059 This avoids type conflicts if the symbol is used from C too */
1060 ST_FUNC Sym *external_helper_sym(int v)
1062 CType ct = { VT_ASM_FUNC, NULL };
1063 return external_global_sym(v, &ct);
1066 /* push a reference to an helper function (such as memmove) */
1067 ST_FUNC void vpush_helper_func(int v)
1069 vpushsym(&func_old_type, external_helper_sym(v));
1072 /* Merge symbol attributes. */
1073 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1075 if (sa1->aligned && !sa->aligned)
1076 sa->aligned = sa1->aligned;
1077 sa->packed |= sa1->packed;
1078 sa->weak |= sa1->weak;
1079 sa->nodebug |= sa1->nodebug;
1080 if (sa1->visibility != STV_DEFAULT) {
1081 int vis = sa->visibility;
1082 if (vis == STV_DEFAULT
1083 || vis > sa1->visibility)
1084 vis = sa1->visibility;
1085 sa->visibility = vis;
1087 sa->dllexport |= sa1->dllexport;
1088 sa->nodecorate |= sa1->nodecorate;
1089 sa->dllimport |= sa1->dllimport;
1092 /* Merge function attributes. */
1093 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1095 if (fa1->func_call && !fa->func_call)
1096 fa->func_call = fa1->func_call;
1097 if (fa1->func_type && !fa->func_type)
1098 fa->func_type = fa1->func_type;
1099 if (fa1->func_args && !fa->func_args)
1100 fa->func_args = fa1->func_args;
1101 if (fa1->func_noreturn)
1102 fa->func_noreturn = 1;
1103 if (fa1->func_ctor)
1104 fa->func_ctor = 1;
1105 if (fa1->func_dtor)
1106 fa->func_dtor = 1;
1109 /* Merge attributes. */
1110 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1112 merge_symattr(&ad->a, &ad1->a);
1113 merge_funcattr(&ad->f, &ad1->f);
1115 if (ad1->section)
1116 ad->section = ad1->section;
1117 if (ad1->alias_target)
1118 ad->alias_target = ad1->alias_target;
1119 if (ad1->asm_label)
1120 ad->asm_label = ad1->asm_label;
1121 if (ad1->attr_mode)
1122 ad->attr_mode = ad1->attr_mode;
1125 /* Merge some type attributes. */
1126 static void patch_type(Sym *sym, CType *type)
1128 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1129 if (!(sym->type.t & VT_EXTERN))
1130 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1131 sym->type.t &= ~VT_EXTERN;
1134 if (IS_ASM_SYM(sym)) {
1135 /* stay static if both are static */
1136 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1137 sym->type.ref = type->ref;
1140 if (!is_compatible_types(&sym->type, type)) {
1141 tcc_error("incompatible types for redefinition of '%s'",
1142 get_tok_str(sym->v, NULL));
1144 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1145 int static_proto = sym->type.t & VT_STATIC;
1146 /* warn if static follows non-static function declaration */
1147 if ((type->t & VT_STATIC) && !static_proto
1148 /* XXX this test for inline shouldn't be here. Until we
1149 implement gnu-inline mode again it silences a warning for
1150 mingw caused by our workarounds. */
1151 && !((type->t | sym->type.t) & VT_INLINE))
1152 tcc_warning("static storage ignored for redefinition of '%s'",
1153 get_tok_str(sym->v, NULL));
1155 /* set 'inline' if both agree or if one has static */
1156 if ((type->t | sym->type.t) & VT_INLINE) {
1157 if (!((type->t ^ sym->type.t) & VT_INLINE)
1158 || ((type->t | sym->type.t) & VT_STATIC))
1159 static_proto |= VT_INLINE;
1162 if (0 == (type->t & VT_EXTERN)) {
1163 struct FuncAttr f = sym->type.ref->f;
1164 /* put complete type, use static from prototype */
1165 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1166 sym->type.ref = type->ref;
1167 merge_funcattr(&sym->type.ref->f, &f);
1168 } else {
1169 sym->type.t &= ~VT_INLINE | static_proto;
1172 if (sym->type.ref->f.func_type == FUNC_OLD
1173 && type->ref->f.func_type != FUNC_OLD) {
1174 sym->type.ref = type->ref;
1177 } else {
1178 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1179 /* set array size if it was omitted in extern declaration */
1180 sym->type.ref->c = type->ref->c;
1182 if ((type->t ^ sym->type.t) & VT_STATIC)
1183 tcc_warning("storage mismatch for redefinition of '%s'",
1184 get_tok_str(sym->v, NULL));
1188 /* Merge some storage attributes. */
1189 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1191 if (type)
1192 patch_type(sym, type);
1194 #ifdef TCC_TARGET_PE
1195 if (sym->a.dllimport != ad->a.dllimport)
1196 tcc_error("incompatible dll linkage for redefinition of '%s'",
1197 get_tok_str(sym->v, NULL));
1198 #endif
1199 merge_symattr(&sym->a, &ad->a);
1200 if (ad->asm_label)
1201 sym->asm_label = ad->asm_label;
1202 update_storage(sym);
1205 /* copy sym to other stack */
1206 static Sym *sym_copy(Sym *s0, Sym **ps)
1208 Sym *s;
1209 s = sym_malloc(), *s = *s0;
1210 s->prev = *ps, *ps = s;
1211 if (s->v < SYM_FIRST_ANOM) {
1212 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1213 s->prev_tok = *ps, *ps = s;
1215 return s;
1218 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1219 static void sym_copy_ref(Sym *s, Sym **ps)
1221 int bt = s->type.t & VT_BTYPE;
1222 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1223 Sym **sp = &s->type.ref;
1224 for (s = *sp, *sp = NULL; s; s = s->next) {
1225 Sym *s2 = sym_copy(s, ps);
1226 sp = &(*sp = s2)->next;
1227 sym_copy_ref(s2, ps);
/* define a new external reference to a symbol 'v' */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol (skip any local shadowing declarations) */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        /* already known: merge storage/attributes with this declaration */
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1261 /* save registers up to (vtop - n) stack entry */
1262 ST_FUNC void save_regs(int n)
1264 SValue *p, *p1;
1265 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1266 save_reg(p->r);
/* save r to the memory stack, and mark it as being free */
static void save_reg(int r)
{
    /* delegate to the general form, scanning the whole value stack */
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* an lvalue holds a pointer; functions decay to pointer */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count value-stack entries referencing r (first or second word) */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    /* no lightly-used register in rc2: fall back to the generic allocator */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when not generating code any register will do */
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
/* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
static int get_temp_local_var(int size,int align){
    int i;
    struct temp_local_variable *temp_var;
    int found_var;
    SValue *p;
    int r;
    char free;
    char found;
    found=0;
    for(i=0;i<nb_temp_local_vars;i++){
        temp_var=&arr_temp_local_vars[i];
        /* candidate must be large enough and have the exact alignment */
        if(temp_var->size<size||align!=temp_var->align){
            continue;
        }
        /*check if temp_var is free*/
        free=1;
        for(p=vstack;p<=vtop;p++) {
            r=p->r&VT_VALMASK;
            if(r==VT_LOCAL||r==VT_LLOCAL){
                if(p->c.i==temp_var->location){
                    /* slot is still referenced by a live value */
                    free=0;
                    break;
                }
            }
        }
        if(free){
            found_var=temp_var->location;
            found=1;
            break;
        }
    }
    if(!found){
        /* allocate fresh stack space below the current frame */
        loc = (loc - size) & -align;
        if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
            /* record it for reuse; when the table is full the slot is
               still returned, just not remembered */
            temp_var=&arr_temp_local_vars[i];
            temp_var->location=loc;
            temp_var->size=size;
            temp_var->align=align;
            nb_temp_local_vars++;
        }
        found_var=loc;
    }
    return found_var;
}
/* forget all recorded temporary local variables (per-function reset) */
static void clear_temp_local_var_list(){
    nb_temp_local_vars=0;
}
1445 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1446 if needed */
1447 static void move_reg(int r, int s, int t)
1449 SValue sv;
1451 if (r != s) {
1452 save_reg(r);
1453 sv.type.t = t;
1454 sv.type.ref = NULL;
1455 sv.r = s;
1456 sv.c.i = 0;
1457 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition */
static void gen_bounded_ptr_add(void)
{
    /* a VT_LOCAL base must be preserved across the helper call */
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    /* pick the indirection helper matching the access size */
    size = type_size(&vtop->type, &align);
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            /* bounds-check "ptr + 0", which validates ptr itself */
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check any argument still flagged VT_MUSTBOUND */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    /* special-case some callees by name (the called function slot) */
    sv = vtop - nb_args;
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            /* register the jmp_buf with the bounds checker */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev) */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        /* only named stack-resident variables are of interest */
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;   /* stack offset */
            bounds_ptr[1] = size;   /* object size */
        }
    }
}
#endif
/* Wrapper around sym_pop, that potentially also registers local bounds. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    /* emit scope information for the debugger before popping */
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* advance the lvalue at vtop by 'o' bytes and turn it into an
   unsigned-char lvalue (byte-wise bitfield access helper) */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushs(o);
    gen_op('+');
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    vtop->r |= VT_LVAL;
}
/* single-byte load mode for packed or otherwise unaligned bitfields */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    /* o: byte offset of first byte; bit_pos: bit offset within it */
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        /* n: number of bits taken from the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign extend: shift the field to the top, then arithmetic
           shift it back down */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* c: value to store is a plain constant (can be vdup'd cheaply) */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        /* n: number of bits stored into the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            /* avoid sign-extension surprises when complementing the mask */
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1700 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1702 int t;
1703 if (0 == sv->type.ref)
1704 return 0;
1705 t = sv->type.ref->auxtype;
1706 if (t != -1 && t != VT_STRUCT) {
1707 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1708 sv->r |= VT_LVAL;
1710 return t;
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* unaligned/packed bitfield: byte-wise load */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {
            if (!r_ok)
                r = get_reg(rc);
            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = VT_PTRDIFF_T;
                    gaddrof();
                    vpushs(PTR_SIZE);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift out the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: second word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* register pair: split r/r2 into two single-register values */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    /* merge: low word keeps r, high word becomes r2 */
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* materialize the bitfield first; its real type may change */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate both 32-bit halves, then rebuild two long longs */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod returns the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: L1*L2 full product plus cross terms
               into the high word */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops: low and high words are independent */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift: replicate the sign bit */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* signed 64-bit division on unsigned representations: divide the
   magnitudes, then restore the sign of the quotient (truncates
   toward zero, as C requires) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a, ub = b, q;

    if (ua >> 63)
        ua = -ua;
    if (ub >> 63)
        ub = -ub;
    q = ua / ub;
    /* quotient is negative iff operand signs differ */
    if ((a ^ b) >> 63)
        q = -q;
    return q;
}
/* signed 64-bit '<' on unsigned representations: flipping the sign
   bit maps signed order onto unsigned order */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;

    return (a ^ sign) < (b ^ sign);
}
/* handle integer constant optimizations and various machine
   independent opt */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int nonconst = (v1->r | v2->r) & VT_NONCONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    /* normalize 32-bit operands: truncate and sign-extend as needed */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted && !(nocode_wanted & unevalmask))
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift emulated on the unsigned representation */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded 32-bit result */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        nonconst = VT_NONCONST;
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                /* l2 is a power of two: compute log2 */
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
    /* propagate the "not really constant" marker to the result */
    if (vtop->r == VT_CONST)
        vtop->r |= nonconst;
}
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
/* x86 backends handle TOK_NEG directly in gen_opf() */
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x).  Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation.  We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory.  */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    /* point at the highest-order byte, where the sign bit lives
       (little-endian layout assumed by this offset) */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    if (op == TOK_NEG)
        v1 = v2;

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        /* NOTE(review): this boolean simplifies to "!finite(f1) &&
           finite(f2)", which does not match the stated intent; it
           looks like "(!ieee_finite(f1) || !ieee_finite(f2))" may
           have been meant — confirm against upstream before changing,
           as the behavior is long-standing. */
        if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                union { float f; unsigned u; } x1, x2, y;
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold.  */
                if (!const_wanted)
                    goto general_case;
                /* the run-time result of 0.0/0.0 on x87, also of other compilers
                   when used to compile the f1 /= f2 below, would be -nan */
                x1.f = f1, x2.f = f2;
                if (f1 == 0.0)
                    y.u = 0x7fc00000; /* nan */
                else
                    y.u = 0x7f800000; /* infinity */
                y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
                f1 = y.f;
                break;
            }
            f1 /= f2;
            break;
        case TOK_NEG:
            f1 = -f1;
            goto unary_result;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        vtop--;
    unary_result:
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
    } else {
    general_case:
        /* at least one runtime operand: emit real FP code */
        if (op == TOK_NEG) {
            gen_negf(op);
        } else {
            gen_opf(op);
        }
    }
}
2480 /* print a type. If 'varstr' is not NULL, then the variable is also
2481 printed in the type */
2482 /* XXX: union */
2483 /* XXX: add array and function pointers */
2484 static void type_to_str(char *buf, int buf_size,
2485 CType *type, const char *varstr)
2487 int bt, v, t;
2488 Sym *s, *sa;
2489 char buf1[256];
2490 const char *tstr;
2492 t = type->t;
2493 bt = t & VT_BTYPE;
2494 buf[0] = '\0';
2496 if (t & VT_EXTERN)
2497 pstrcat(buf, buf_size, "extern ");
2498 if (t & VT_STATIC)
2499 pstrcat(buf, buf_size, "static ");
2500 if (t & VT_TYPEDEF)
2501 pstrcat(buf, buf_size, "typedef ");
2502 if (t & VT_INLINE)
2503 pstrcat(buf, buf_size, "inline ");
2504 if (bt != VT_PTR) {
2505 if (t & VT_VOLATILE)
2506 pstrcat(buf, buf_size, "volatile ");
2507 if (t & VT_CONSTANT)
2508 pstrcat(buf, buf_size, "const ");
2510 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2511 || ((t & VT_UNSIGNED)
2512 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2513 && !IS_ENUM(t)
2515 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2517 buf_size -= strlen(buf);
2518 buf += strlen(buf);
2520 switch(bt) {
2521 case VT_VOID:
2522 tstr = "void";
2523 goto add_tstr;
2524 case VT_BOOL:
2525 tstr = "_Bool";
2526 goto add_tstr;
2527 case VT_BYTE:
2528 tstr = "char";
2529 goto add_tstr;
2530 case VT_SHORT:
2531 tstr = "short";
2532 goto add_tstr;
2533 case VT_INT:
2534 tstr = "int";
2535 goto maybe_long;
2536 case VT_LLONG:
2537 tstr = "long long";
2538 maybe_long:
2539 if (t & VT_LONG)
2540 tstr = "long";
2541 if (!IS_ENUM(t))
2542 goto add_tstr;
2543 tstr = "enum ";
2544 goto tstruct;
2545 case VT_FLOAT:
2546 tstr = "float";
2547 goto add_tstr;
2548 case VT_DOUBLE:
2549 tstr = "double";
2550 if (!(t & VT_LONG))
2551 goto add_tstr;
2552 case VT_LDOUBLE:
2553 tstr = "long double";
2554 add_tstr:
2555 pstrcat(buf, buf_size, tstr);
2556 break;
2557 case VT_STRUCT:
2558 tstr = "struct ";
2559 if (IS_UNION(t))
2560 tstr = "union ";
2561 tstruct:
2562 pstrcat(buf, buf_size, tstr);
2563 v = type->ref->v & ~SYM_STRUCT;
2564 if (v >= SYM_FIRST_ANOM)
2565 pstrcat(buf, buf_size, "<anonymous>");
2566 else
2567 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2568 break;
2569 case VT_FUNC:
2570 s = type->ref;
2571 buf1[0]=0;
2572 if (varstr && '*' == *varstr) {
2573 pstrcat(buf1, sizeof(buf1), "(");
2574 pstrcat(buf1, sizeof(buf1), varstr);
2575 pstrcat(buf1, sizeof(buf1), ")");
2577 pstrcat(buf1, buf_size, "(");
2578 sa = s->next;
2579 while (sa != NULL) {
2580 char buf2[256];
2581 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2582 pstrcat(buf1, sizeof(buf1), buf2);
2583 sa = sa->next;
2584 if (sa)
2585 pstrcat(buf1, sizeof(buf1), ", ");
2587 if (s->f.func_type == FUNC_ELLIPSIS)
2588 pstrcat(buf1, sizeof(buf1), ", ...");
2589 pstrcat(buf1, sizeof(buf1), ")");
2590 type_to_str(buf, buf_size, &s->type, buf1);
2591 goto no_var;
2592 case VT_PTR:
2593 s = type->ref;
2594 if (t & (VT_ARRAY|VT_VLA)) {
2595 if (varstr && '*' == *varstr)
2596 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2597 else
2598 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2599 type_to_str(buf, buf_size, &s->type, buf1);
2600 goto no_var;
2602 pstrcpy(buf1, sizeof(buf1), "*");
2603 if (t & VT_CONSTANT)
2604 pstrcat(buf1, buf_size, "const ");
2605 if (t & VT_VOLATILE)
2606 pstrcat(buf1, buf_size, "volatile ");
2607 if (varstr)
2608 pstrcat(buf1, sizeof(buf1), varstr);
2609 type_to_str(buf, buf_size, &s->type, buf1);
2610 goto no_var;
2612 if (varstr) {
2613 pstrcat(buf, buf_size, " ");
2614 pstrcat(buf, buf_size, varstr);
2616 no_var: ;
2619 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2621 char buf1[256], buf2[256];
2622 type_to_str(buf1, sizeof(buf1), st, NULL);
2623 type_to_str(buf2, sizeof(buf2), dt, NULL);
2624 tcc_error(fmt, buf1, buf2);
2627 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2629 char buf1[256], buf2[256];
2630 type_to_str(buf1, sizeof(buf1), st, NULL);
2631 type_to_str(buf2, sizeof(buf2), dt, NULL);
2632 tcc_warning(fmt, buf1, buf2);
2635 static int pointed_size(CType *type)
2637 int align;
2638 return type_size(pointed_type(type), &align);
2641 static inline int is_null_pointer(SValue *p)
2643 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2644 return 0;
2645 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2646 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2647 ((p->type.t & VT_BTYPE) == VT_PTR &&
2648 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2649 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2650 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2654 /* compare function types. OLD functions match any new functions */
static int is_compatible_func(CType *type1, CType *type2)
{
    Sym *s1, *s2;

    s1 = type1->ref;
    s2 = type2->ref;
    /* calling conventions must match exactly */
    if (s1->f.func_call != s2->f.func_call)
        return 0;
    /* prototype kinds must match, except that an old-style
       (unprototyped) declaration is compatible with anything */
    if (s1->f.func_type != s2->f.func_type
        && s1->f.func_type != FUNC_OLD
        && s2->f.func_type != FUNC_OLD)
        return 0;
    for (;;) {
        /* first iteration compares return types, subsequent ones the
           parameter types in order */
        if (!is_compatible_unqualified_types(&s1->type, &s2->type))
            return 0;
        /* an old-style side has no parameter list to check further */
        if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
            return 1;
        s1 = s1->next;
        s2 = s2->next;
        /* both parameter lists must end at the same point */
        if (!s1)
            return !s2;
        if (!s2)
            return 0;
    }
}
2681 /* return true if type1 and type2 are the same. If unqualified is
2682 true, qualifiers on the types are ignored.
static int compare_types(CType *type1, CType *type2, int unqualified)
{
    int bt1, t1, t2;

    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
    if (unqualified) {
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);
    }

    /* Default Vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
        t1 &= ~VT_DEFSIGN;
        t2 &= ~VT_DEFSIGN;
    }
    /* XXX: bitfields ? */
    if (t1 != t2)
        return 0;

    /* array lengths must agree, unless one of them is incomplete (c < 0) */
    if ((t1 & VT_ARRAY)
        && !(type1->ref->c < 0
          || type2->ref->c < 0
          || type1->ref->c == type2->ref->c))
        return 0;

    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        /* note: pointed-to types are compared WITH qualifiers */
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        /* struct/union identity is the shared Sym */
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
        /* If both are enums then they must be the same, if only one is then
           t1 and t2 must be equal, which was checked above already.  */
        return type1->ref == type2->ref;
    } else {
        return 1;
    }
}
2730 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2731 type is stored in DEST if non-null (except for pointer plus/minus) . */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1; /* 0 => the operands cannot legally be combined */

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers of the same base type */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    /* also clone the array symbol before completing it */
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* pointer comparisons yield an integer result */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversions: widest FP type wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
2848 /* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function designators decay to pointers-to-function, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* ptr - ptr: byte difference divided by the pointed size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the pointed-to element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shift result type depends only on the left operand */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
/* these targets convert unsigned long long natively */
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {
        /* unsigned 64-bit -> FP is not handled natively here:
           call the matching runtime helper for the target FP type */
        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* result comes back in the FP return register */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
/* these targets convert to unsigned long long natively */
#define gen_cvt_ftoi1 gen_cvt_ftoi
#else
/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        /* pick the runtime helper matching the source FP type */
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* result comes back in the integer return register(s) */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
#endif
3039 /* special delayed cast for char/short */
3040 static void force_charshort_cast(void)
3042 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3043 int dbt = vtop->type.t;
3044 vtop->r &= ~VT_MUSTCAST;
3045 vtop->type.t = sbt;
3046 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3047 vtop->type.t = dbt;
3050 static void gen_cast_s(int t)
3052 CType type;
3053 type.t = t;
3054 type.ref = NULL;
3055 gen_cast(&type);
3058 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
error:
            cast_error(&vtop->type, type);
        }

        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* when cross-compiling, avoid folding to long double with the
           host's (possibly different) long double representation */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if (sbt_bt == VT_LLONG) {
                    /* negate-twice trick avoids UB converting values
                       with the top bit set */
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer/pointer destination: first normalize the
                   source value into c.i ... */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ; /* already full width */
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                /* ... then truncate/sign-extend to the destination width */
                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ; /* full width, nothing to do */
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (nocode_wanted & DATA_ONLY_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
        #define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* fallback: narrow via shift left then arithmetic/logical
           shift right (SAR becomes SHR for unsigned, see gen_op) */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
3290 /* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r; /* alignment was stored in the symbol's register field */
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* flexible/incomplete element in incomplete array:
               report a positive size anyway */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is ABI-dependent (4 on i386 SysV
           and pre-EABI ARM, 8 elsewhere) */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
3356 /* push type size as known at runtime time on top of value stack. Put
3357 alignment at 'a' */
static void vpush_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* VLA: the size is only known at runtime; it was evaluated at
           declaration time and stored in a local slot (ref->c) */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        int size = type_size(type, a);
        if (size < 0)
            tcc_error("unknown type size");
        /* push as a pointer-sized constant */
#if PTR_SIZE == 8
        vpushll(size);
#else
        vpushi(size);
#endif
    }
}
3375 /* return the pointed type of t */
3376 static inline CType *pointed_type(CType *type)
3378 return &type->ref->type;
3381 /* modify type so that its it is a pointer to type. */
3382 ST_FUNC void mk_pointer(CType *type)
3384 Sym *s;
3385 s = sym_push(SYM_FIELD, type, 0, -1);
3386 type->t = VT_PTR | (type->t & VT_STORAGE);
3387 type->ref = s;
3390 /* return true if type1 and type2 are exactly the same (including
3391 qualifiers).
static int is_compatible_types(CType *type1, CType *type2)
{
    /* 0: qualifiers are significant in this comparison */
    return compare_types(type1,type2,0);
}
3398 /* return true if type1 and type2 are the same (ignoring qualifiers).
static int is_compatible_unqualified_types(CType *type1, CType *type2)
{
    /* 1: strip const/volatile before comparing */
    return compare_types(type1,type2,1);
}
/* report an impossible cast from ST to DT and abort */
static void cast_error(CType *st, CType *dt)
{
    type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
}
3410 /* verify type compatibility to store vtop in 'dt' type */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down multi-level pointers in lockstep, remembering any
           dropped qualifiers at each level */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        /* struct assignment requires (unqualified) compatible types */
        if (!is_compatible_unqualified_types(dt, st)) {
    error:
            cast_error(st, dt);
        }
        break;
    }
}
/* check assignability of vtop to type DT, then emit the conversion */
static void gen_assign_cast(CType *dt)
{
    verify_assign_cast(dt);
    gen_cast(dt);
}
3502 /* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            /* inline target-specific copy sequence */
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* normalize to 0/1 before masking into the field */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* field straddles storage units: byte-wise packed store */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        /* storing to void: just drop the value */
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64)  */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
3680 /* post defines POST/PRE add. c is the token ++ or -- */
3681 ST_FUNC void inc(int post, int c)
3683 test_lvalue();
3684 vdup(); /* save lvalue */
3685 if (post) {
3686 gv_dup(); /* duplicate value */
3687 vrotb(3);
3688 vrotb(3);
3690 /* add constant */
3691 vpushi(c - TOK_MID);
3692 gen_op('+');
3693 vstore(); /* store value */
3694 if (post)
3695 vpop(); /* if post op, return saved value */
3698 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3700 /* read the string */
3701 if (tok != TOK_STR)
3702 expect(msg);
3703 cstr_new(astr);
3704 while (tok == TOK_STR) {
3705 /* XXX: add \0 handling too ? */
3706 cstr_cat(astr, tokc.str.data, -1);
3707 next();
3709 cstr_ccat(astr, '\0');
3712 /* If I is >= 1 and a power of two, returns log2(i)+1.
3713 If I is 0 returns 0. */
3714 ST_FUNC int exact_log2p1(int i)
3716 int ret;
3717 if (!i)
3718 return 0;
3719 for (ret = 1; i >= 1 << 8; ret += 8)
3720 i >>= 8;
3721 if (i >= 1 << 4)
3722 ret += 4, i >>= 4;
3723 if (i >= 1 << 2)
3724 ret += 2, i >>= 2;
3725 if (i >= 1 << 1)
3726 ret++;
3727 return ret;
3730 /* Parse __attribute__((...)) GNUC extension. */
/* Consumes every consecutive __attribute__((...)) group (via the 'redo'
   label) and records what was parsed into *ad.  Unknown attribute names
   only produce a warning; their parenthesized arguments are skipped. */
3731 static void parse_attribute(AttributeDef *ad)
3733 int t, n;
3734 CString astr;
3736 redo:
3737 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3738 return;
3739 next();
3740 skip('(');
3741 skip('(');
3742 while (tok != ')') {
3743 if (tok < TOK_IDENT)
3744 expect("attribute name");
3745 t = tok;
3746 next();
3747 switch(t) {
/* cleanup(fn): fn must name a function; an unknown name is implicitly
   declared with the old-style function type */
3748 case TOK_CLEANUP1:
3749 case TOK_CLEANUP2:
3751 Sym *s;
3753 skip('(');
3754 s = sym_find(tok);
3755 if (!s) {
3756 tcc_warning_c(warn_implicit_function_declaration)(
3757 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
3758 s = external_global_sym(tok, &func_old_type);
3759 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
3760 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
3761 ad->cleanup_func = s;
3762 next();
3763 skip(')');
3764 break;
3766 case TOK_CONSTRUCTOR1:
3767 case TOK_CONSTRUCTOR2:
3768 ad->f.func_ctor = 1;
3769 break;
3770 case TOK_DESTRUCTOR1:
3771 case TOK_DESTRUCTOR2:
3772 ad->f.func_dtor = 1;
3773 break;
3774 case TOK_ALWAYS_INLINE1:
3775 case TOK_ALWAYS_INLINE2:
3776 ad->f.func_alwinl = 1;
3777 break;
3778 case TOK_SECTION1:
3779 case TOK_SECTION2:
3780 skip('(');
3781 parse_mult_str(&astr, "section name");
3782 ad->section = find_section(tcc_state, (char *)astr.data);
3783 skip(')');
3784 cstr_free(&astr);
3785 break;
3786 case TOK_ALIAS1:
3787 case TOK_ALIAS2:
3788 skip('(');
3789 parse_mult_str(&astr, "alias(\"target\")");
3790 ad->alias_target = /* save string as token, for later */
3791 tok_alloc((char*)astr.data, astr.size-1)->tok;
3792 skip(')');
3793 cstr_free(&astr);
3794 break;
3795 case TOK_VISIBILITY1:
3796 case TOK_VISIBILITY2:
3797 skip('(');
3798 parse_mult_str(&astr,
3799 "visibility(\"default|hidden|internal|protected\")");
3800 if (!strcmp (astr.data, "default"))
3801 ad->a.visibility = STV_DEFAULT;
3802 else if (!strcmp (astr.data, "hidden"))
3803 ad->a.visibility = STV_HIDDEN;
3804 else if (!strcmp (astr.data, "internal"))
3805 ad->a.visibility = STV_INTERNAL;
3806 else if (!strcmp (astr.data, "protected"))
3807 ad->a.visibility = STV_PROTECTED;
3808 else
3809 expect("visibility(\"default|hidden|internal|protected\")");
3810 skip(')');
3811 cstr_free(&astr);
3812 break;
3813 case TOK_ALIGNED1:
3814 case TOK_ALIGNED2:
3815 if (tok == '(') {
3816 next();
3817 n = expr_const();
3818 if (n <= 0 || (n & (n - 1)) != 0)
3819 tcc_error("alignment must be a positive power of two");
3820 skip(')');
3821 } else {
/* aligned without an argument requests the maximum alignment */
3822 n = MAX_ALIGN;
/* stored as log2(n)+1 so that 0 means "no alignment given" */
3824 ad->a.aligned = exact_log2p1(n);
3825 if (n != 1 << (ad->a.aligned - 1))
3826 tcc_error("alignment of %d is larger than implemented", n);
3827 break;
3828 case TOK_PACKED1:
3829 case TOK_PACKED2:
3830 ad->a.packed = 1;
3831 break;
3832 case TOK_WEAK1:
3833 case TOK_WEAK2:
3834 ad->a.weak = 1;
3835 break;
3836 case TOK_NODEBUG1:
3837 case TOK_NODEBUG2:
3838 ad->a.nodebug = 1;
3839 break;
3840 case TOK_UNUSED1:
3841 case TOK_UNUSED2:
3842 /* currently, no need to handle it because tcc does not
3843 track unused objects */
3844 break;
3845 case TOK_NORETURN1:
3846 case TOK_NORETURN2:
3847 ad->f.func_noreturn = 1;
3848 break;
3849 case TOK_CDECL1:
3850 case TOK_CDECL2:
3851 case TOK_CDECL3:
3852 ad->f.func_call = FUNC_CDECL;
3853 break;
3854 case TOK_STDCALL1:
3855 case TOK_STDCALL2:
3856 case TOK_STDCALL3:
3857 ad->f.func_call = FUNC_STDCALL;
3858 break;
3859 #ifdef TCC_TARGET_I386
/* regparm(n): n is clamped to the 0..3 range */
3860 case TOK_REGPARM1:
3861 case TOK_REGPARM2:
3862 skip('(');
3863 n = expr_const();
3864 if (n > 3)
3865 n = 3;
3866 else if (n < 0)
3867 n = 0;
3868 if (n > 0)
3869 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3870 skip(')');
3871 break;
3872 case TOK_FASTCALL1:
3873 case TOK_FASTCALL2:
3874 case TOK_FASTCALL3:
3875 ad->f.func_call = FUNC_FASTCALLW;
3876 break;
3877 #endif
/* mode(QI/HI/SI/DI/word): override the integer type width; stored as
   VT_* + 1 so that 0 means "no mode attribute seen" */
3878 case TOK_MODE:
3879 skip('(');
3880 switch(tok) {
3881 case TOK_MODE_DI:
3882 ad->attr_mode = VT_LLONG + 1;
3883 break;
3884 case TOK_MODE_QI:
3885 ad->attr_mode = VT_BYTE + 1;
3886 break;
3887 case TOK_MODE_HI:
3888 ad->attr_mode = VT_SHORT + 1;
3889 break;
3890 case TOK_MODE_SI:
3891 case TOK_MODE_word:
3892 ad->attr_mode = VT_INT + 1;
3893 break;
3894 default:
3895 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3896 break;
3898 next();
3899 skip(')');
3900 break;
3901 case TOK_DLLEXPORT:
3902 ad->a.dllexport = 1;
3903 break;
3904 case TOK_NODECORATE:
3905 ad->a.nodecorate = 1;
3906 break;
3907 case TOK_DLLIMPORT:
3908 ad->a.dllimport = 1;
3909 break;
3910 default:
3911 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
3912 /* skip parameters */
3913 if (tok == '(') {
3914 int parenthesis = 0;
3915 do {
3916 if (tok == '(')
3917 parenthesis++;
3918 else if (tok == ')')
3919 parenthesis--;
3920 next();
3921 } while (parenthesis && tok != -1);
3923 break;
3925 if (tok != ',')
3926 break;
3927 next();
3929 skip(')');
3930 skip(')');
/* several __attribute__((...)) groups may follow each other */
3931 goto redo;
/* Look up member V (a token number) in struct/union TYPE, descending
   into anonymous sub-structs/unions.  On success, the byte offsets of
   all enclosing anonymous members plus the member itself are summed
   into *cumofs and the member Sym is returned.  Recursive calls carry
   SYM_FIELD in V; the outermost call (V without SYM_FIELD) reports an
   error instead of returning NULL when the field is missing or the
   type is incomplete. */
3934 static Sym * find_field (CType *type, int v, int *cumofs)
3936 Sym *s = type->ref;
3937 int v1 = v | SYM_FIELD;
3939 while ((s = s->next) != NULL) {
3940 if (s->v == v1) {
3941 *cumofs += s->c;
3942 return s;
3944 if ((s->type.t & VT_BTYPE) == VT_STRUCT
3945 && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
3946 /* try to find field in anonymous sub-struct/union */
3947 Sym *ret = find_field (&s->type, v1, cumofs);
3948 if (ret) {
/* also add the offset of the anonymous member itself */
3949 *cumofs += s->c;
3950 return ret;
3955 if (!(v & SYM_FIELD)) { /* top-level call */
3956 s = type->ref;
/* c < 0: the struct/union tag was declared but never completed */
3957 if (s->c < 0)
3958 tcc_error("dereferencing incomplete type '%s'",
3959 get_tok_str(s->v & ~SYM_STRUCT, 0));
3960 else
3961 tcc_error("field not found: %s",
3962 get_tok_str(v, &tokc));
3964 return NULL;
3967 static void check_fields (CType *type, int check)
3969 Sym *s = type->ref;
3971 while ((s = s->next) != NULL) {
3972 int v = s->v & ~SYM_FIELD;
3973 if (v < SYM_FIRST_ANOM) {
3974 TokenSym *ts = table_ident[v - TOK_IDENT];
3975 if (check && (ts->tok & SYM_FIELD))
3976 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
3977 ts->tok ^= SYM_FIELD;
3978 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
3979 check_fields (&s->type, check);
/* Compute the layout (member offsets, bitfield positions, total size
   and alignment) of a fully parsed struct/union TYPE.  Supports both
   the PCC/GCC and the MS (ms_bitfields) bitfield models, #pragma pack,
   and the packed/aligned attributes in *ad.  Results are stored in the
   member Syms (f->c = byte offset, bit position/size encoded in
   f->type.t) and in type->ref->c (size) / type->ref->r (alignment). */
3983 static void struct_layout(CType *type, AttributeDef *ad)
3985 int size, align, maxalign, offset, c, bit_pos, bit_size;
3986 int packed, a, bt, prevbt, prev_bit_size;
3987 int pcc = !tcc_state->ms_bitfields;
3988 int pragma_pack = *tcc_state->pack_stack_ptr;
3989 Sym *f;
3991 maxalign = 1;
3992 offset = 0;
3993 c = 0;
3994 bit_pos = 0;
3995 prevbt = VT_STRUCT; /* make it never match */
3996 prev_bit_size = 0;
3998 //#define BF_DEBUG
/* first pass: assign each member an offset (and bit position) */
4000 for (f = type->ref->next; f; f = f->next) {
4001 if (f->type.t & VT_BITFIELD)
4002 bit_size = BIT_SIZE(f->type.t);
4003 else
4004 bit_size = -1;
4005 size = type_size(&f->type, &align);
4006 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4007 packed = 0;
4009 if (pcc && bit_size == 0) {
4010 /* in pcc mode, packing does not affect zero-width bitfields */
4012 } else {
4013 /* in pcc mode, attribute packed overrides if set. */
4014 if (pcc && (f->a.packed || ad->a.packed))
4015 align = packed = 1;
4017 /* pragma pack overrides align if lesser and packs bitfields always */
4018 if (pragma_pack) {
4019 packed = 1;
4020 if (pragma_pack < align)
4021 align = pragma_pack;
4022 /* in pcc mode pragma pack also overrides individual align */
4023 if (pcc && pragma_pack < a)
4024 a = 0;
4027 /* some individual align was specified */
4028 if (a)
4029 align = a;
/* unions: all members start at offset 0; track the largest size */
4031 if (type->ref->type.t == VT_UNION) {
4032 if (pcc && bit_size >= 0)
4033 size = (bit_size + 7) >> 3;
4034 offset = 0;
4035 if (size > c)
4036 c = size;
4038 } else if (bit_size < 0) {
/* plain (non-bitfield) struct member: close any open bitfield run,
   then align and advance the running byte offset 'c' */
4039 if (pcc)
4040 c += (bit_pos + 7) >> 3;
4041 c = (c + align - 1) & -align;
4042 offset = c;
4043 if (size > 0)
4044 c += size;
4045 bit_pos = 0;
4046 prevbt = VT_STRUCT;
4047 prev_bit_size = 0;
4049 } else {
4050 /* A bit-field. Layout is more complicated. There are two
4051 options: PCC (GCC) compatible and MS compatible */
4052 if (pcc) {
4053 /* In PCC layout a bit-field is placed adjacent to the
4054 preceding bit-fields, except if:
4055 - it has zero-width
4056 - an individual alignment was given
4057 - it would overflow its base type container and
4058 there is no packing */
4059 if (bit_size == 0) {
4060 new_field:
4061 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4062 bit_pos = 0;
4063 } else if (f->a.aligned) {
4064 goto new_field;
4065 } else if (!packed) {
4066 int a8 = align * 8;
4067 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4068 if (ofs > size / align)
4069 goto new_field;
4072 /* in pcc mode, long long bitfields have type int if they fit */
4073 if (size == 8 && bit_size <= 32)
4074 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4076 while (bit_pos >= align * 8)
4077 c += align, bit_pos -= align * 8;
4078 offset = c;
4080 /* In PCC layout named bit-fields influence the alignment
4081 of the containing struct using the base types alignment,
4082 except for packed fields (which here have correct align). */
4083 if (f->v & SYM_FIRST_ANOM
4084 // && bit_size // ??? gcc on ARM/rpi does that
4086 align = 1;
4088 } else {
4089 bt = f->type.t & VT_BTYPE;
4090 if ((bit_pos + bit_size > size * 8)
4091 || (bit_size > 0) == (bt != prevbt)
4093 c = (c + align - 1) & -align;
4094 offset = c;
4095 bit_pos = 0;
4096 /* In MS bitfield mode a bit-field run always uses
4097 at least as many bits as the underlying type.
4098 To start a new run it's also required that this
4099 or the last bit-field had non-zero width. */
4100 if (bit_size || prev_bit_size)
4101 c += size;
4103 /* In MS layout the records alignment is normally
4104 influenced by the field, except for a zero-width
4105 field at the start of a run (but by further zero-width
4106 fields it is again). */
4107 if (bit_size == 0 && prevbt != bt)
4108 align = 1;
4109 prevbt = bt;
4110 prev_bit_size = bit_size;
/* re-encode the (possibly moved) bit position into the member type */
4113 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4114 | (bit_pos << VT_STRUCT_SHIFT);
4115 bit_pos += bit_size;
4117 if (align > maxalign)
4118 maxalign = align;
4120 #ifdef BF_DEBUG
4121 printf("set field %s offset %-2d size %-2d align %-2d",
4122 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4123 if (f->type.t & VT_BITFIELD) {
4124 printf(" pos %-2d bits %-2d",
4125 BIT_POS(f->type.t),
4126 BIT_SIZE(f->type.t)
4129 printf("\n");
4130 #endif
4132 f->c = offset;
4133 f->r = 0;
4136 if (pcc)
4137 c += (bit_pos + 7) >> 3;
4139 /* store size and alignment */
4140 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4141 if (a < maxalign)
4142 a = maxalign;
4143 type->ref->r = a;
4144 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4145 /* can happen if individual align for some member was given. In
4146 this case MSVC ignores maxalign when aligning the size */
4147 a = pragma_pack;
4148 if (a < bt)
4149 a = bt;
4151 c = (c + a - 1) & -a;
4152 type->ref->c = c;
4154 #ifdef BF_DEBUG
4155 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4156 #endif
4158 /* check whether we can access bitfields by their type */
/* second pass: if a bitfield cannot be loaded through its declared
   type, record a narrower auxiliary type in f->auxtype, or VT_STRUCT
   to force byte-wise access */
4159 for (f = type->ref->next; f; f = f->next) {
4160 int s, px, cx, c0;
4161 CType t;
4163 if (0 == (f->type.t & VT_BITFIELD))
4164 continue;
4165 f->type.ref = f;
4166 f->auxtype = -1;
4167 bit_size = BIT_SIZE(f->type.t);
4168 if (bit_size == 0)
4169 continue;
4170 bit_pos = BIT_POS(f->type.t);
4171 size = type_size(&f->type, &align);
/* fast path: the declared type fully covers the bits and stays
   inside the struct */
4173 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4174 #ifdef TCC_TARGET_ARM
4175 && !(f->c & (align - 1))
4176 #endif
4178 continue;
4180 /* try to access the field using a different type */
4181 c0 = -1, s = align = 1;
4182 t.t = VT_BYTE;
4183 for (;;) {
4184 px = f->c * 8 + bit_pos;
4185 cx = (px >> 3) & -align;
4186 px = px - (cx << 3);
4187 if (c0 == cx)
4188 break;
4189 s = (px + bit_size + 7) >> 3;
4190 if (s > 4) {
4191 t.t = VT_LLONG;
4192 } else if (s > 2) {
4193 t.t = VT_INT;
4194 } else if (s > 1) {
4195 t.t = VT_SHORT;
4196 } else {
4197 t.t = VT_BYTE;
4199 s = type_size(&t, &align);
4200 c0 = cx;
4203 if (px + bit_size <= s * 8 && cx + s <= c
4204 #ifdef TCC_TARGET_ARM
4205 && !(cx & (align - 1))
4206 #endif
4208 /* update offset and bit position */
4209 f->c = cx;
4210 bit_pos = px;
4211 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4212 | (bit_pos << VT_STRUCT_SHIFT);
4213 if (s != size)
4214 f->auxtype = t.t;
4215 #ifdef BF_DEBUG
4216 printf("FIX field %s offset %-2d size %-2d align %-2d "
4217 "pos %-2d bits %-2d\n",
4218 get_tok_str(f->v & ~SYM_FIELD, NULL),
4219 cx, s, align, px, bit_size);
4220 #endif
4221 } else {
4222 /* fall back to load/store single-byte wise */
4223 f->auxtype = VT_STRUCT;
4224 #ifdef BF_DEBUG
4225 printf("FIX field %s : load byte-wise\n",
4226 get_tok_str(f->v & ~SYM_FIELD, NULL));
4227 #endif
4232 static void do_Static_assert(void);
4234 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* On return *type refers to the (possibly freshly pushed) tag symbol.
   A following '{' parses the enumerator or member list and completes
   the definition via check_fields()/struct_layout(). */
4235 static void struct_decl(CType *type, int u)
4237 int v, c, size, align, flexible;
4238 int bit_size, bsize, bt;
4239 Sym *s, *ss, **ps;
4240 AttributeDef ad, ad1;
4241 CType type1, btype;
4243 memset(&ad, 0, sizeof ad);
4244 next();
4245 parse_attribute(&ad);
4246 if (tok != '{') {
4247 v = tok;
4248 next();
4249 /* struct already defined ? return it */
4250 if (v < TOK_IDENT)
4251 expect("struct/union/enum name");
4252 s = struct_find(v);
4253 if (s && (s->sym_scope == local_scope || tok != '{')) {
4254 if (u == s->type.t)
4255 goto do_decl;
4256 if (u == VT_ENUM && IS_ENUM(s->type.t))
4257 goto do_decl;
4258 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4260 } else {
/* unnamed (or newly-scoped) tag: generate an anonymous symbol */
4261 v = anon_sym++;
4263 /* Record the original enum/struct/union token. */
4264 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4265 type1.ref = NULL;
4266 /* we put an undefined size for struct/union */
4267 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4268 s->r = 0; /* default alignment is zero as gcc */
4269 do_decl:
4270 type->t = s->type.t;
4271 type->ref = s;
4273 if (tok == '{') {
4274 next();
4275 if (s->c != -1)
4276 tcc_error("struct/union/enum already defined");
/* mark the definition as being in progress */
4277 s->c = -2;
4278 /* cannot be empty */
4279 /* non empty enums are not allowed */
4280 ps = &s->next;
4281 if (u == VT_ENUM) {
/* pl/nl track the largest positive / most negative enumerator */
4282 long long ll = 0, pl = 0, nl = 0;
4283 CType t;
4284 t.ref = s;
4285 /* enum symbols have static storage */
4286 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4287 for(;;) {
4288 v = tok;
4289 if (v < TOK_UIDENT)
4290 expect("identifier");
4291 ss = sym_find(v);
4292 if (ss && !local_stack)
4293 tcc_error("redefinition of enumerator '%s'",
4294 get_tok_str(v, NULL));
4295 next();
4296 if (tok == '=') {
4297 next();
4298 ll = expr_const64();
4300 ss = sym_push(v, &t, VT_CONST, 0);
4301 ss->enum_val = ll;
4302 *ps = ss, ps = &ss->next;
4303 if (ll < nl)
4304 nl = ll;
4305 if (ll > pl)
4306 pl = ll;
4307 if (tok != ',')
4308 break;
4309 next();
4310 ll++;
4311 /* NOTE: we accept a trailing comma */
4312 if (tok == '}')
4313 break;
4315 skip('}');
4316 /* set integral type of the enum */
/* pick int / unsigned / (long) long long depending on the range
   covered by the enumerators */
4317 t.t = VT_INT;
4318 if (nl >= 0) {
4319 if (pl != (unsigned)pl)
4320 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4321 t.t |= VT_UNSIGNED;
4322 } else if (pl != (int)pl || nl != (int)nl)
4323 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4324 s->type.t = type->t = t.t | VT_ENUM;
4325 s->c = 0;
4326 /* set type for enum members */
4327 for (ss = s->next; ss; ss = ss->next) {
4328 ll = ss->enum_val;
4329 if (ll == (int)ll) /* default is int if it fits */
4330 continue;
4331 if (t.t & VT_UNSIGNED) {
4332 ss->type.t |= VT_UNSIGNED;
4333 if (ll == (unsigned)ll)
4334 continue;
4336 ss->type.t = (ss->type.t & ~VT_BTYPE)
4337 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4339 } else {
/* struct/union member list; c records whether a real (named or
   struct-typed) member was seen, for flexible-array checking */
4340 c = 0;
4341 flexible = 0;
4342 while (tok != '}') {
4343 if (tok == TOK_STATIC_ASSERT) {
4344 do_Static_assert();
4345 continue;
4347 if (!parse_btype(&btype, &ad1, 0)) {
4348 skip(';');
4349 continue;
4351 while (1) {
4352 if (flexible)
4353 tcc_error("flexible array member '%s' not at the end of struct",
4354 get_tok_str(v, NULL));
4355 bit_size = -1;
4356 v = 0;
4357 type1 = btype;
4358 if (tok != ':') {
4359 if (tok != ';')
4360 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4361 if (v == 0) {
4362 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4363 expect("identifier");
4364 else {
/* MS extension: allow an unnamed member of named struct type */
4365 int v = btype.ref->v;
4366 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4367 if (tcc_state->ms_extensions == 0)
4368 expect("identifier");
4372 if (type_size(&type1, &align) < 0) {
4373 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4374 flexible = 1;
4375 else
4376 tcc_error("field '%s' has incomplete type",
4377 get_tok_str(v, NULL));
4379 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4380 (type1.t & VT_BTYPE) == VT_VOID ||
4381 (type1.t & VT_STORAGE))
4382 tcc_error("invalid type for '%s'",
4383 get_tok_str(v, NULL));
4385 if (tok == ':') {
4386 next();
4387 bit_size = expr_const();
4388 /* XXX: handle v = 0 case for messages */
4389 if (bit_size < 0)
4390 tcc_error("negative width in bit-field '%s'",
4391 get_tok_str(v, NULL));
4392 if (v && bit_size == 0)
4393 tcc_error("zero width for bit-field '%s'",
4394 get_tok_str(v, NULL));
4395 parse_attribute(&ad1);
4397 size = type_size(&type1, &align);
4398 if (bit_size >= 0) {
4399 bt = type1.t & VT_BTYPE;
4400 if (bt != VT_INT &&
4401 bt != VT_BYTE &&
4402 bt != VT_SHORT &&
4403 bt != VT_BOOL &&
4404 bt != VT_LLONG)
4405 tcc_error("bitfields must have scalar type");
4406 bsize = size * 8;
4407 if (bit_size > bsize) {
4408 tcc_error("width of '%s' exceeds its type",
4409 get_tok_str(v, NULL));
4410 } else if (bit_size == bsize
4411 && !ad.a.packed && !ad1.a.packed) {
4412 /* no need for bit fields */
4414 } else if (bit_size == 64) {
4415 tcc_error("field width 64 not implemented");
4416 } else {
4417 type1.t = (type1.t & ~VT_STRUCT_MASK)
4418 | VT_BITFIELD
4419 | (bit_size << (VT_STRUCT_SHIFT + 6));
4422 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4423 /* Remember we've seen a real field to check
4424 for placement of flexible array member. */
4425 c = 1;
4427 /* If member is a struct or bit-field, enforce
4428 placing into the struct (as anonymous). */
4429 if (v == 0 &&
4430 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4431 bit_size >= 0)) {
4432 v = anon_sym++;
4434 if (v) {
4435 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4436 ss->a = ad1.a;
4437 *ps = ss;
4438 ps = &ss->next;
4440 if (tok == ';' || tok == TOK_EOF)
4441 break;
4442 skip(',');
4444 skip(';');
4446 skip('}');
4447 parse_attribute(&ad);
4448 if (ad.cleanup_func) {
4449 tcc_warning("attribute '__cleanup__' ignored on type");
/* two passes: first detects duplicates, second clears the marks */
4451 check_fields(type, 1);
4452 check_fields(type, 0);
4453 struct_layout(type, &ad);
4454 if (debug_modes)
4455 tcc_debug_fix_anon(tcc_state, type);
/* Merge the symbol and function attributes recorded on S (e.g. those
   of a typedef) into *ad. */
4460 static void sym_to_attr(AttributeDef *ad, Sym *s)
4462 merge_symattr(&ad->a, &s->a);
4463 merge_funcattr(&ad->f, &s->f);
4466 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4467 are added to the element type, copied because it could be a typedef. */
4468 static void parse_btype_qualify(CType *type, int qualifiers)
4470 while (type->t & VT_ARRAY) {
/* push a private copy of the element type so the original (possibly
   typedef'ed) chain is left unmodified */
4471 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4472 type = &type->ref->type;
4474 type->t |= qualifiers;
4477 /* return 0 if no type declaration. otherwise, return the basic type
4478 and skip it.
/* 'bt' records the basic type seen so far and 'st' the size modifier
   (short/long); -1 = not yet seen, -2 = supplied by a typedef name.
   'ignore_label' makes "ident :" be treated as a label, not a type. */
4480 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
4482 int t, u, bt, st, type_found, typespec_found, g, n;
4483 Sym *s;
4484 CType type1;
4486 memset(ad, 0, sizeof(AttributeDef));
4487 type_found = 0;
4488 typespec_found = 0;
4489 t = VT_INT;
4490 bt = st = -1;
4491 type->ref = NULL;
4493 while(1) {
4494 switch(tok) {
4495 case TOK_EXTENSION:
4496 /* currently, we really ignore extension */
4497 next();
4498 continue;
4500 /* basic types */
4501 case TOK_CHAR:
4502 u = VT_BYTE;
4503 basic_type:
4504 next();
4505 basic_type1:
/* validate the specifier combination ("long long", "short int", ...) */
4506 if (u == VT_SHORT || u == VT_LONG) {
4507 if (st != -1 || (bt != -1 && bt != VT_INT))
4508 tmbt: tcc_error("too many basic types");
4509 st = u;
4510 } else {
4511 if (bt != -1 || (st != -1 && u != VT_INT))
4512 goto tmbt;
4513 bt = u;
4515 if (u != VT_INT)
4516 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4517 typespec_found = 1;
4518 break;
4519 case TOK_VOID:
4520 u = VT_VOID;
4521 goto basic_type;
4522 case TOK_SHORT:
4523 u = VT_SHORT;
4524 goto basic_type;
4525 case TOK_INT:
4526 u = VT_INT;
4527 goto basic_type;
4528 case TOK_ALIGNAS:
4529 { int n;
4530 AttributeDef ad1;
4531 next();
4532 skip('(');
4533 memset(&ad1, 0, sizeof(AttributeDef));
/* _Alignas accepts either a type name or a constant expression */
4534 if (parse_btype(&type1, &ad1, 0)) {
4535 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4536 if (ad1.a.aligned)
4537 n = 1 << (ad1.a.aligned - 1);
4538 else
4539 type_size(&type1, &n);
4540 } else {
4541 n = expr_const();
4542 if (n < 0 || (n & (n - 1)) != 0)
4543 tcc_error("alignment must be a positive power of two");
4545 skip(')');
4546 ad->a.aligned = exact_log2p1(n);
4548 continue;
4549 case TOK_LONG:
4550 if ((t & VT_BTYPE) == VT_DOUBLE) {
4551 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4552 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4553 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4554 } else {
4555 u = VT_LONG;
4556 goto basic_type;
4558 next();
4559 break;
4560 #ifdef TCC_TARGET_ARM64
4561 case TOK_UINT128:
4562 /* GCC's __uint128_t appears in some Linux header files. Make it a
4563 synonym for long double to get the size and alignment right. */
4564 u = VT_LDOUBLE;
4565 goto basic_type;
4566 #endif
4567 case TOK_BOOL:
4568 u = VT_BOOL;
4569 goto basic_type;
4570 case TOK_COMPLEX:
4571 tcc_error("_Complex is not yet supported");
4572 case TOK_FLOAT:
4573 u = VT_FLOAT;
4574 goto basic_type;
4575 case TOK_DOUBLE:
4576 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4577 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4578 } else {
4579 u = VT_DOUBLE;
4580 goto basic_type;
4582 next();
4583 break;
4584 case TOK_ENUM:
4585 struct_decl(&type1, VT_ENUM);
4586 basic_type2:
4587 u = type1.t;
4588 type->ref = type1.ref;
4589 goto basic_type1;
4590 case TOK_STRUCT:
4591 struct_decl(&type1, VT_STRUCT);
4592 goto basic_type2;
4593 case TOK_UNION:
4594 struct_decl(&type1, VT_UNION);
4595 goto basic_type2;
4597 /* type modifiers */
4598 case TOK__Atomic:
4599 next();
4600 type->t = t;
4601 parse_btype_qualify(type, VT_ATOMIC);
4602 t = type->t;
/* _Atomic(type) form: parse the parenthesized type specification */
4603 if (tok == '(') {
4604 parse_expr_type(&type1);
4605 /* remove all storage modifiers except typedef */
4606 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4607 if (type1.ref)
4608 sym_to_attr(ad, type1.ref);
4609 goto basic_type2;
4611 break;
4612 case TOK_CONST1:
4613 case TOK_CONST2:
4614 case TOK_CONST3:
4615 type->t = t;
4616 parse_btype_qualify(type, VT_CONSTANT);
4617 t = type->t;
4618 next();
4619 break;
4620 case TOK_VOLATILE1:
4621 case TOK_VOLATILE2:
4622 case TOK_VOLATILE3:
4623 type->t = t;
4624 parse_btype_qualify(type, VT_VOLATILE);
4625 t = type->t;
4626 next();
4627 break;
4628 case TOK_SIGNED1:
4629 case TOK_SIGNED2:
4630 case TOK_SIGNED3:
4631 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4632 tcc_error("signed and unsigned modifier");
4633 t |= VT_DEFSIGN;
4634 next();
4635 typespec_found = 1;
4636 break;
4637 case TOK_REGISTER:
4638 case TOK_AUTO:
4639 case TOK_RESTRICT1:
4640 case TOK_RESTRICT2:
4641 case TOK_RESTRICT3:
4642 next();
4643 break;
4644 case TOK_UNSIGNED:
4645 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4646 tcc_error("signed and unsigned modifier");
4647 t |= VT_DEFSIGN | VT_UNSIGNED;
4648 next();
4649 typespec_found = 1;
4650 break;
4652 /* storage */
4653 case TOK_EXTERN:
4654 g = VT_EXTERN;
4655 goto storage;
4656 case TOK_STATIC:
4657 g = VT_STATIC;
4658 goto storage;
4659 case TOK_TYPEDEF:
4660 g = VT_TYPEDEF;
4661 goto storage;
4662 storage:
4663 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4664 tcc_error("multiple storage classes");
4665 t |= g;
4666 next();
4667 break;
4668 case TOK_INLINE1:
4669 case TOK_INLINE2:
4670 case TOK_INLINE3:
4671 t |= VT_INLINE;
4672 next();
4673 break;
4674 case TOK_NORETURN3:
4675 next();
4676 ad->f.func_noreturn = 1;
4677 break;
4678 /* GNUC attribute */
4679 case TOK_ATTRIBUTE1:
4680 case TOK_ATTRIBUTE2:
4681 parse_attribute(ad);
/* a __mode__ attribute overrides the basic integer width */
4682 if (ad->attr_mode) {
4683 u = ad->attr_mode -1;
4684 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4686 continue;
4687 /* GNUC typeof */
4688 case TOK_TYPEOF1:
4689 case TOK_TYPEOF2:
4690 case TOK_TYPEOF3:
4691 next();
4692 parse_expr_type(&type1);
4693 /* remove all storage modifiers except typedef */
4694 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4695 if (type1.ref)
4696 sym_to_attr(ad, type1.ref);
4697 goto basic_type2;
4698 case TOK_THREAD_LOCAL:
4699 tcc_error("_Thread_local is not implemented");
4700 default:
/* maybe a typedef name; stop if a type was already parsed */
4701 if (typespec_found)
4702 goto the_end;
4703 s = sym_find(tok);
4704 if (!s || !(s->type.t & VT_TYPEDEF))
4705 goto the_end;
4707 n = tok, next();
4708 if (tok == ':' && ignore_label) {
4709 /* ignore if it's a label */
4710 unget_tok(n);
4711 goto the_end;
4714 t &= ~(VT_BTYPE|VT_LONG);
/* keep any extra qualifiers seen so far and re-apply them on top of
   the typedef'ed type */
4715 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4716 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4717 type->ref = s->type.ref;
4718 if (t)
4719 parse_btype_qualify(type, t);
4720 t = type->t;
4721 /* get attributes from typedef */
4722 sym_to_attr(ad, s);
4723 typespec_found = 1;
4724 st = bt = -2;
4725 break;
4727 type_found = 1;
4729 the_end:
4730 if (tcc_state->char_is_unsigned) {
4731 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4732 t |= VT_UNSIGNED;
4734 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4735 bt = t & (VT_BTYPE|VT_LONG);
4736 if (bt == VT_LONG)
4737 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4738 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4739 if (bt == VT_LDOUBLE)
4740 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4741 #endif
4742 type->t = t;
4743 return type_found;
4746 /* convert a function parameter type (array to pointer and function to
4747 function pointer) */
4748 static inline void convert_parameter_type(CType *pt)
4750 /* remove const and volatile qualifiers (XXX: const could be used
4751 to indicate a const function parameter */
4752 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4753 /* array must be transformed to pointer according to ANSI C */
4754 pt->t &= ~VT_ARRAY;
4755 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4756 mk_pointer(pt);
/* Parse '(' followed by one or more string literals into *astr; the
   closing ')' is left for the caller to skip. */
4760 ST_FUNC void parse_asm_str(CString *astr)
4762 skip('(');
4763 parse_mult_str(astr, "string constant");
4766 /* Parse an asm label and return the token */
4767 static int asm_label_instr(void)
4769 int v;
4770 CString astr;
4772 next();
4773 parse_asm_str(&astr);
4774 skip(')');
4775 #ifdef ASM_DEBUG
4776 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4777 #endif
4778 v = tok_alloc(astr.data, astr.size - 1)->tok;
4779 cstr_free(&astr);
4780 return v;
/* Parse the declarator suffixes: a function parameter list '(...)' or
   an array declarator '[...]' (recursing for further dimensions).
   'storage' holds the declaration's storage flags, 'td' the TYPE_*
   parsing flags.  Returns 0 when the '(' turned out to begin a nested
   (recursive) declarator instead of a parameter list, 1 otherwise. */
4783 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4785 int n, l, t1, arg_size, align;
4786 Sym **plast, *s, *first;
4787 AttributeDef ad1;
4788 CType pt;
4789 TokenString *vla_array_tok = NULL;
4790 int *vla_array_str = NULL;
4792 if (tok == '(') {
4793 /* function type, or recursive declarator (return if so) */
4794 next();
4795 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4796 return 0;
4797 if (tok == ')')
4798 l = 0;
4799 else if (parse_btype(&pt, &ad1, 0))
4800 l = FUNC_NEW;
4801 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4802 merge_attr (ad, &ad1);
4803 return 0;
4804 } else
4805 l = FUNC_OLD;
4807 first = NULL;
4808 plast = &first;
4809 arg_size = 0;
/* parameter names live in their own scope */
4810 ++local_scope;
4811 if (l) {
4812 for(;;) {
4813 /* read param name and compute offset */
4814 if (l != FUNC_OLD) {
4815 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4816 break;
4817 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4818 if ((pt.t & VT_BTYPE) == VT_VOID)
4819 tcc_error("parameter declared as void");
4820 if (n == 0)
4821 n = SYM_FIELD;
4822 } else {
4823 n = tok;
4824 pt.t = VT_VOID; /* invalid type */
4825 pt.ref = NULL;
4826 next();
4828 if (n < TOK_UIDENT)
4829 expect("identifier");
4830 convert_parameter_type(&pt);
4831 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4832 /* these symbols may be evaluated for VLArrays (see below, under
4833 nocode_wanted) which is why we push them here as normal symbols
4834 temporarily. Example: int func(int a, int b[++a]); */
4835 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4836 *plast = s;
4837 plast = &s->next;
4838 if (tok == ')')
4839 break;
4840 skip(',');
4841 if (l == FUNC_NEW && tok == TOK_DOTS) {
4842 l = FUNC_ELLIPSIS;
4843 next();
4844 break;
4846 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4847 tcc_error("invalid type");
4849 } else
4850 /* if no parameters, then old type prototype */
4851 l = FUNC_OLD;
4852 skip(')');
4853 /* remove parameter symbols from token table, keep on stack */
4854 if (first) {
4855 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4856 for (s = first; s; s = s->next)
4857 s->v |= SYM_FIELD;
4859 --local_scope;
4860 /* NOTE: const is ignored in returned type as it has a special
4861 meaning in gcc / C++ */
4862 type->t &= ~VT_CONSTANT;
4863 /* some ancient pre-K&R C allows a function to return an array
4864 and the array brackets to be put after the arguments, such
4865 that "int c()[]" means something like "int[] c()" */
4866 if (tok == '[') {
4867 next();
4868 skip(']'); /* only handle simple "[]" */
4869 mk_pointer(type);
4871 /* we push a anonymous symbol which will contain the function prototype */
4872 ad->f.func_args = arg_size;
4873 ad->f.func_type = l;
4874 s = sym_push(SYM_FIELD, type, 0, 0);
4875 s->a = ad->a;
4876 s->f = ad->f;
4877 s->next = first;
4878 type->t = VT_FUNC;
4879 type->ref = s;
4880 } else if (tok == '[') {
4881 int saved_nocode_wanted = nocode_wanted;
4882 /* array definition */
4883 next();
4884 n = -1;
4885 t1 = 0;
4886 if (td & TYPE_PARAM) while (1) {
4887 /* XXX The optional type-quals and static should only be accepted
4888 in parameter decls. The '*' as well, and then even only
4889 in prototypes (not function defs). */
4890 switch (tok) {
4891 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4892 case TOK_CONST1:
4893 case TOK_VOLATILE1:
4894 case TOK_STATIC:
4895 case '*':
4896 next();
4897 continue;
4898 default:
4899 break;
4901 if (tok != ']') {
4902 /* Code generation is not done now but has to be done
4903 at start of function. Save code here for later use. */
4904 nocode_wanted = 1;
/* tokens are saved in vla_array_tok and re-parsed immediately (via
   begin_macro) to determine the array kind; the saved string is
   attached to the type below for re-evaluation at function start */
4905 skip_or_save_block(&vla_array_tok);
4906 unget_tok(0);
4907 vla_array_str = vla_array_tok->str;
4908 begin_macro(vla_array_tok, 2);
4909 next();
4910 gexpr();
4911 end_macro();
4912 next();
4913 goto check;
4915 break;
4917 } else if (tok != ']') {
4918 if (!local_stack || (storage & VT_STATIC))
4919 vpushi(expr_const());
4920 else {
4921 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4922 length must always be evaluated, even under nocode_wanted,
4923 so that its size slot is initialized (e.g. under sizeof
4924 or typeof). */
4925 nocode_wanted = 0;
4926 gexpr();
4928 check:
/* constant expression: fixed-size array; otherwise it's a VLA */
4929 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4930 n = vtop->c.i;
4931 if (n < 0)
4932 tcc_error("invalid array size");
4933 } else {
4934 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4935 tcc_error("size of variable length array should be an integer");
4936 n = 0;
4937 t1 = VT_VLA;
4940 skip(']');
4941 /* parse next post type */
4942 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
4944 if ((type->t & VT_BTYPE) == VT_FUNC)
4945 tcc_error("declaration of an array of functions");
4946 if ((type->t & VT_BTYPE) == VT_VOID
4947 || type_size(type, &align) < 0)
4948 tcc_error("declaration of an array of incomplete type elements");
4950 t1 |= type->t & VT_VLA;
4952 if (t1 & VT_VLA) {
4953 if (n < 0) {
4954 if (td & TYPE_NEST)
4955 tcc_error("need explicit inner array size in VLAs");
4957 else {
/* reserve a local slot holding the runtime size of this VLA */
4958 loc -= type_size(&int_type, &align);
4959 loc &= -align;
4960 n = loc;
4962 vpush_type_size(type, &align);
4963 gen_op('*');
4964 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4965 vswap();
4966 vstore();
4969 if (n != -1)
4970 vpop();
/* restore the caller's nocode_wanted state */
4971 nocode_wanted = saved_nocode_wanted;
4973 /* we push an anonymous symbol which will contain the array
4974 element type */
4975 s = sym_push(SYM_FIELD, type, 0, n);
4976 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4977 type->ref = s;
4979 if (vla_array_str) {
4980 if (t1 & VT_VLA)
4981 s->vla_array_str = vla_array_str;
4982 else
4983 tok_str_free_str(vla_array_str);
4986 return 1;
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl().  If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls.  */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    CType *post, *ret;
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post = ret = type;

    /* consume leading '*' declarators together with their qualifiers */
    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK__Atomic:
            qualifiers |= VT_ATOMIC;
            goto redo;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* 'restrict' is accepted but otherwise ignored here */
            goto redo;
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            break;
        }
        mk_pointer(type);
        type->t |= qualifiers;
        if (ret == type)
            /* innermost pointed to type is the one for the first derivation */
            ret = pointed_type(type);
    }

    if (tok == '(') {
        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this.  */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any).  */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
            skip(')');
        } else
            goto abstract;
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        *v = tok;
        next();
    } else {
    abstract:
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
        *v = 0;
    }
    /* array/function suffixes apply to the nested declarator, if any */
    post_type(post, ad, post != ret ? 0 : storage,
              td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
    parse_attribute(ad);
    type->t |= storage;
    return ret;
}
5071 /* indirection with full error checking and bound check */
5072 ST_FUNC void indir(void)
5074 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5075 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5076 return;
5077 expect("pointer");
5079 if (vtop->r & VT_LVAL)
5080 gv(RC_INT);
5081 vtop->type = *pointed_type(&vtop->type);
5082 /* Arrays and functions are never lvalues */
5083 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5084 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5085 vtop->r |= VT_LVAL;
5086 /* if bound checking, the referenced pointer must be checked */
5087 #ifdef CONFIG_TCC_BCHECK
5088 if (tcc_state->do_bounds_check)
5089 vtop->r |= VT_MUSTBOUND;
5090 #endif
5094 /* pass a parameter to a function and do type checking and casting */
5095 static void gfunc_param_typed(Sym *func, Sym *arg)
5097 int func_type;
5098 CType type;
5100 func_type = func->f.func_type;
5101 if (func_type == FUNC_OLD ||
5102 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5103 /* default casting : only need to convert float to double */
5104 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5105 gen_cast_s(VT_DOUBLE);
5106 } else if (vtop->type.t & VT_BITFIELD) {
5107 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5108 type.ref = vtop->type.ref;
5109 gen_cast(&type);
5110 } else if (vtop->r & VT_MUSTCAST) {
5111 force_charshort_cast();
5113 } else if (arg == NULL) {
5114 tcc_error("too many arguments to function");
5115 } else {
5116 type = arg->type;
5117 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5118 gen_assign_cast(&type);
/* parse an expression and return its type without any side effect. */
static void expr_type(CType *type, void (*expr_fn)(void))
{
    /* evaluation is suppressed; the expression is parsed only for its type */
    nocode_wanted++;
    expr_fn();
    *type = vtop->type;
    vpop();
    nocode_wanted--;
}
5132 /* parse an expression of the form '(type)' or '(expr)' and return its
5133 type */
5134 static void parse_expr_type(CType *type)
5136 int n;
5137 AttributeDef ad;
5139 skip('(');
5140 if (parse_btype(type, &ad, 0)) {
5141 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5142 } else {
5143 expr_type(type, gexpr);
5145 skip(')');
5148 static void parse_type(CType *type)
5150 AttributeDef ad;
5151 int n;
5153 if (!parse_btype(type, &ad, 0)) {
5154 expect("type");
5156 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse the parenthesized argument list of a builtin.  'args' is a string
   of per-argument codes: 't' type-name, 'e' plain expression, 'v'/'V'
   (const-)void*, 's'/'S' (const-)char*, 'i' int, 'l' size_t.  If 'nc' is
   set, the arguments are parsed under nocode_wanted. */
static void parse_builtin_params(int nc, const char *args)
{
    char c, sep = '(';
    CType type;
    if (nc)
        nocode_wanted++;
    next();
    if (*args == 0)
        skip(sep);
    while ((c = *args++)) {
        skip(sep);
        sep = ',';
        if (c == 't') {
            parse_type(&type);
            vpush(&type);
            continue;
        }
        expr_eq();
        type.ref = NULL;
        type.t = 0;
        switch (c) {
        case 'e':
            /* plain expression: no conversion applied */
            continue;
        case 'V':
            type.t = VT_CONSTANT;
            /* fall through */
        case 'v':
            type.t |= VT_VOID;
            mk_pointer (&type);
            break;
        case 'S':
            type.t = VT_CONSTANT;
            /* fall through */
        case 's':
            type.t |= char_type.t;
            mk_pointer (&type);
            break;
        case 'i':
            type.t = VT_INT;
            break;
        case 'l':
            type.t = VT_SIZE_T;
            break;
        default:
            break;
        }
        /* convert the argument to the expected type */
        gen_assign_cast(&type);
    }
    skip(')');
    if (nc)
        nocode_wanted--;
}
/* Parse one __atomic_* builtin call ('atok') and lower it to a call to
   the runtime helper "<name>_<size>" (e.g. __atomic_store_4), driven by
   the per-builtin argument/return template below. */
static void parse_atomic(int atok)
{
    int size, align, arg, t, save = 0;
    CType *atom, *atom_ptr, ct = {0};
    SValue store;
    char buf[40];
    static const char *const templates[] = {
        /*
         * Each entry consists of callback and function template.
         * The template represents argument types and return type.
         *
         * ? void (return-only)
         * b bool
         * a atomic
         * A read-only atomic
         * p pointer to memory
         * v value
         * l load pointer
         * s save pointer
         * m memory model
         */

        /* keep in order of appearance in tcctok.h: */
        /* __atomic_store */ "alm.?",
        /* __atomic_load */ "Asm.v",
        /* __atomic_exchange */ "alsm.v",
        /* __atomic_compare_exchange */ "aplbmm.b",
        /* __atomic_fetch_add */ "avm.v",
        /* __atomic_fetch_sub */ "avm.v",
        /* __atomic_fetch_or */ "avm.v",
        /* __atomic_fetch_xor */ "avm.v",
        /* __atomic_fetch_and */ "avm.v",
        /* __atomic_fetch_nand */ "avm.v",
        /* __atomic_add_fetch */ "avm.v",
        /* __atomic_sub_fetch */ "avm.v",
        /* __atomic_or_fetch */ "avm.v",
        /* __atomic_xor_fetch */ "avm.v",
        /* __atomic_and_fetch */ "avm.v",
        /* __atomic_nand_fetch */ "avm.v"
    };
    const char *template = templates[(atok - TOK___atomic_store)];

    atom = atom_ptr = NULL;
    size = 0; /* pacify compiler */
    next();
    skip('(');
    for (arg = 0;;) {
        expr_eq();
        switch (template[arg]) {
        case 'a':
        case 'A':
            /* the atomic pointer argument fixes 'atom'/'size' for the rest */
            atom_ptr = &vtop->type;
            if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            atom = pointed_type(atom_ptr);
            size = type_size(atom, &align);
            if (size > 8
                || (size & (size - 1))
                || (atok > TOK___atomic_compare_exchange
                    && (0 == btype_size(atom->t & VT_BTYPE)
                        || (atom->t & VT_BTYPE) == VT_PTR)))
                expect("integral or integer-sized pointer target type");
            /* GCC does not care either: */
            /* if (!(atom->t & VT_ATOMIC))
                tcc_warning("pointer target declaration is missing '_Atomic'"); */
            break;

        case 'p':
            if ((vtop->type.t & VT_BTYPE) != VT_PTR
                || type_size(pointed_type(&vtop->type), &align) != size)
                tcc_error("pointer target type mismatch in argument %d", arg + 1);
            gen_assign_cast(atom_ptr);
            break;
        case 'v':
            gen_assign_cast(atom);
            break;
        case 'l':
            indir();
            gen_assign_cast(atom);
            break;
        case 's':
            /* remember where to store the result after the call */
            save = 1;
            indir();
            store = *vtop;
            vpop();
            break;
        case 'm':
            gen_assign_cast(&int_type);
            break;
        case 'b':
            ct.t = VT_BOOL;
            gen_assign_cast(&ct);
            break;
        }
        if ('.' == template[++arg])
            break;
        skip(',');
    }
    skip(')');

    /* determine the return type from the character after '.' */
    ct.t = VT_VOID;
    switch (template[arg + 1]) {
    case 'b':
        ct.t = VT_BOOL;
        break;
    case 'v':
        ct = *atom;
        break;
    }

    sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
    vpush_helper_func(tok_alloc_const(buf));
    vrott(arg - save + 1);
    gfunc_call(arg - save);

    vpush(&ct);
    PUT_R_RET(vtop, ct.t);
    t = ct.t & VT_BTYPE;
    if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
#ifdef PROMOTE_RET
        vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
        vtop->type.t = VT_INT;
#endif
    }
    gen_cast(&ct);
    if (save) {
        /* store the helper's result through the saved pointer */
        vpush(&ct);
        *vtop = store;
        vswap();
        vstore();
    }
}
/* Parse a unary expression (constants, strings, casts, compound literals,
   statement expressions, builtins, identifiers) followed by its postfix
   operators (++/--, '.'/'->', '[]', function call) and generate code for
   it.  The result is left on the value stack (vtop). */
ST_FUNC void unary(void)
{
    int n, t, align, size, r, sizeof_caller;
    CType type;
    Sym *s;
    AttributeDef ad;

    /* generate line number info */
    if (debug_modes)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);

    sizeof_caller = in_sizeof;
    in_sizeof = 0;
    type.ref = NULL;
    /* XXX: GCC 2.95.3 does not generate a table although it should be
       better here */
 tok_next:
    switch(tok) {
    case TOK_EXTENSION:
        next();
        goto tok_next;
    case TOK_LCHAR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT|VT_UNSIGNED;
        goto push_tokc;
#endif
    case TOK_CINT:
    case TOK_CCHAR:
        t = VT_INT;
 push_tokc:
        type.t = t;
        vsetc(&type, VT_CONST, &tokc);
        next();
        break;
    case TOK_CUINT:
        t = VT_INT | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CLLONG:
        t = VT_LLONG;
        goto push_tokc;
    case TOK_CULLONG:
        t = VT_LLONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CFLOAT:
        t = VT_FLOAT;
        goto push_tokc;
    case TOK_CDOUBLE:
        t = VT_DOUBLE;
        goto push_tokc;
    case TOK_CLDOUBLE:
        t = VT_LDOUBLE;
        goto push_tokc;
    case TOK_CLONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
        goto push_tokc;
    case TOK_CULONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK___FUNCTION__:
        if (!gnu_ext)
            goto tok_identifier;
        /* fall thru */
    case TOK___FUNC__:
        {
            Section *sec;
            int len;
            /* special function name identifier */
            len = strlen(funcname) + 1;
            /* generate char[len] type */
            type.t = char_type.t;
            if (tcc_state->warn_write_strings & WARN_ON)
                type.t |= VT_CONSTANT;
            mk_pointer(&type);
            type.t |= VT_ARRAY;
            type.ref->c = len;
            sec = rodata_section;
            vpush_ref(&type, sec, sec->data_offset, len);
            if (!NODATA_WANTED)
                memcpy(section_ptr_add(sec, len), funcname, len);
            next();
        }
        break;
    case TOK_LSTR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
#else
        t = VT_INT;
#endif
        goto str_init;
    case TOK_STR:
        /* string parsing */
        t = char_type.t;
    str_init:
        if (tcc_state->warn_write_strings & WARN_ON)
            t |= VT_CONSTANT;
        type.t = t;
        mk_pointer(&type);
        type.t |= VT_ARRAY;
        memset(&ad, 0, sizeof(AttributeDef));
        ad.section = rodata_section;
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
        break;
    case '(':
        next();
        /* cast ? */
        if (parse_btype(&type, &ad, 0)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            skip(')');
            /* check ISOC99 compound literal */
            if (tok == '{') {
                /* data is allocated locally by default */
                if (global_expr)
                    r = VT_CONST;
                else
                    r = VT_LOCAL;
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= VT_LVAL;
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
            } else {
                if (sizeof_caller) {
                    /* 'sizeof (type)': just push the type, caller handles it */
                    vpush(&type);
                    return;
                }
                unary();
                gen_cast(&type);
            }
        } else if (tok == '{') {
            int saved_nocode_wanted = nocode_wanted;
            if (const_wanted && !(nocode_wanted & unevalmask))
                expect("constant");
            if (0 == local_scope)
                tcc_error("statement expression outside of function");
            /* save all registers */
            save_regs(0);
            /* statement expression : we do not accept break/continue
               inside as GCC does.  We do retain the nocode_wanted state,
               as statement expressions can't ever be entered from the
               outside, so any reactivation of code emission (from labels
               or loop heads) can be disabled again after the end of it. */
            block(1);
            /* If the statement expr can be entered, then we retain the current
               nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
               If it can't be entered then the state is that from before the
               statement expression.  */
            if (saved_nocode_wanted)
                nocode_wanted = saved_nocode_wanted;
            skip(')');
        } else {
            gexpr();
            skip(')');
        }
        break;
    case '*':
        next();
        unary();
        indir();
        break;
    case '&':
        next();
        unary();
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & (VT_ARRAY | VT_VLA)))
            test_lvalue();
        if (vtop->sym)
            vtop->sym->a.addrtaken = 1;
        mk_pointer(&vtop->type);
        gaddrof();
        break;
    case '!':
        next();
        unary();
        gen_test_zero(TOK_EQ);
        break;
    case '~':
        next();
        unary();
        vpushi(-1);
        gen_op('^');
        break;
    case '+':
        next();
        unary();
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need an noop (otherwise -0.0 will be transformed
           into +0.0).  */
        if (!is_float(vtop->type.t)) {
            vpushi(0);
            gen_op('+');
        }
        break;
    case TOK_SIZEOF:
    case TOK_ALIGNOF1:
    case TOK_ALIGNOF2:
    case TOK_ALIGNOF3:
        t = tok;
        next();
        in_sizeof++;
        expr_type(&type, unary); /* Perform a in_sizeof = 0; */
        if (t == TOK_SIZEOF) {
            vpush_type_size(&type, &align);
            gen_cast_s(VT_SIZE_T);
        } else {
            type_size(&type, &align);
            s = NULL;
            if (vtop[1].r & VT_SYM)
                s = vtop[1].sym; /* hack: accessing previous vtop */
            if (s && s->a.aligned)
                align = 1 << (s->a.aligned - 1);
            vpushs(align);
        }
        break;

    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
        parse_builtin_params(0, "ee");
        vpop();
        break;
    case TOK_builtin_types_compatible_p:
        parse_builtin_params(0, "tt");
        vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
        vtop -= 2;
        vpushi(n);
        break;
    case TOK_builtin_choose_expr:
        {
            int64_t c;
            next();
            skip('(');
            c = expr_const64();
            skip(',');
            /* the unselected branch is parsed under nocode_wanted and popped */
            if (!c) {
                nocode_wanted++;
            }
            expr_eq();
            if (!c) {
                vpop();
                nocode_wanted--;
            }
            skip(',');
            if (c) {
                nocode_wanted++;
            }
            expr_eq();
            if (c) {
                vpop();
                nocode_wanted--;
            }
            skip(')');
        }
        break;
    case TOK_builtin_constant_p:
        constant_p = 1;
        parse_builtin_params(1, "e");
        n = constant_p &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
            !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
        vtop--;
        vpushi(n);
        break;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
        {
            int tok1 = tok;
            int64_t level;
            next();
            skip('(');
            level = expr_const64();
            if (level < 0) {
                tcc_error("%s only takes positive integers",
                          tok1 == TOK_builtin_return_address ?
                          "__builtin_return_address" :
                          "__builtin_frame_address");
            }
            skip(')');
            type.t = VT_VOID;
            mk_pointer(&type);
            vset(&type, VT_LOCAL, 0);       /* local frame */
            while (level--) {
#ifdef TCC_TARGET_RISCV64
                vpushi(2*PTR_SIZE);
                gen_op('-');
#endif
                mk_pointer(&vtop->type);
                indir();                    /* -> parent frame */
            }
            if (tok1 == TOK_builtin_return_address) {
                // assume return address is just above frame pointer on stack
#ifdef TCC_TARGET_ARM
                vpushi(2*PTR_SIZE);
                gen_op('+');
#elif defined TCC_TARGET_RISCV64
                vpushi(PTR_SIZE);
                gen_op('-');
#else
                vpushi(PTR_SIZE);
                gen_op('+');
#endif
                mk_pointer(&vtop->type);
                indir();
            }
        }
        break;
#ifdef TCC_TARGET_RISCV64
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        gen_va_start();
        vstore();
        break;
#endif
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        vtop->r = r;
        vtop->type = char_pointer_type;
        vtop->c.i += 8;
        vstore();
        break;
#else
    case TOK_builtin_va_arg_types:
        parse_builtin_params(0, "t");
        vpushi(classify_x86_64_va_arg(&vtop->type));
        vswap();
        vpop();
        break;
#endif
#endif

#ifdef TCC_TARGET_ARM64
    case TOK_builtin_va_start: {
        parse_builtin_params(0, "ee");
        //xx check types
        gen_va_start();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
    case TOK_builtin_va_arg: {
        parse_builtin_params(0, "et");
        type = vtop->type;
        vpop();
        //xx check types
        gen_va_arg(&type);
        vtop->type = type;
        break;
    }
    case TOK___arm64_clear_cache: {
        parse_builtin_params(0, "ee");
        gen_clear_cache();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
#endif

    /* atomic operations */
    case TOK___atomic_store:
    case TOK___atomic_load:
    case TOK___atomic_exchange:
    case TOK___atomic_compare_exchange:
    case TOK___atomic_fetch_add:
    case TOK___atomic_fetch_sub:
    case TOK___atomic_fetch_or:
    case TOK___atomic_fetch_xor:
    case TOK___atomic_fetch_and:
    case TOK___atomic_fetch_nand:
    case TOK___atomic_add_fetch:
    case TOK___atomic_sub_fetch:
    case TOK___atomic_or_fetch:
    case TOK___atomic_xor_fetch:
    case TOK___atomic_and_fetch:
    case TOK___atomic_nand_fetch:
        parse_atomic(tok);
        break;

    /* pre operations */
    case TOK_INC:
    case TOK_DEC:
        t = tok;
        next();
        unary();
        inc(0, t);
        break;
    case '-':
        next();
        unary();
        if (is_float(vtop->type.t)) {
            gen_opif(TOK_NEG);
        } else {
            vpushi(0);
            vswap();
            gen_op('-');
        }
        break;
    case TOK_LAND:
        if (!gnu_ext)
            goto tok_identifier;
        next();
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
        if (!s) {
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
        } else {
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
        }
        if ((s->type.t & VT_BTYPE) != VT_PTR) {
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        }
        vpushsym(&s->type, s);
        next();
        break;

    case TOK_GENERIC:
    {
        CType controlling_type;
        int has_default = 0;
        int has_match = 0;
        int learn = 0;
        TokenString *str = NULL;
        int saved_const_wanted = const_wanted;

        next();
        skip('(');
        const_wanted = 0;
        expr_type(&controlling_type, expr_eq);
        controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
        if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&controlling_type);
        const_wanted = saved_const_wanted;
        for (;;) {
            learn = 0;
            skip(',');
            if (tok == TOK_DEFAULT) {
                if (has_default)
                    tcc_error("too many 'default'");
                has_default = 1;
                if (!has_match)
                    learn = 1;
                next();
            } else {
                AttributeDef ad_tmp;
                int itmp;
                CType cur_type;

                parse_btype(&cur_type, &ad_tmp, 0);
                type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
                if (compare_types(&controlling_type, &cur_type, 0)) {
                    if (has_match) {
                        tcc_error("type match twice");
                    }
                    has_match = 1;
                    learn = 1;
                }
            }
            skip(':');
            /* only the selected association's tokens are kept in 'str' */
            if (learn) {
                if (str)
                    tok_str_free(str);
                skip_or_save_block(&str);
            } else {
                skip_or_save_block(NULL);
            }
            if (tok == ')')
                break;
        }
        if (!str) {
            char buf[60];
            type_to_str(buf, sizeof buf, &controlling_type, NULL);
            tcc_error("type '%s' does not match any association", buf);
        }
        /* re-parse the saved tokens of the selected association */
        begin_macro(str, 1);
        next();
        expr_eq();
        if (tok != TOK_EOF)
            expect(",");
        end_macro();
        next();
        break;
    }
    // special qnan , snan and infinity values
    case TOK___NAN__:
        n = 0x7fc00000;
special_math_val:
        vpushi(n);
        vtop->type.t = VT_FLOAT;
        next();
        break;
    case TOK___SNAN__:
        n = 0x7f800001;
        goto special_math_val;
    case TOK___INF__:
        n = 0x7f800000;
        goto special_math_val;

    default:
    tok_identifier:
        t = tok;
        next();
        if (t < TOK_UIDENT)
            expect("identifier");
        s = sym_find(t);
        if (!s || IS_ASM_SYM(s)) {
            const char *name = get_tok_str(t, NULL);
            if (tok != '(')
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            tcc_warning_c(warn_implicit_function_declaration)(
                "implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type);
        }

        r = s->r;
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value.  */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;

        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser for
           regvars. */
        vtop->sym = s;

        if (r & VT_SYM) {
            vtop->c.i = 0;
        } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
            vtop->c.i = s->enum_val;
        }
        break;
    }

    /* post operations */
    while (1) {
        if (tok == TOK_INC || tok == TOK_DEC) {
            inc(1, tok);
            next();
        } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
            int qualifiers, cumofs = 0;
            /* field */
            if (tok == TOK_ARROW)
                indir();
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            test_lvalue();
            gaddrof();
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            if (tok == TOK_CDOUBLE)
                expect("field name");
            next();
            if (tok == TOK_CINT || tok == TOK_CUINT)
                expect("field name");
            s = find_field(&vtop->type, tok, &cumofs);
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            vpushi(cumofs);
            gen_op('+');
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check)
                    vtop->r |= VT_MUSTBOUND;
#endif
            }
            next();
        } else if (tok == '[') {
            next();
            gexpr();
            gen_op('+');
            indir();
            skip(']');
        } else if (tok == '(') {
            SValue ret;
            Sym *sa;
            int nb_args, ret_nregs, ret_align, regsize, variadic;

            /* function call  */
            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                        goto error_func;
                } else {
                error_func:
                    expect("function pointer");
                }
            } else {
                vtop->r &= ~VT_LVAL; /* no lvalue */
            }
            /* get return type */
            s = vtop->type.ref;
            next();
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;
            ret.r2 = VT_CONST;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                if (ret_nregs <= 0) {
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                    /* On arm64, a small struct is return in registers.
                       It is much easier to write it to memory if we know
                       that we are allowed to write some extra bytes, so
                       round the allocated space up to a power of 2:  */
                    if (size < 16)
                        while (size & (size - 1))
                            size = (size | (size - 1)) + 1;
#endif
                    loc = (loc - size) & -align;
                    ret.type = s->type;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing
                       problems */
                    vseti(VT_LOCAL, loc);
#ifdef CONFIG_TCC_BCHECK
                    if (tcc_state->do_bounds_check)
                        --loc;
#endif
                    ret.c = vtop->c;
                    if (ret_nregs < 0)
                        vtop--;
                    else
                        nb_args++;
                }
            } else {
                ret_nregs = 1;
                ret.type = s->type;
            }

            if (ret_nregs > 0) {
                /* return in register */
                ret.c.i = 0;
                PUT_R_RET(&ret, ret.type.t);
            }
            if (tok != ')') {
                for(;;) {
                    expr_eq();
                    gfunc_param_typed(s, sa);
                    nb_args++;
                    if (sa)
                        sa = sa->next;
                    if (tok == ')')
                        break;
                    skip(',');
                }
            }
            if (sa)
                tcc_error("too few arguments to function");
            skip(')');
            gfunc_call(nb_args);

            if (ret_nregs < 0) {
                vsetc(&ret.type, ret.r, &ret.c);
#ifdef TCC_TARGET_RISCV64
                arch_transfer_ret_regs(1);
#endif
            } else {
                /* return value */
                for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                    vsetc(&ret.type, r, &ret.c);
                    vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
                }

                /* handle packed struct return */
                if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                    int addr, offset;

                    size = type_size(&s->type, &align);
                    /* We're writing whole regs often, make sure there's enough
                       space.  Assume register size is power of 2.  */
                    if (regsize > align)
                        align = regsize;
                    loc = (loc - size) & -align;
                    addr = loc;
                    offset = 0;
                    for (;;) {
                        vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                        vswap();
                        vstore();
                        vtop--;
                        if (--ret_nregs == 0)
                            break;
                        offset += regsize;
                    }
                    vset(&s->type, VT_LOCAL | VT_LVAL, addr);
                }

                /* Promote char/short return values. This is matters only
                   for calling function that were not compiled by TCC and
                   only on some architectures.  For those where it doesn't
                   matter we expect things to be already promoted to int,
                   but not larger.  */
                t = s->type.t & VT_BTYPE;
                if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
#ifdef PROMOTE_RET
                    vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
                    vtop->type.t = VT_INT;
#endif
                }
            }
            if (s->f.func_noreturn) {
                if (debug_modes)
                    tcc_tcov_block_end(tcc_state, -1);
                CODE_OFF();
            }
        } else {
            break;
        }
    }
}
6093 #ifndef precedence_parser /* original top-down parser */
6095 static void expr_prod(void)
6097 int t;
6099 unary();
6100 while ((t = tok) == '*' || t == '/' || t == '%') {
6101 next();
6102 unary();
6103 gen_op(t);
6107 static void expr_sum(void)
6109 int t;
6111 expr_prod();
6112 while ((t = tok) == '+' || t == '-') {
6113 next();
6114 expr_prod();
6115 gen_op(t);
6119 static void expr_shift(void)
6121 int t;
6123 expr_sum();
6124 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6125 next();
6126 expr_sum();
6127 gen_op(t);
6131 static void expr_cmp(void)
6133 int t;
6135 expr_shift();
6136 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6137 t == TOK_ULT || t == TOK_UGE) {
6138 next();
6139 expr_shift();
6140 gen_op(t);
6144 static void expr_cmpeq(void)
6146 int t;
6148 expr_cmp();
6149 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6150 next();
6151 expr_cmp();
6152 gen_op(t);
6156 static void expr_and(void)
6158 expr_cmpeq();
6159 while (tok == '&') {
6160 next();
6161 expr_cmpeq();
6162 gen_op('&');
6166 static void expr_xor(void)
6168 expr_and();
6169 while (tok == '^') {
6170 next();
6171 expr_and();
6172 gen_op('^');
6176 static void expr_or(void)
6178 expr_xor();
6179 while (tok == '|') {
6180 next();
6181 expr_xor();
6182 gen_op('|');
6186 static void expr_landor(int op);
6188 static void expr_land(void)
6190 expr_or();
6191 if (tok == TOK_LAND)
6192 expr_landor(tok);
6195 static void expr_lor(void)
6197 expr_land();
6198 if (tok == TOK_LOR)
6199 expr_landor(tok);
6202 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6203 #else /* defined precedence_parser */
6204 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6205 # define expr_lor() unary(), expr_infix(1)
/* Binary-operator precedence (higher binds tighter); 0 for tokens
   that are not binary operators. */
static int precedence(int tok)
{
    switch (tok) {
        case TOK_LOR: return 1;
        case TOK_LAND: return 2;
        case '|': return 3;
        case '^': return 4;
        case '&': return 5;
        case TOK_EQ: case TOK_NE: return 6;
 relat: case TOK_ULT: case TOK_UGE: return 7;
        case TOK_SHL: case TOK_SAR: return 8;
        case '+': case '-': return 9;
        case '*': case '/': case '%': return 10;
        default:
            /* the remaining relational tokens share level 7 */
            if (tok >= TOK_ULE && tok <= TOK_GT)
                goto relat;
            return 0;
    }
}
static unsigned char prec[256];

/* fill the precedence cache for all one-byte tokens */
static void init_prec(void)
{
    int t;
    for (t = 0; t < 256; t++)
        prec[t] = precedence(t);
}
6233 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6235 static void expr_landor(int op);
/* precedence-climbing parser: parse and generate code for all binary
   operators of precedence >= p.  The left operand is already on the
   value stack; && / || are routed to expr_landor() for shortcut
   evaluation. */
6237 static void expr_infix(int p)
6239 int t = tok, p2;
6240 while ((p2 = precedence(t)) >= p) {
6241 if (t == TOK_LOR || t == TOK_LAND) {
6242 expr_landor(t);
6243 } else {
6244 next();
6245 unary();
/* right operand binds tighter: recurse one level up first */
6246 if (precedence(tok) > p2)
6247 expr_infix(p2 + 1);
6248 gen_op(t);
6250 t = tok;
6253 #endif
6255 /* Assuming vtop is a value used in a conditional context
6256 (i.e. compared with zero) return 0 if it's false, 1 if
6257 true and -1 if it can't be statically determined. */
6258 static int condition_3way(void)
6260 int c = -1;
6261 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6262 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6263 vdup();
6264 gen_cast_s(VT_BOOL);
6265 c = vtop->c.i;
6266 vpop();
6268 return c;
/* generate code for a chain of '&&' (op == TOK_LAND) or '||'
   (op == TOK_LOR) operators with shortcut evaluation.
   i   : the value that keeps evaluation going (1 for &&, 0 for ||)
   t   : jump chain of the shortcut tests
   cc  : still all-constant so far
   f   : a deciding constant was seen; remaining operands are parsed
         under nocode_wanted (see HEAD: this counting was just reverted
         to this form - do not "simplify") */
6271 static void expr_landor(int op)
6273 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6274 for(;;) {
6275 c = f ? i : condition_3way();
6276 if (c < 0)
6277 save_regs(1), cc = 0;
6278 else if (c != i)
6279 nocode_wanted++, f = 1;
6280 if (tok != op)
6281 break;
6282 if (c < 0)
6283 t = gvtst(i, t);
6284 else
6285 vpop();
6286 next();
6287 expr_landor_next(op);
/* all-constant or decided: result is a plain 0/1 constant */
6289 if (cc || f) {
6290 vpop();
6291 vpushi(i ^ f);
6292 gsym(t);
6293 nocode_wanted -= f;
6294 } else {
6295 gvtst_set(i, t);
6299 static int is_cond_bool(SValue *sv)
6301 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6302 && (sv->type.t & VT_BTYPE) == VT_INT)
6303 return (unsigned)sv->c.i < 2;
6304 if (sv->r == VT_CMP)
6305 return 1;
6306 return 0;
/* parse and generate code for 'cond ? e1 : e2' (and the GNU 'cond ?: e2'
   form when g is set).  c is the compile-time value of the condition
   (-1 = unknown); the untaken branch is parsed under nocode_wanted. */
6309 static void expr_cond(void)
6311 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6312 SValue sv;
6313 CType type;
6315 expr_lor();
6316 if (tok == '?') {
6317 next();
6318 c = condition_3way();
6319 g = (tok == ':' && gnu_ext);
6320 tt = 0;
6321 if (!g) {
6322 if (c < 0) {
6323 save_regs(1);
6324 tt = gvtst(1, 0);
6325 } else {
6326 vpop();
6328 } else if (c < 0) {
6329 /* needed to avoid having different registers saved in
6330 each branch */
6331 save_regs(1);
6332 gv_dup();
6333 tt = gvtst(0, 0);
/* condition statically false: parse e1 without emitting code */
6336 if (c == 0)
6337 nocode_wanted++;
6338 if (!g)
6339 gexpr();
6341 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6342 mk_pointer(&vtop->type);
6343 sv = *vtop; /* save value to handle it later */
6344 vtop--; /* no vpop so that FP stack is not flushed */
6346 if (g) {
6347 u = tt;
6348 } else if (c < 0) {
6349 u = gjmp(0);
6350 gsym(tt);
6351 } else
6352 u = 0;
6354 if (c == 0)
6355 nocode_wanted--;
/* condition statically true: parse e2 without emitting code */
6356 if (c == 1)
6357 nocode_wanted++;
6358 skip(':');
6359 expr_cond();
6361 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6362 mk_pointer(&vtop->type);
6364 /* cast operands to correct type according to ISOC rules */
6365 if (!combine_types(&type, &sv, vtop, '?'))
6366 type_incompatibility_error(&sv.type, &vtop->type,
6367 "type mismatch in conditional expression (have '%s' and '%s')");
6369 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6370 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6371 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6372 this code jumps directly to the if's then/else branches. */
6373 t1 = gvtst(0, 0);
6374 t2 = gjmp(0);
6375 gsym(u);
6376 vpushv(&sv);
6377 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6378 gvtst_set(0, t1);
6379 gvtst_set(1, t2);
6380 gen_cast(&type);
6381 // tcc_warning("two conditions expr_cond");
6382 return;
6385 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6386 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6387 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6389 /* now we convert second operand */
6390 if (c != 1) {
6391 gen_cast(&type);
6392 if (islv) {
6393 mk_pointer(&vtop->type);
6394 gaddrof();
6395 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6396 gaddrof();
6399 rc = RC_TYPE(type.t);
6400 /* for long longs, we use fixed registers to avoid having
6401 to handle a complicated move */
6402 if (USING_TWO_WORDS(type.t))
6403 rc = RC_RET(type.t);
6405 tt = r2 = 0;
6406 if (c < 0) {
6407 r2 = gv(rc);
6408 tt = gjmp(0);
6410 gsym(u);
6411 if (c == 1)
6412 nocode_wanted--;
6414 /* this is horrible, but we must also convert first
6415 operand */
6416 if (c != 0) {
6417 *vtop = sv;
6418 gen_cast(&type);
6419 if (islv) {
6420 mk_pointer(&vtop->type);
6421 gaddrof();
6422 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6423 gaddrof();
/* runtime condition: move both branch results into the same register */
6426 if (c < 0) {
6427 r1 = gv(rc);
6428 move_reg(r2, r1, islv ? VT_PTR : type.t);
6429 vtop->r = r2;
6430 gsym(tt);
6433 if (islv)
6434 indir();
6438 static void expr_eq(void)
6440 int t;
6442 expr_cond();
6443 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6444 test_lvalue();
6445 next();
6446 if (t == '=') {
6447 expr_eq();
6448 } else {
6449 vdup();
6450 expr_eq();
6451 gen_op(TOK_ASSIGN_OP(t));
6453 vstore();
6457 ST_FUNC void gexpr(void)
6459 while (1) {
6460 expr_eq();
6461 if (tok != ',')
6462 break;
6463 constant_p &= (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6464 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6465 vpop();
6466 next();
6470 /* parse a constant expression and return value in vtop. */
6471 static void expr_const1(void)
6473 const_wanted++;
6474 nocode_wanted += unevalmask + 1;
6475 expr_cond();
6476 nocode_wanted -= unevalmask + 1;
6477 const_wanted--;
6480 /* parse an integer constant and return its value. */
6481 static inline int64_t expr_const64(void)
6483 int64_t c;
6484 expr_const1();
6485 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6486 expect("constant expression");
6487 c = vtop->c.i;
6488 vpop();
6489 return c;
6492 /* parse an integer constant and return its value.
6493 Complain if it doesn't fit 32bit (signed or unsigned). */
6494 ST_FUNC int expr_const(void)
6496 int c;
6497 int64_t wc = expr_const64();
6498 c = wc;
6499 if (c != wc && (unsigned)c != wc)
6500 tcc_error("constant exceeds 32 bit");
6501 return c;
6504 /* ------------------------------------------------------------------------- */
6505 /* return from function */
6507 #ifndef TCC_TARGET_ARM64
/* move the function return value (on vtop) to the location the ABI
   mandates: registers, implicit sret pointer, or packed registers. */
6508 static void gfunc_return(CType *func_type)
6510 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6511 CType type, ret_type;
6512 int ret_align, ret_nregs, regsize;
6513 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6514 &ret_align, &regsize);
6515 if (ret_nregs < 0) {
6516 #ifdef TCC_TARGET_RISCV64
6517 arch_transfer_ret_regs(0);
6518 #endif
6519 } else if (0 == ret_nregs) {
6520 /* if returning structure, must copy it to implicit
6521 first pointer arg location */
6522 type = *func_type;
6523 mk_pointer(&type);
6524 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6525 indir();
6526 vswap();
6527 /* copy structure value to pointer */
6528 vstore();
6529 } else {
6530 /* returning structure packed into registers */
6531 int size, addr, align, rc;
6532 size = type_size(func_type,&align);
/* insufficiently aligned on the stack: copy to an aligned temporary */
6533 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6534 (vtop->c.i & (ret_align-1)))
6535 && (align & (ret_align-1))) {
6536 loc = (loc - size) & -ret_align;
6537 addr = loc;
6538 type = *func_type;
6539 vset(&type, VT_LOCAL | VT_LVAL, addr);
6540 vswap();
6541 vstore();
6542 vpop();
6543 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6545 vtop->type = ret_type;
6546 rc = RC_RET(ret_type.t);
6547 if (ret_nregs == 1)
6548 gv(rc);
6549 else {
6550 for (;;) {
6551 vdup();
6552 gv(rc);
6553 vpop();
6554 if (--ret_nregs == 0)
6555 break;
6556 /* We assume that when a structure is returned in multiple
6557 registers, their classes are consecutive values of the
6558 suite s(n) = 2^n */
6559 rc <<= 1;
6560 vtop->c.i += regsize;
6564 } else {
/* scalar return: load into the ABI return register class */
6565 gv(RC_RET(func_type->t));
6567 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6569 #endif
6571 static void check_func_return(void)
6573 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6574 return;
6575 if (!strcmp (funcname, "main")
6576 && (func_vt.t & VT_BTYPE) == VT_INT) {
6577 /* main returns 0 by default */
6578 vpushi(0);
6579 gen_assign_cast(&func_vt);
6580 gfunc_return(&func_vt);
6581 } else {
6582 tcc_warning("function might return no value: '%s'", funcname);
6586 /* ------------------------------------------------------------------------- */
6587 /* switch/case */
6589 static int case_cmpi(const void *pa, const void *pb)
6591 int64_t a = (*(struct case_t**) pa)->v1;
6592 int64_t b = (*(struct case_t**) pb)->v1;
6593 return a < b ? -1 : a > b;
6596 static int case_cmpu(const void *pa, const void *pb)
6598 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6599 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6600 return a < b ? -1 : a > b;
/* emit a test of vtop and resolve the resulting jump to address 'a' */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
/* generate the dispatch code for a sorted array of case ranges:
   binary search down to <= 8 entries, then a linear scan.  The switch
   value is on vtop; *bsym collects the "no match" jump chain. */
6608 static void gcase(struct case_t **base, int len, int *bsym)
6610 struct case_t *p;
6611 int e;
6612 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6613 while (len > 8) {
6614 /* binary search */
6615 p = base[len/2];
6616 vdup();
6617 if (ll)
6618 vpushll(p->v2);
6619 else
6620 vpushi(p->v2);
6621 gen_op(TOK_LE);
6622 e = gvtst(1, 0);
6623 vdup();
6624 if (ll)
6625 vpushll(p->v1);
6626 else
6627 vpushi(p->v1);
6628 gen_op(TOK_GE);
6629 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6630 /* x < v1 */
6631 gcase(base, len/2, bsym);
6632 /* x > v2 */
6633 gsym(e);
6634 e = len/2 + 1;
6635 base += e; len -= e;
6637 /* linear scan */
6638 while (len--) {
6639 p = *base++;
6640 vdup();
6641 if (ll)
6642 vpushll(p->v2);
6643 else
6644 vpushi(p->v2);
/* single-value case: one equality test suffices */
6645 if (p->v1 == p->v2) {
6646 gen_op(TOK_EQ);
6647 gtst_addr(0, p->sym);
6648 } else {
6649 gen_op(TOK_LE);
6650 e = gvtst(1, 0);
6651 vdup();
6652 if (ll)
6653 vpushll(p->v1);
6654 else
6655 vpushi(p->v1);
6656 gen_op(TOK_GE);
6657 gtst_addr(0, p->sym);
6658 gsym(e);
6661 *bsym = gjmp(*bsym);
6664 /* ------------------------------------------------------------------------- */
6665 /* __attribute__((cleanup(fn))) */
6667 static void try_call_scope_cleanup(Sym *stop)
6669 Sym *cls = cur_scope->cl.s;
6671 for (; cls != stop; cls = cls->ncl) {
6672 Sym *fs = cls->next;
6673 Sym *vs = cls->prev_tok;
6675 vpushsym(&fs->type, fs);
6676 vset(&vs->type, vs->r, vs->c);
6677 vtop->sym = vs;
6678 mk_pointer(&vtop->type);
6679 gaddrof();
6680 gfunc_call(1);
/* for a backward goto: run the cleanups of every scope left between
   here and the label, i.e. down to the nearest common ancestor (NCA)
   of the two cleanup chains.  The two balancing loops and the final
   walk deliberately have empty bodies. */
6684 static void try_call_cleanup_goto(Sym *cleanupstate)
6686 Sym *oc, *cc;
6687 int ocd, ccd;
6689 if (!cur_scope->cl.s)
6690 return;
6692 /* search NCA of both cleanup chains given parents and initial depth */
6693 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6694 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6696 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6698 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6701 try_call_scope_cleanup(cc);
6704 /* call 'func' for each __attribute__((cleanup(func))) */
/* Run this scope's cleanups when leaving it, and patch any pending
   forward gotos that jump out through it: each such goto is redirected
   through the cleanup calls.  NOTE: 'remove_pending' is a label inside
   the loop, entered by goto from the 'if' branch. */
6705 static void block_cleanup(struct scope *o)
6707 int jmp = 0;
6708 Sym *g, **pg;
6709 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6710 if (g->prev_tok->r & LABEL_FORWARD) {
6711 Sym *pcl = g->next;
/* skip over the cleanup code for straight-line fallthrough */
6712 if (!jmp)
6713 jmp = gjmp(0);
6714 gsym(pcl->jnext);
6715 try_call_scope_cleanup(o->cl.s);
6716 pcl->jnext = gjmp(0);
6717 if (!o->cl.n)
6718 goto remove_pending;
6719 g->c = o->cl.n;
6720 pg = &g->prev;
6721 } else {
6722 remove_pending:
6723 *pg = g->prev;
6724 sym_free(g);
6727 gsym(jmp);
6728 try_call_scope_cleanup(o->cl.s);
6731 /* ------------------------------------------------------------------------- */
6732 /* VLA */
6734 static void vla_restore(int loc)
6736 if (loc)
6737 gen_vla_sp_restore(loc);
6740 static void vla_leave(struct scope *o)
6742 struct scope *c = cur_scope, *v = NULL;
6743 for (; c != o && c; c = c->prev)
6744 if (c->vla.num)
6745 v = c;
6746 if (v)
6747 vla_restore(v->vla.locorig);
6750 /* ------------------------------------------------------------------------- */
6751 /* local scopes */
6753 static void new_scope(struct scope *o)
6755 /* copy and link previous scope */
6756 *o = *cur_scope;
6757 o->prev = cur_scope;
6758 cur_scope = o;
6759 cur_scope->vla.num = 0;
6761 /* record local declaration stack position */
6762 o->lstk = local_stack;
6763 o->llstk = local_label_stack;
6764 ++local_scope;
6766 if (debug_modes)
6767 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
6770 static void prev_scope(struct scope *o, int is_expr)
6772 vla_leave(o->prev);
6774 if (o->cl.s != o->prev->cl.s)
6775 block_cleanup(o->prev);
6777 /* pop locally defined labels */
6778 label_pop(&local_label_stack, o->llstk, is_expr);
6780 /* In the is_expr case (a statement expression is finished here),
6781 vtop might refer to symbols on the local_stack. Either via the
6782 type or via vtop->sym. We can't pop those nor any that in turn
6783 might be referred to. To make it easier we don't roll back
6784 any symbols in that case; some upper level call to block() will
6785 do that. We do have to remove such symbols from the lookup
6786 tables, though. sym_pop will do that. */
6788 /* pop locally defined symbols */
6789 pop_local_syms(o->lstk, is_expr);
6790 cur_scope = o->prev;
6791 --local_scope;
6793 if (debug_modes)
6794 tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
6797 /* leave a scope via break/continue(/goto) */
6798 static void leave_scope(struct scope *o)
6800 if (!o)
6801 return;
6802 try_call_scope_cleanup(o->cl.s);
6803 vla_leave(o);
6806 /* ------------------------------------------------------------------------- */
6807 /* call block from 'for do while' loops */
6809 static void lblock(int *bsym, int *csym)
6811 struct scope *lo = loop_scope, *co = cur_scope;
6812 int *b = co->bsym, *c = co->csym;
6813 if (csym) {
6814 co->csym = csym;
6815 loop_scope = co;
6817 co->bsym = bsym;
6818 block(0);
6819 co->bsym = b;
6820 if (csym) {
6821 co->csym = c;
6822 loop_scope = lo;
/* parse and generate code for one statement (or, with is_expr, the body
   of a GNU statement expression whose value is left on vtop).  This is
   the central statement dispatcher: each 'else if (t == TOK_...)' arm
   below handles one statement kind. */
6826 static void block(int is_expr)
6828 int a, b, c, d, e, t;
6829 struct scope o;
6830 Sym *s;
6832 if (is_expr) {
6833 /* default return value is (void) */
6834 vpushi(0);
6835 vtop->type.t = VT_VOID;
6838 again:
6839 t = tok;
6840 /* If the token carries a value, next() might destroy it. Only with
6841 invalid code such as f(){"123"4;} */
6842 if (TOK_HAS_VALUE(t))
6843 goto expr;
6844 next();
6846 if (debug_modes)
6847 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);
/* if/else: a = jump over the then-branch, d = jump over the else-branch */
6849 if (t == TOK_IF) {
6850 //new_scope(&o); //?? breaks tests2.122
6851 skip('(');
6852 gexpr();
6853 skip(')');
6854 a = gvtst(1, 0);
6855 block(0);
6856 if (tok == TOK_ELSE) {
6857 d = gjmp(0);
6858 gsym(a);
6859 next();
6860 block(0);
6861 gsym(d); /* patch else jmp */
6862 } else {
6863 gsym(a);
6865 //prev_scope(&o,0); //?? breaks tests2.122
/* while: d = loop head, a = break chain, b = continue chain */
6867 } else if (t == TOK_WHILE) {
6868 new_scope(&o);
6869 d = gind();
6870 skip('(');
6871 gexpr();
6872 skip(')');
6873 a = gvtst(1, 0);
6874 b = 0;
6875 lblock(&a, &b);
6876 gjmp_addr(d);
6877 gsym_addr(b, d);
6878 gsym(a);
6879 prev_scope(&o,0);
/* compound statement: new scope, then statements until '}' */
6880 } else if (t == '{') {
6881 new_scope(&o);
6883 /* handle local labels declarations */
6884 while (tok == TOK_LABEL) {
6885 do {
6886 next();
6887 if (tok < TOK_UIDENT)
6888 expect("label identifier");
6889 label_push(&local_label_stack, tok, LABEL_DECLARED);
6890 next();
6891 } while (tok == ',');
6892 skip(';');
6895 while (tok != '}') {
6896 decl(VT_LOCAL);
6897 if (tok != '}') {
6898 if (is_expr)
6899 vpop();
6900 block(is_expr);
6904 prev_scope(&o, is_expr);
6905 if (local_scope)
6906 next();
6907 else if (!nocode_wanted)
6908 check_func_return();
6910 } else if (t == TOK_RETURN) {
6911 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6912 if (tok != ';') {
6913 gexpr();
6914 if (b) {
6915 gen_assign_cast(&func_vt);
6916 } else {
6917 if (vtop->type.t != VT_VOID)
6918 tcc_warning("void function returns a value");
6919 vtop--;
6921 } else if (b) {
6922 tcc_warning("'return' with no value");
6923 b = 0;
6925 leave_scope(root_scope);
6926 if (b)
6927 gfunc_return(&func_vt);
6928 skip(';');
6929 /* jump unless last stmt in top-level block */
6930 if (tok != '}' || local_scope != 1)
6931 rsym = gjmp(rsym);
6932 if (debug_modes)
6933 tcc_tcov_block_end (tcc_state, -1);
6934 CODE_OFF();
6936 } else if (t == TOK_BREAK) {
6937 /* compute jump */
6938 if (!cur_scope->bsym)
6939 tcc_error("cannot break");
6940 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
6941 leave_scope(cur_switch->scope);
6942 else
6943 leave_scope(loop_scope);
6944 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6945 skip(';');
6947 } else if (t == TOK_CONTINUE) {
6948 /* compute jump */
6949 if (!cur_scope->csym)
6950 tcc_error("cannot continue");
6951 leave_scope(loop_scope);
6952 *cur_scope->csym = gjmp(*cur_scope->csym);
6953 skip(';');
/* for: c = condition address, d = increment address,
   a = break chain, b = continue chain */
6955 } else if (t == TOK_FOR) {
6956 new_scope(&o);
6958 skip('(');
6959 if (tok != ';') {
6960 /* c99 for-loop init decl? */
6961 if (!decl(VT_JMP)) {
6962 /* no, regular for-loop init expr */
6963 gexpr();
6964 vpop();
6967 skip(';');
6968 a = b = 0;
6969 c = d = gind();
6970 if (tok != ';') {
6971 gexpr();
6972 a = gvtst(1, 0);
6974 skip(';');
6975 if (tok != ')') {
6976 e = gjmp(0);
6977 d = gind();
6978 gexpr();
6979 vpop();
6980 gjmp_addr(c);
6981 gsym(e);
6983 skip(')');
6984 lblock(&a, &b);
6985 gjmp_addr(d);
6986 gsym_addr(b, d);
6987 gsym(a);
6988 prev_scope(&o, 0);
6990 } else if (t == TOK_DO) {
6991 new_scope(&o);
6992 a = b = 0;
6993 d = gind();
6994 lblock(&a, &b);
6995 gsym(b);
6996 skip(TOK_WHILE);
6997 skip('(');
6998 gexpr();
6999 skip(')');
7000 skip(';');
7001 prev_scope(&o,0);
7002 c = gvtst(0, 0);
7003 gsym_addr(c, d);
7004 gsym(a);
/* switch: collect cases via cur_switch, emit dispatch after the body */
7006 } else if (t == TOK_SWITCH) {
7007 struct switch_t *sw;
7009 new_scope(&o);
7010 sw = tcc_mallocz(sizeof *sw);
7011 sw->bsym = &a;
7012 sw->scope = cur_scope;
7013 sw->prev = cur_switch;
7014 sw->nocode_wanted = nocode_wanted;
7015 cur_switch = sw;
7017 skip('(');
7018 gexpr();
7019 skip(')');
7020 sw->sv = *vtop--; /* save switch value */
7022 a = 0;
7023 b = gjmp(0); /* jump to first case */
7024 lblock(&a, NULL);
7025 a = gjmp(a); /* add implicit break */
7026 /* case lookup */
7027 gsym(b);
7029 if (sw->nocode_wanted)
7030 goto skip_switch;
7031 if (sw->sv.type.t & VT_UNSIGNED)
7032 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7033 else
7034 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7035 for (b = 1; b < sw->n; b++)
7036 if (sw->sv.type.t & VT_UNSIGNED
7037 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7038 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7039 tcc_error("duplicate case value");
7040 vpushv(&sw->sv);
7041 gv(RC_INT);
7042 d = 0, gcase(sw->p, sw->n, &d);
7043 vpop();
7044 if (sw->def_sym)
7045 gsym_addr(d, sw->def_sym);
7046 else
7047 gsym(d);
7048 skip_switch:
7049 /* break label */
7050 gsym(a);
7052 dynarray_reset(&sw->p, &sw->n);
7053 cur_switch = sw->prev;
7054 tcc_free(sw);
7055 prev_scope(&o,0);
7057 } else if (t == TOK_CASE) {
7058 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7059 if (!cur_switch)
7060 expect("switch");
7061 cr->v1 = cr->v2 = expr_const64();
7062 if (gnu_ext && tok == TOK_DOTS) {
7063 next();
7064 cr->v2 = expr_const64();
7065 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7066 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7067 tcc_warning("empty case range");
7069 /* case and default are unreachable from a switch under nocode_wanted */
7070 if (!cur_switch->nocode_wanted)
7071 cr->sym = gind();
7072 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7073 skip(':');
7074 is_expr = 0;
7075 goto block_after_label;
7077 } else if (t == TOK_DEFAULT) {
7078 if (!cur_switch)
7079 expect("switch");
7080 if (cur_switch->def_sym)
7081 tcc_error("too many 'default'");
7082 cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
7083 skip(':');
7084 is_expr = 0;
7085 goto block_after_label;
7087 } else if (t == TOK_GOTO) {
7088 if (cur_scope->vla.num)
7089 vla_restore(cur_scope->vla.locorig);
7090 if (tok == '*' && gnu_ext) {
7091 /* computed goto */
7092 next();
7093 gexpr();
7094 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7095 expect("pointer");
7096 ggoto();
7098 } else if (tok >= TOK_UIDENT) {
7099 s = label_find(tok);
7100 /* put forward definition if needed */
7101 if (!s)
7102 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7103 else if (s->r == LABEL_DECLARED)
7104 s->r = LABEL_FORWARD;
7106 if (s->r & LABEL_FORWARD) {
7107 /* start new goto chain for cleanups, linked via label->next */
7108 if (cur_scope->cl.s && !nocode_wanted) {
7109 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7110 pending_gotos->prev_tok = s;
7111 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7112 pending_gotos->next = s;
7114 s->jnext = gjmp(s->jnext);
7115 } else {
7116 try_call_cleanup_goto(s->cleanupstate);
7117 gjmp_addr(s->jnext);
7119 next();
7121 } else {
7122 expect("label identifier");
7124 skip(';');
7126 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7127 asm_instr();
7129 } else {
7130 if (tok == ':' && t >= TOK_UIDENT) {
7131 /* label case */
7132 next();
7133 s = label_find(t);
7134 if (s) {
7135 if (s->r == LABEL_DEFINED)
7136 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7137 s->r = LABEL_DEFINED;
7138 if (s->next) {
7139 Sym *pcl; /* pending cleanup goto */
7140 for (pcl = s->next; pcl; pcl = pcl->prev)
7141 gsym(pcl->jnext);
7142 sym_pop(&s->next, NULL, 0);
7143 } else
7144 gsym(s->jnext);
7145 } else {
7146 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7148 s->jnext = gind();
7149 s->cleanupstate = cur_scope->cl.s;
7151 block_after_label:
7153 /* Accept attributes after labels (e.g. 'unused') */
7154 AttributeDef ad_tmp;
7155 parse_attribute(&ad_tmp);
7157 if (debug_modes)
7158 tcc_tcov_reset_ind(tcc_state);
7159 vla_restore(cur_scope->vla.loc);
/* a label must be followed by a statement: loop back for it */
7160 if (tok != '}')
7161 goto again;
7162 /* we accept this, but it is a mistake */
7163 tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
7165 } else {
7166 /* expression case */
7167 if (t != ';') {
7168 unget_tok(t);
7169 expr:
7170 if (is_expr) {
7171 vpop();
7172 gexpr();
7173 } else {
7174 gexpr();
7175 vpop();
7177 skip(';');
7182 if (debug_modes)
7183 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
7186 /* This skips over a stream of tokens containing balanced {} and ()
7187 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7188 with a '{'). If STR then allocates and stores the skipped tokens
7189 in *STR. This doesn't check if () and {} are nested correctly,
7190 i.e. "({)}" is accepted. */
7191 static void skip_or_save_block(TokenString **str)
7193 int braces = tok == '{';
7194 int level = 0;
7195 if (str)
7196 *str = tok_str_alloc();
7198 while (1) {
7199 int t = tok;
7200 if (level == 0
7201 && (t == ','
7202 || t == ';'
7203 || t == '}'
7204 || t == ')'
7205 || t == ']'))
7206 break;
7207 if (t == TOK_EOF) {
7208 if (str || level > 0)
7209 tcc_error("unexpected end of file");
7210 else
7211 break;
7213 if (str)
7214 tok_str_add_tok(*str);
7215 next();
7216 if (t == '{' || t == '(' || t == '[') {
7217 level++;
7218 } else if (t == '}' || t == ')' || t == ']') {
7219 level--;
7220 if (level == 0 && braces && t == '}')
7221 break;
7224 if (str) {
7225 tok_str_add(*str, -1);
7226 tok_str_add(*str, 0);
7230 #define EXPR_CONST 1
7231 #define EXPR_ANY 2
/* parse one initializer element, either as a constant expression
   (EXPR_CONST, for static storage) or as an arbitrary expression
   (EXPR_ANY, for automatic storage) */
7233 static void parse_init_elem(int expr_type)
7235 int saved_global_expr;
7236 switch(expr_type) {
7237 case EXPR_CONST:
7238 /* compound literals must be allocated globally in this case */
7239 saved_global_expr = global_expr;
7240 global_expr = 1;
7241 expr_const1();
7242 global_expr = saved_global_expr;
7243 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7244 (compound literals). */
7245 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7246 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7247 || vtop->sym->v < SYM_FIRST_ANOM))
7248 #ifdef TCC_TARGET_PE
/* on PE a dllimport'ed symbol's address is not a load-time constant */
7249 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7250 #endif
7252 tcc_error("initializer element is not constant");
7253 break;
7254 case EXPR_ANY:
7255 expr_eq();
7256 break;
7260 #if 1
/* sanity check: an initializer write at 'offset' must stay within the
   space reserved for it (section data or local stack frame) */
7261 static void init_assert(init_params *p, int offset)
7263 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7264 : !nocode_wanted && offset > p->local_offset)
7265 tcc_internal_error("initializer overflow");
7267 #else
7268 #define init_assert(sec, offset)
7269 #endif
7271 /* put zeros for variable based init */
7272 static void init_putz(init_params *p, unsigned long c, int size)
7274 init_assert(p, c + size);
7275 if (p->sec) {
7276 /* nothing to do because globals are already set to zero */
7277 } else {
/* local init: emit memset(local + c, 0, size) */
7278 vpush_helper_func(TOK_memset);
7279 vseti(VT_LOCAL, c);
7280 #ifdef TCC_TARGET_ARM
/* ARM helper expects the argument order reversed */
7281 vpushs(size);
7282 vpushi(0);
7283 #else
7284 vpushi(0);
7285 vpushs(size);
7286 #endif
7287 gfunc_call(3);
7291 #define DIF_FIRST 1
7292 #define DIF_SIZE_ONLY 2
7293 #define DIF_HAVE_ELEM 4
7294 #define DIF_CLEAR 8
7296 /* delete relocations for specified range c ... c + size. Unfortunatly
7297 in very special cases, relocations may occur unordered */
7298 static void decl_design_delrels(Section *sec, int c, int size)
7300 ElfW_Rel *rel, *rel2, *rel_end;
7301 if (!sec || !sec->reloc)
7302 return;
7303 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7304 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7305 while (rel < rel_end) {
7306 if (rel->r_offset >= c && rel->r_offset < c + size) {
7307 sec->reloc->data_offset -= sizeof *rel;
7308 } else {
7309 if (rel2 != rel)
7310 memcpy(rel2, rel, sizeof *rel);
7311 ++rel2;
7313 ++rel;
7317 static void decl_design_flex(init_params *p, Sym *ref, int index)
7319 if (ref == p->flex_array_ref) {
7320 if (index >= ref->c)
7321 ref->c = index + 1;
7322 } else if (ref->c < 0)
7323 tcc_error("flexible array has zero size in this context");
7326 /* t is the array or struct type. c is the array or struct
7327 address. cur_field is the pointer to the current
7328 field, for arrays the 'c' member contains the current start
7329 index. 'flags' is as in decl_initializer.
7330 'al' contains the already initialized length of the
7331 current container (starting at c). This returns the new length of that. */
7332 static int decl_designator(init_params *p, CType *type, unsigned long c,
7333 Sym **cur_field, int flags, int al)
7335 Sym *s, *f;
7336 int index, index_last, align, l, nb_elems, elem_size;
7337 unsigned long corig = c;
7339 elem_size = 0;
7340 nb_elems = 1;
7342 if (flags & DIF_HAVE_ELEM)
7343 goto no_designator;
/* GNU extension: 'ident:' as an alternative to '.ident =' */
7345 if (gnu_ext && tok >= TOK_UIDENT) {
7346 l = tok, next();
7347 if (tok == ':')
7348 goto struct_field;
7349 unget_tok(l);
7352 /* NOTE: we only support ranges for last designator */
7353 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7354 if (tok == '[') {
7355 if (!(type->t & VT_ARRAY))
7356 expect("array type");
7357 next();
7358 index = index_last = expr_const();
/* GNU '[lo ... hi]' range designator */
7359 if (tok == TOK_DOTS && gnu_ext) {
7360 next();
7361 index_last = expr_const();
7363 skip(']');
7364 s = type->ref;
7365 decl_design_flex(p, s, index_last);
7366 if (index < 0 || index_last >= s->c || index_last < index)
7367 tcc_error("index exceeds array bounds or range is empty");
7368 if (cur_field)
7369 (*cur_field)->c = index_last;
7370 type = pointed_type(type);
7371 elem_size = type_size(type, &align);
7372 c += index * elem_size;
7373 nb_elems = index_last - index + 1;
7374 } else {
7375 int cumofs;
7376 next();
7377 l = tok;
7378 struct_field:
7379 next();
7380 if ((type->t & VT_BTYPE) != VT_STRUCT)
7381 expect("struct/union type");
7382 cumofs = 0;
7383 f = find_field(type, l, &cumofs);
7384 if (cur_field)
7385 *cur_field = f;
7386 type = &f->type;
7387 c += cumofs;
7389 cur_field = NULL;
/* cur_field cleared above means: a designator was consumed, expect '=' */
7391 if (!cur_field) {
7392 if (tok == '=') {
7393 next();
7394 } else if (!gnu_ext) {
7395 expect("=");
7397 } else {
7398 no_designator:
/* positional initialization: advance the implicit field/index */
7399 if (type->t & VT_ARRAY) {
7400 index = (*cur_field)->c;
7401 s = type->ref;
7402 decl_design_flex(p, s, index);
7403 if (index >= s->c)
7404 tcc_error("too many initializers");
7405 type = pointed_type(type);
7406 elem_size = type_size(type, &align);
7407 c += index * elem_size;
7408 } else {
7409 f = *cur_field;
7410 /* Skip bitfield padding. Also with size 32 and 64. */
7411 while (f && (f->v & SYM_FIRST_ANOM) &&
7412 is_integer_btype(f->type.t & VT_BTYPE))
7413 *cur_field = f = f->next;
7414 if (!f)
7415 tcc_error("too many initializers");
7416 type = &f->type;
7417 c += f->c;
7421 if (!elem_size) /* for structs */
7422 elem_size = type_size(type, &align);
7424 /* Using designators the same element can be initialized more
7425 than once. In that case we need to delete possibly already
7426 existing relocations. */
7427 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7428 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7429 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7432 decl_initializer(p, type, c, flags & ~DIF_FIRST);
/* range designator: replicate the just-parsed element nb_elems times */
7434 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7435 Sym aref = {0};
7436 CType t1;
7437 int i;
7438 if (p->sec || (type->t & VT_ARRAY)) {
7439 /* make init_putv/vstore believe it were a struct */
7440 aref.c = elem_size;
7441 t1.t = VT_STRUCT, t1.ref = &aref;
7442 type = &t1;
7444 if (p->sec)
7445 vpush_ref(type, p->sec, c, elem_size);
7446 else
7447 vset(type, VT_LOCAL|VT_LVAL, c);
7448 for (i = 1; i < nb_elems; i++) {
7449 vdup();
7450 init_putv(p, type, c + elem_size * i);
7452 vpop();
7455 c += nb_elems * elem_size;
7456 if (c - corig > al)
7457 al = c - corig;
7458 return al;
7461 /* store a value or an expression directly in global data or in local array */
7462 static void init_putv(init_params *p, CType *type, unsigned long c)
7464 int bt;
7465 void *ptr;
7466 CType dtype;
7467 int size, align;
7468 Section *sec = p->sec;
7469 uint64_t val;
7471 dtype = *type;
7472 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7474 size = type_size(type, &align);
7475 if (type->t & VT_BITFIELD)
7476 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7477 init_assert(p, c + size);
7479 if (sec) {
7480 /* XXX: not portable */
7481 /* XXX: generate error if incorrect relocation */
7482 gen_assign_cast(&dtype);
7483 bt = type->t & VT_BTYPE;
7485 if ((vtop->r & VT_SYM)
7486 && bt != VT_PTR
7487 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7488 || (type->t & VT_BITFIELD))
7489 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7491 tcc_error("initializer element is not computable at load time");
7493 if (NODATA_WANTED) {
7494 vtop--;
7495 return;
7498 ptr = sec->data + c;
7499 val = vtop->c.i;
7501 /* XXX: make code faster ? */
7502 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7503 vtop->sym->v >= SYM_FIRST_ANOM &&
7504 /* XXX This rejects compound literals like
7505 '(void *){ptr}'. The problem is that '&sym' is
7506 represented the same way, which would be ruled out
7507 by the SYM_FIRST_ANOM check above, but also '"string"'
7508 in 'char *p = "string"' is represented the same
7509 with the type being VT_PTR and the symbol being an
7510 anonymous one. That is, there's no difference in vtop
7511 between '(void *){x}' and '&(void *){x}'. Ignore
7512 pointer typed entities here. Hopefully no real code
7513 will ever use compound literals with scalar type. */
7514 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7515 /* These come from compound literals, memcpy stuff over. */
7516 Section *ssec;
7517 ElfSym *esym;
7518 ElfW_Rel *rel;
7519 esym = elfsym(vtop->sym);
7520 ssec = tcc_state->sections[esym->st_shndx];
7521 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7522 if (ssec->reloc) {
7523 /* We need to copy over all memory contents, and that
7524 includes relocations. Use the fact that relocs are
7525 created it order, so look from the end of relocs
7526 until we hit one before the copied region. */
7527 unsigned long relofs = ssec->reloc->data_offset;
7528 while (relofs >= sizeof(*rel)) {
7529 relofs -= sizeof(*rel);
7530 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7531 if (rel->r_offset >= esym->st_value + size)
7532 continue;
7533 if (rel->r_offset < esym->st_value)
7534 break;
7535 put_elf_reloca(symtab_section, sec,
7536 c + rel->r_offset - esym->st_value,
7537 ELFW(R_TYPE)(rel->r_info),
7538 ELFW(R_SYM)(rel->r_info),
7539 #if PTR_SIZE == 8
7540 rel->r_addend
7541 #else
7543 #endif
7547 } else {
7548 if (type->t & VT_BITFIELD) {
7549 int bit_pos, bit_size, bits, n;
7550 unsigned char *p, v, m;
7551 bit_pos = BIT_POS(vtop->type.t);
7552 bit_size = BIT_SIZE(vtop->type.t);
7553 p = (unsigned char*)ptr + (bit_pos >> 3);
7554 bit_pos &= 7, bits = 0;
7555 while (bit_size) {
7556 n = 8 - bit_pos;
7557 if (n > bit_size)
7558 n = bit_size;
7559 v = val >> bits << bit_pos;
7560 m = ((1 << n) - 1) << bit_pos;
7561 *p = (*p & ~m) | (v & m);
7562 bits += n, bit_size -= n, bit_pos = 0, ++p;
7564 } else
7565 switch(bt) {
7566 case VT_BOOL:
7567 *(char *)ptr = val != 0;
7568 break;
7569 case VT_BYTE:
7570 *(char *)ptr = val;
7571 break;
7572 case VT_SHORT:
7573 write16le(ptr, val);
7574 break;
7575 case VT_FLOAT:
7576 write32le(ptr, val);
7577 break;
7578 case VT_DOUBLE:
7579 write64le(ptr, val);
7580 break;
7581 case VT_LDOUBLE:
7582 #if defined TCC_IS_NATIVE_387
7583 /* Host and target platform may be different but both have x87.
7584 On windows, tcc does not use VT_LDOUBLE, except when it is a
7585 cross compiler. In this case a mingw gcc as host compiler
7586 comes here with 10-byte long doubles, while msvc or tcc won't.
7587 tcc itself can still translate by asm.
7588 In any case we avoid possibly random bytes 11 and 12.
7590 if (sizeof (long double) >= 10)
7591 memcpy(ptr, &vtop->c.ld, 10);
7592 #ifdef __TINYC__
7593 else if (sizeof (long double) == sizeof (double))
7594 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7595 #endif
7596 else if (vtop->c.ld == 0.0)
7598 else
7599 #endif
7600 /* For other platforms it should work natively, but may not work
7601 for cross compilers */
7602 if (sizeof(long double) == LDOUBLE_SIZE)
7603 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7604 else if (sizeof(double) == LDOUBLE_SIZE)
7605 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7606 #ifndef TCC_CROSS_TEST
7607 else
7608 tcc_error("can't cross compile long double constants");
7609 #endif
7610 break;
7612 #if PTR_SIZE == 8
7613 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7614 case VT_LLONG:
7615 case VT_PTR:
7616 if (vtop->r & VT_SYM)
7617 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7618 else
7619 write64le(ptr, val);
7620 break;
7621 case VT_INT:
7622 write32le(ptr, val);
7623 break;
7624 #else
7625 case VT_LLONG:
7626 write64le(ptr, val);
7627 break;
7628 case VT_PTR:
7629 case VT_INT:
7630 if (vtop->r & VT_SYM)
7631 greloc(sec, vtop->sym, c, R_DATA_PTR);
7632 write32le(ptr, val);
7633 break;
7634 #endif
7635 default:
7636 //tcc_internal_error("unexpected type");
7637 break;
7640 vtop--;
7641 } else {
7642 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7643 vswap();
7644 vstore();
7645 vpop();
7649 /* 't' contains the type and storage info. 'c' is the offset of the
7650 object in section 'sec'. If 'sec' is NULL, it means stack based
7651 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7652 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7653 size only evaluation is wanted (only for arrays). */
/* NOTE(review): this listing omits some physical lines (blank lines and
   closing braces — see the gaps in the embedded line numbers). Code kept
   byte-identical to the listing; only comments were added. Verify against
   the repository before relying on brace structure shown here. */
7654 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7656 int len, n, no_oblock, i;
7657 int size1, align1;
7658 Sym *s, *f;
7659 Sym indexsym;
7660 CType *t1;
7662 /* generate line number info */
7663 if (debug_modes && !p->sec)
7664 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* Parse one leading scalar element now, unless it is a string literal
   (strings get array-specific handling below) or we are only measuring
   the size.  Structs are parsed even in size-only mode so that the
   struct-from-struct compatibility check below can run. */
7666 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7667 /* In case of strings we have special handling for arrays, so
7668 don't consume them as initializer value (which would commit them
7669 to some anonymous symbol). */
7670 tok != TOK_LSTR && tok != TOK_STR &&
7671 (!(flags & DIF_SIZE_ONLY)
7672 /* a struct may be initialized from a struct of same type, as in
7673 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7674 In that case we need to parse the element in order to check
7675 it for compatibility below */
7676 || (type->t & VT_BTYPE) == VT_STRUCT)
/* During the size-only dry run for an automatic object, raise
   nocode_wanted so that parsing the element emits no code. */
7678 int ncw_prev = nocode_wanted;
7679 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7680 ++nocode_wanted;
7681 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7682 nocode_wanted = ncw_prev;
7683 flags |= DIF_HAVE_ELEM;
7686 if (type->t & VT_ARRAY) {
7687 no_oblock = 1;
7688 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7689 tok == '{') {
7690 skip('{');
7691 no_oblock = 0;
7694 s = type->ref;
7695 n = s->c;
7696 t1 = pointed_type(type);
7697 size1 = type_size(t1, &align1);
7699 /* only parse strings here if correct type (otherwise: handle
7700 them as ((w)char *) expressions */
7701 if ((tok == TOK_LSTR &&
7702 #ifdef TCC_TARGET_PE
7703 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7704 #else
7705 (t1->t & VT_BTYPE) == VT_INT
7706 #endif
7707 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
/* Concatenate adjacent string/wide-string literals into initstr,
   dropping the intermediate NUL terminators between pieces. */
7708 len = 0;
7709 cstr_reset(&initstr);
7710 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7711 tcc_error("unhandled string literal merging");
7712 while (tok == TOK_STR || tok == TOK_LSTR) {
7713 if (initstr.size)
7714 initstr.size -= size1;
7715 if (tok == TOK_STR)
7716 len += tokc.str.size;
7717 else
7718 len += tokc.str.size / sizeof(nwchar_t);
7719 len--;
7720 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7721 next();
7723 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7724 && tok != TOK_EOF) {
7725 /* Not a lone literal but part of a bigger expression. */
7726 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7727 tokc.str.size = initstr.size;
7728 tokc.str.data = initstr.data;
7729 goto do_init_array;
7732 decl_design_flex(p, s, len);
7733 if (!(flags & DIF_SIZE_ONLY)) {
7734 int nb = n, ch;
7735 if (len < nb)
7736 nb = len;
7737 if (len > nb)
7738 tcc_warning("initializer-string for array is too long");
7739 /* in order to go faster for common case (char
7740 string in global variable, we handle it
7741 specifically */
7742 if (p->sec && size1 == 1) {
7743 init_assert(p, c + nb);
7744 if (!NODATA_WANTED)
7745 memcpy(p->sec->data + c, initstr.data, nb);
7746 } else {
7747 for(i=0;i<n;i++) {
7748 if (i >= nb) {
7749 /* only add trailing zero if enough storage (no
7750 warning in this case since it is standard) */
7751 if (flags & DIF_CLEAR)
7752 break;
7753 if (n - i >= 4) {
7754 init_putz(p, c + i * size1, (n - i) * size1);
7755 break;
7757 ch = 0;
7758 } else if (size1 == 1)
7759 ch = ((unsigned char *)initstr.data)[i];
7760 else
7761 ch = ((nwchar_t *)initstr.data)[i];
7762 vpushi(ch);
7763 init_putv(p, t1, c + i * size1);
7767 } else {
7769 do_init_array:
7770 indexsym.c = 0;
7771 f = &indexsym;
7773 do_init_list:
7774 /* zero memory once in advance */
7775 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7776 init_putz(p, c, n*size1);
7777 flags |= DIF_CLEAR;
7780 len = 0;
7781 /* GNU extension: if the initializer is empty for a flex array,
7782 it's size is zero. We won't enter the loop, so set the size
7783 now. */
7784 decl_design_flex(p, s, len);
/* Main initializer-list loop, shared by arrays (f == &indexsym)
   and structs/unions (f walks the member list). */
7785 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7786 len = decl_designator(p, type, c, &f, flags, len);
7787 flags &= ~DIF_HAVE_ELEM;
7788 if (type->t & VT_ARRAY) {
7789 ++indexsym.c;
7790 /* special test for multi dimensional arrays (may not
7791 be strictly correct if designators are used at the
7792 same time) */
7793 if (no_oblock && len >= n*size1)
7794 break;
7795 } else {
7796 if (s->type.t == VT_UNION)
7797 f = NULL;
7798 else
7799 f = f->next;
7800 if (no_oblock && f == NULL)
7801 break;
7804 if (tok == '}')
7805 break;
7806 skip(',');
7809 if (!no_oblock)
7810 skip('}');
7812 } else if ((flags & DIF_HAVE_ELEM)
7813 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7814 The source type might have VT_CONSTANT set, which is
7815 of course assignable to non-const elements. */
7816 && is_compatible_unqualified_types(type, &vtop->type)) {
7817 goto one_elem;
7819 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
/* Struct/union: reuse the shared list loop above with f walking
   the member chain; size1 == 1 so 'len' counts bytes directly. */
7820 no_oblock = 1;
7821 if ((flags & DIF_FIRST) || tok == '{') {
7822 skip('{');
7823 no_oblock = 0;
7825 s = type->ref;
7826 f = s->next;
7827 n = s->c;
7828 size1 = 1;
7829 goto do_init_list;
7831 } else if (tok == '{') {
/* Scalar initialized with braces: { expr } */
7832 if (flags & DIF_HAVE_ELEM)
7833 skip(';');
7834 next();
7835 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7836 skip('}');
7838 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7839 /* If we supported only ISO C we wouldn't have to accept calling
7840 this on anything than an array if DIF_SIZE_ONLY (and even then
7841 only on the outermost level, so no recursion would be needed),
7842 because initializing a flex array member isn't supported.
7843 But GNU C supports it, so we need to recurse even into
7844 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7845 /* just skip expression */
7846 if (flags & DIF_HAVE_ELEM)
7847 vpop();
7848 else
7849 skip_or_save_block(NULL);
7851 } else {
7852 if (!(flags & DIF_HAVE_ELEM)) {
7853 /* This should happen only when we haven't parsed
7854 the init element above for fear of committing a
7855 string constant to memory too early. */
7856 if (tok != TOK_STR && tok != TOK_LSTR)
7857 expect("string constant");
7858 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
/* Skip storing a constant zero into already-zeroed local storage
   (the container was cleared in advance, see DIF_CLEAR above). */
7860 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7861 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7862 && vtop->c.i == 0
7863 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7865 vpop();
7866 else
7867 init_putv(p, type, c);
7871 /* parse an initializer for type 't' if 'has_init' is non zero, and
7872 allocate space in local or global data space ('r' is either
7873 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7874 variable 'v' of scope 'scope' is declared before initializers
7875 are parsed. If 'v' is zero, then a reference to the new object
7876 is put in the value stack. If 'has_init' is 2, a special parsing
7877 is done to handle string constants. */
/* NOTE(review): this listing omits some physical lines (blank lines and
   closing braces — see the gaps in the embedded line numbers). Code kept
   byte-identical to the listing; only comments were added. */
7878 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7879 int has_init, int v, int global)
7881 int size, align, addr;
7882 TokenString *init_str = NULL;
7884 Section *sec;
7885 Sym *flexible_array;
7886 Sym *sym;
7887 int saved_nocode_wanted = nocode_wanted;
7888 #ifdef CONFIG_TCC_BCHECK
7889 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7890 #endif
7891 init_params p = {0};
7893 /* Always allocate static or global variables */
7894 if (v && (r & VT_VALMASK) == VT_CONST)
7895 nocode_wanted |= DATA_ONLY_WANTED;
7897 flexible_array = NULL;
7898 size = type_size(type, &align);
7900 /* exactly one flexible array may be initialized, either the
7901 toplevel array or the last member of the toplevel struct */
7903 if (size < 0) {
7904 /* If the base type itself was an array type of unspecified size
7905 (like in 'typedef int arr[]; arr x = {1};') then we will
7906 overwrite the unknown size by the real one for this decl.
7907 We need to unshare the ref symbol holding that size. */
7908 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
7909 p.flex_array_ref = type->ref;
7911 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
/* Struct whose last member is a flexible array: remember it so the
   initializer can size it and we can adjust 'size' afterwards. */
7912 Sym *field = type->ref->next;
7913 if (field) {
7914 while (field->next)
7915 field = field->next;
7916 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
7917 flexible_array = field;
7918 p.flex_array_ref = field->type.ref;
7919 size = -1;
7924 if (size < 0) {
7925 /* If unknown size, do a dry-run 1st pass */
7926 if (!has_init)
7927 tcc_error("unknown type size");
7928 if (has_init == 2) {
7929 /* only get strings */
7930 init_str = tok_str_alloc();
7931 while (tok == TOK_STR || tok == TOK_LSTR) {
7932 tok_str_add_tok(init_str);
7933 next();
7935 tok_str_add(init_str, -1);
7936 tok_str_add(init_str, 0);
7937 } else
7938 skip_or_save_block(&init_str);
7939 unget_tok(0);
7941 /* compute size */
/* Replay the saved initializer tokens once in size-only mode, then
   rewind the macro stream so the real second pass can parse them. */
7942 begin_macro(init_str, 1);
7943 next();
7944 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
7945 /* prepare second initializer parsing */
7946 macro_ptr = init_str->str;
7947 next();
7949 /* if still unknown size, error */
7950 size = type_size(type, &align);
7951 if (size < 0)
7952 tcc_error("unknown type size");
7954 /* If there's a flex member and it was used in the initializer
7955 adjust size. */
7956 if (flexible_array && flexible_array->type.ref->c > 0)
7957 size += flexible_array->type.ref->c
7958 * pointed_size(&flexible_array->type);
7961 /* take into account specified alignment if bigger */
7962 if (ad->a.aligned) {
7963 int speca = 1 << (ad->a.aligned - 1);
7964 if (speca > align)
7965 align = speca;
7966 } else if (ad->a.packed) {
7967 align = 1;
7970 if (!v && NODATA_WANTED)
7971 size = 0, align = 1;
7973 if ((r & VT_VALMASK) == VT_LOCAL) {
/* Stack-based object: carve space out of the local frame ('loc'). */
7974 sec = NULL;
7975 #ifdef CONFIG_TCC_BCHECK
7976 if (bcheck && v) {
7977 /* add padding between stack variables for bound checking */
7978 loc -= align;
7980 #endif
7981 loc = (loc - size) & -align;
7982 addr = loc;
7983 p.local_offset = addr + size;
7984 #ifdef CONFIG_TCC_BCHECK
7985 if (bcheck && v) {
7986 /* add padding between stack variables for bound checking */
7987 loc -= align;
7989 #endif
7990 if (v) {
7991 /* local variable */
7992 #ifdef CONFIG_TCC_ASM
7993 if (ad->asm_label) {
7994 int reg = asm_parse_regvar(ad->asm_label);
7995 if (reg >= 0)
7996 r = (r & ~VT_VALMASK) | reg;
7998 #endif
7999 sym = sym_push(v, type, r, addr);
8000 if (ad->cleanup_func) {
/* Register a __attribute__((cleanup)) handler for this scope. */
8001 Sym *cls = sym_push2(&all_cleanups,
8002 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8003 cls->prev_tok = sym;
8004 cls->next = ad->cleanup_func;
8005 cls->ncl = cur_scope->cl.s;
8006 cur_scope->cl.s = cls;
8009 sym->a = ad->a;
8010 } else {
8011 /* push local reference */
8012 vset(type, r, addr);
8014 } else {
/* Static/global object: allocate in an ELF section. */
8015 sym = NULL;
8016 if (v && global) {
8017 /* see if the symbol was already defined */
8018 sym = sym_find(v);
8019 if (sym) {
8020 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8021 && sym->type.ref->c > type->ref->c) {
8022 /* flex array was already declared with explicit size
8023 extern int arr[10];
8024 int arr[] = { 1,2,3 }; */
8025 type->ref->c = sym->type.ref->c;
8026 size = type_size(type, &align);
8028 patch_storage(sym, ad, type);
8029 /* we accept several definitions of the same global variable. */
8030 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8031 goto no_alloc;
8035 /* allocate symbol in corresponding section */
8036 sec = ad->section;
8037 if (!sec) {
/* Pick a default section: rodata for const, data if initialized,
   bss with -fno-common, otherwise SHN_COMMON below. */
8038 CType *tp = type;
8039 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8040 tp = &tp->ref->type;
8041 if (tp->t & VT_CONSTANT) {
8042 sec = rodata_section;
8043 } else if (has_init) {
8044 sec = data_section;
8045 /*if (tcc_state->g_debug & 4)
8046 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8047 } else if (tcc_state->nocommon)
8048 sec = bss_section;
8051 if (sec) {
8052 addr = section_add(sec, size, align);
8053 #ifdef CONFIG_TCC_BCHECK
8054 /* add padding if bound check */
8055 if (bcheck)
8056 section_add(sec, 1, 1);
8057 #endif
8058 } else {
8059 addr = align; /* SHN_COMMON is special, symbol value is align */
8060 sec = common_section;
8063 if (v) {
8064 if (!sym) {
8065 sym = sym_push(v, type, r | VT_SYM, 0);
8066 patch_storage(sym, ad, NULL);
8068 /* update symbol definition */
8069 put_extern_sym(sym, sec, addr, size);
8070 } else {
8071 /* push global reference */
8072 vpush_ref(type, sec, addr, size);
8073 sym = vtop->sym;
8074 vtop->r |= r;
8077 #ifdef CONFIG_TCC_BCHECK
8078 /* handles bounds now because the symbol must be defined
8079 before for the relocation */
8080 if (bcheck) {
8081 addr_t *bounds_ptr;
8083 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8084 /* then add global bound info */
8085 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8086 bounds_ptr[0] = 0; /* relocated */
8087 bounds_ptr[1] = size;
8089 #endif
8092 if (type->t & VT_VLA) {
8093 int a;
8095 if (NODATA_WANTED)
8096 goto no_alloc;
8098 /* save before-VLA stack pointer if needed */
8099 if (cur_scope->vla.num == 0) {
8100 if (cur_scope->prev && cur_scope->prev->vla.num) {
8101 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8102 } else {
8103 gen_vla_sp_save(loc -= PTR_SIZE);
8104 cur_scope->vla.locorig = loc;
8108 vpush_type_size(type, &a);
8109 gen_vla_alloc(type, a);
8110 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8111 /* on _WIN64, because of the function args scratch area, the
8112 result of alloca differs from RSP and is returned in RAX. */
8113 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8114 #endif
8115 gen_vla_sp_save(addr);
8116 cur_scope->vla.loc = addr;
8117 cur_scope->vla.num++;
8118 } else if (has_init) {
8119 p.sec = sec;
8120 decl_initializer(&p, type, addr, DIF_FIRST);
8121 /* patch flexible array member size back to -1, */
8122 /* for possible subsequent similar declarations */
8123 if (flexible_array)
8124 flexible_array->type.ref->c = -1;
8127 no_alloc:
8128 /* restore parse state if needed */
8129 if (init_str) {
8130 end_macro();
8131 next();
/* Restore the nocode_wanted saved on entry (may have been raised with
   DATA_ONLY_WANTED above for static/global allocation). */
8134 nocode_wanted = saved_nocode_wanted;
8137 /* generate vla code saved in post_type() */
/* Recursively walks the (inner-first) type chain of a function parameter
   and, for each VLA dimension whose size expression was saved as a token
   string by post_type(), evaluates that expression at function entry and
   stores the resulting byte size into a dedicated local slot
   (arg->type.ref->c).  NOTE(review): some physical lines (braces) are
   elided from this listing; code kept byte-identical. */
8138 static void func_vla_arg_code(Sym *arg)
8140 int align;
8141 TokenString *vla_array_tok = NULL;
/* Handle inner array dimensions first. */
8143 if (arg->type.ref)
8144 func_vla_arg_code(arg->type.ref);
8146 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* Reserve an int-sized, int-aligned local slot for the computed size. */
8147 loc -= type_size(&int_type, &align);
8148 loc &= -align;
8149 arg->type.ref->c = loc;
/* Replay the saved size-expression tokens through the macro stream. */
8151 unget_tok(0);
8152 vla_array_tok = tok_str_alloc();
8153 vla_array_tok->str = arg->type.ref->vla_array_str;
8154 begin_macro(vla_array_tok, 1);
8155 next();
8156 gexpr();
8157 end_macro();
8158 next();
/* size-in-bytes = (evaluated element count) * sizeof(element type);
   store it into the reserved slot. */
8159 vpush_type_size(&arg->type.ref->type, &align);
8160 gen_op('*');
8161 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8162 vswap();
8163 vstore();
8164 vpop();
/* Emit, at function entry, the size-computation code for every VLA-typed
   parameter of function 'sym' (see func_vla_arg_code). */
8168 static void func_vla_arg(Sym *sym)
8170 Sym *arg;
8172 for (arg = sym->type.ref->next; arg; arg = arg->next)
8173 if (arg->type.t & VT_VLA)
8174 func_vla_arg_code(arg);
8177 /* parse a function defined by symbol 'sym' and generate its code in
8178 'cur_text_section' */
/* NOTE(review): some physical lines (braces/blank lines) are elided from
   this listing; code kept byte-identical, comments only added. */
8179 static void gen_function(Sym *sym)
8181 struct scope f = { 0 };
8182 cur_scope = root_scope = &f;
/* Enable code generation for the function body. */
8183 nocode_wanted = 0;
8184 ind = cur_text_section->data_offset;
8185 if (sym->a.aligned) {
/* Honor __attribute__((aligned)) by padding with NOPs up to the
   requested text alignment. */
8186 size_t newoff = section_add(cur_text_section, 0,
8187 1 << (sym->a.aligned - 1));
8188 gen_fill_nops(newoff - ind);
8190 /* NOTE: we patch the symbol size later */
8191 put_extern_sym(sym, cur_text_section, ind, 0);
8192 if (sym->type.ref->f.func_ctor)
8193 add_array (tcc_state, ".init_array", sym->c);
8194 if (sym->type.ref->f.func_dtor)
8195 add_array (tcc_state, ".fini_array", sym->c);
8197 funcname = get_tok_str(sym->v, NULL);
8198 func_ind = ind;
8199 func_vt = sym->type.ref->type;
8200 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8202 /* put debug symbol */
8203 tcc_debug_funcstart(tcc_state, sym);
8204 /* push a dummy symbol to enable local sym storage */
8205 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8206 local_scope = 1; /* for function parameters */
8207 gfunc_prolog(sym);
8208 tcc_debug_prolog_epilog(tcc_state, 0);
8209 local_scope = 0;
8210 rsym = 0;
8211 clear_temp_local_var_list();
8212 func_vla_arg(sym);
/* Compile the function body, then resolve pending 'return' jumps. */
8213 block(0);
8214 gsym(rsym);
8215 nocode_wanted = 0;
8216 /* reset local stack */
8217 pop_local_syms(NULL, 0);
8218 tcc_debug_prolog_epilog(tcc_state, 1);
8219 gfunc_epilog();
8220 cur_text_section->data_offset = ind;
8221 local_scope = 0;
8222 label_pop(&global_label_stack, NULL, 0);
8223 sym_pop(&all_cleanups, NULL, 0);
8224 /* patch symbol size */
8225 elfsym(sym)->st_size = ind - func_ind;
8226 /* end of function */
8227 tcc_debug_funcend(tcc_state, ind - func_ind);
8228 /* It's better to crash than to generate wrong code */
8229 cur_text_section = NULL;
8230 funcname = ""; /* for safety */
8231 func_vt.t = VT_VOID; /* for safety */
8232 func_var = 0; /* for safety */
8233 ind = 0; /* for safety */
8234 func_ind = -1;
/* Outside of functions only static data output is allowed. */
8235 nocode_wanted = DATA_ONLY_WANTED;
8236 check_vstack();
8237 /* do this after funcend debug info */
8238 next();
/* Emit code for all recorded inline functions that were actually referenced
   (or forced non-internal).  Iterates to a fixed point because generating
   one inline function may reference another. */
8241 static void gen_inline_functions(TCCState *s)
8243 Sym *sym;
8244 int inline_generated, i;
8245 struct InlineFunc *fn;
8247 tcc_open_bf(s, ":inline:", 0);
8248 /* iterate while inline function are referenced */
8249 do {
8250 inline_generated = 0;
8251 for (i = 0; i < s->nb_inline_fns; ++i) {
8252 fn = s->inline_fns[i];
8253 sym = fn->sym;
8254 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8255 /* the function was used or forced (and then not internal):
8256 generate its code and convert it to a normal function */
8257 fn->sym = NULL;
8258 tcc_debug_putfile(s, fn->filename);
/* Replay the saved token string of the function body. */
8259 begin_macro(fn->func_str, 1);
8260 next();
8261 cur_text_section = text_section;
8262 gen_function(sym);
8263 end_macro();
8265 inline_generated = 1;
8268 } while (inline_generated);
8269 tcc_close();
/* Release the saved token strings of inline functions that were never
   emitted, then free the inline-function table itself. */
8272 static void free_inline_functions(TCCState *s)
8274 int i;
8275 /* free tokens of unused inline functions */
8276 for (i = 0; i < s->nb_inline_fns; ++i) {
8277 struct InlineFunc *fn = s->inline_fns[i];
/* fn->sym is still set only if gen_inline_functions() skipped it. */
8278 if (fn->sym)
8279 tok_str_free(fn->func_str);
8281 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* Parse and check a _Static_assert declaration:
   _Static_assert(const-expr) or _Static_assert(const-expr, "message").
   Errors out if the constant expression evaluates to zero.
   NOTE(review): some physical lines (braces) are elided from this
   listing; code kept byte-identical. */
8284 static void do_Static_assert(void){
8285 CString error_str;
8286 int c;
8288 next();
8289 skip('(');
8290 c = expr_const();
/* C23 one-argument form: no message string. */
8292 if (tok == ')') {
8293 if (!c)
8294 tcc_error("_Static_assert fail");
8295 next();
8296 goto static_assert_out;
8299 skip(',');
8300 parse_mult_str(&error_str, "string constant");
8301 if (c == 0)
8302 tcc_error("%s", (char *)error_str.data);
8303 cstr_free(&error_str);
8304 skip(')');
8305 static_assert_out:
8306 skip(';');
8309 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8310 or VT_CMP if parsing old style parameter list
8311 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Top-level declaration parser: loops over declarations, dispatching to
   function definitions, typedefs, old-style parameter declarations and
   variable definitions.  Returns non-zero only in the VT_JMP (c99 'for'
   declaration) case when a declarator was consumed.
   NOTE(review): some physical lines (braces/blank lines) are elided from
   this listing; code kept byte-identical, comments only added. */
8312 static int decl(int l)
8314 int v, has_init, r, oldint;
8315 CType type, btype;
8316 Sym *sym;
8317 AttributeDef ad, adbase;
8319 while (1) {
8320 if (tok == TOK_STATIC_ASSERT) {
8321 do_Static_assert();
8322 continue;
8325 oldint = 0;
8326 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
/* No type specifier found: decide between end-of-list, stray ';',
   global asm, or a K&R implicit-int declaration. */
8327 if (l == VT_JMP)
8328 return 0;
8329 /* skip redundant ';' if not in old parameter decl scope */
8330 if (tok == ';' && l != VT_CMP) {
8331 next();
8332 continue;
8334 if (l != VT_CONST)
8335 break;
8336 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8337 /* global asm block */
8338 asm_global_instr();
8339 continue;
8341 if (tok >= TOK_UIDENT) {
8342 /* special test for old K&R protos without explicit int
8343 type. Only accepted when defining global data */
8344 btype.t = VT_INT;
8345 oldint = 1;
8346 } else {
8347 if (tok != TOK_EOF)
8348 expect("declaration");
8349 break;
8353 if (tok == ';') {
/* Bare 'struct/union/enum ...;' declaration with no declarator. */
8354 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8355 v = btype.ref->v;
8356 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8357 tcc_warning("unnamed struct/union that defines no instances");
8358 next();
8359 continue;
8361 if (IS_ENUM(btype.t)) {
8362 next();
8363 continue;
8367 while (1) { /* iterate thru each declaration */
8368 type = btype;
8369 ad = adbase;
8370 type_decl(&type, &ad, &v, TYPE_DIRECT);
8372 #if 0
8373 char buf[500];
8374 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8375 printf("type = '%s'\n", buf);
8377 #endif
8378 if ((type.t & VT_BTYPE) == VT_FUNC) {
8379 if ((type.t & VT_STATIC) && (l != VT_CONST))
8380 tcc_error("function without file scope cannot be static");
8381 /* if old style function prototype, we accept a
8382 declaration list */
8383 sym = type.ref;
8384 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8385 func_vt = type;
8386 decl(VT_CMP);
8388 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8389 if (sym->f.func_alwinl
8390 && ((type.t & (VT_EXTERN | VT_INLINE))
8391 == (VT_EXTERN | VT_INLINE))) {
8392 /* always_inline functions must be handled as if they
8393 don't generate multiple global defs, even if extern
8394 inline, i.e. GNU inline semantics for those. Rewrite
8395 them into static inline. */
8396 type.t &= ~VT_EXTERN;
8397 type.t |= VT_STATIC;
8399 #endif
8400 /* always compile 'extern inline' */
8401 if (type.t & VT_EXTERN)
8402 type.t &= ~VT_INLINE;
8404 } else if (oldint) {
8405 tcc_warning("type defaults to int");
8408 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8409 ad.asm_label = asm_label_instr();
8410 /* parse one last attribute list, after asm label */
8411 parse_attribute(&ad);
8412 #if 0
8413 /* gcc does not allow __asm__("label") with function definition,
8414 but why not ... */
8415 if (tok == '{')
8416 expect(";");
8417 #endif
8420 #ifdef TCC_TARGET_PE
8421 if (ad.a.dllimport || ad.a.dllexport) {
8422 if (type.t & VT_STATIC)
8423 tcc_error("cannot have dll linkage with static");
8424 if (type.t & VT_TYPEDEF) {
8425 tcc_warning("'%s' attribute ignored for typedef",
8426 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8427 (ad.a.dllexport = 0, "dllexport"));
8428 } else if (ad.a.dllimport) {
8429 if ((type.t & VT_BTYPE) == VT_FUNC)
8430 ad.a.dllimport = 0;
8431 else
8432 type.t |= VT_EXTERN;
8435 #endif
8436 if (tok == '{') {
/* Function definition (body follows). */
8437 if (l != VT_CONST)
8438 tcc_error("cannot use local functions");
8439 if ((type.t & VT_BTYPE) != VT_FUNC)
8440 expect("function definition");
8442 /* reject abstract declarators in function definition
8443 make old style params without decl have int type */
8444 sym = type.ref;
8445 while ((sym = sym->next) != NULL) {
8446 if (!(sym->v & ~SYM_FIELD))
8447 expect("identifier");
8448 if (sym->type.t == VT_VOID)
8449 sym->type = int_type;
8452 /* apply post-declaraton attributes */
8453 merge_funcattr(&type.ref->f, &ad.f);
8455 /* put function symbol */
8456 type.t &= ~VT_EXTERN;
8457 sym = external_sym(v, &type, 0, &ad);
8459 /* static inline functions are just recorded as a kind
8460 of macro. Their code will be emitted at the end of
8461 the compilation unit only if they are used */
8462 if (sym->type.t & VT_INLINE) {
8463 struct InlineFunc *fn;
8464 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8465 strcpy(fn->filename, file->filename);
8466 fn->sym = sym;
8467 skip_or_save_block(&fn->func_str);
8468 dynarray_add(&tcc_state->inline_fns,
8469 &tcc_state->nb_inline_fns, fn);
8470 } else {
8471 /* compute text section */
8472 cur_text_section = ad.section;
8473 if (!cur_text_section)
8474 cur_text_section = text_section;
8475 gen_function(sym);
8477 break;
8478 } else {
8479 if (l == VT_CMP) {
8480 /* find parameter in function parameter list */
8481 for (sym = func_vt.ref->next; sym; sym = sym->next)
8482 if ((sym->v & ~SYM_FIELD) == v)
8483 goto found;
8484 tcc_error("declaration for parameter '%s' but no such parameter",
8485 get_tok_str(v, NULL));
8486 found:
8487 if (type.t & VT_STORAGE) /* 'register' is okay */
8488 tcc_error("storage class specified for '%s'",
8489 get_tok_str(v, NULL));
8490 if (sym->type.t != VT_VOID)
8491 tcc_error("redefinition of parameter '%s'",
8492 get_tok_str(v, NULL));
8493 convert_parameter_type(&type);
8494 sym->type = type;
8495 } else if (type.t & VT_TYPEDEF) {
8496 /* save typedefed type */
8497 /* XXX: test storage specifiers ? */
8498 sym = sym_find(v);
8499 if (sym && sym->sym_scope == local_scope) {
8500 if (!is_compatible_types(&sym->type, &type)
8501 || !(sym->type.t & VT_TYPEDEF))
8502 tcc_error("incompatible redefinition of '%s'",
8503 get_tok_str(v, NULL));
8504 sym->type = type;
8505 } else {
8506 sym = sym_push(v, &type, 0, 0);
8508 sym->a = ad.a;
8509 if ((type.t & VT_BTYPE) == VT_FUNC)
8510 merge_funcattr(&sym->type.ref->f, &ad.f);
8511 if (debug_modes)
8512 tcc_debug_typedef (tcc_state, sym);
8513 } else if ((type.t & VT_BTYPE) == VT_VOID
8514 && !(type.t & VT_EXTERN)) {
8515 tcc_error("declaration of void object");
8516 } else {
/* Ordinary variable or function declaration. */
8517 r = 0;
8518 if ((type.t & VT_BTYPE) == VT_FUNC) {
8519 /* external function definition */
8520 /* specific case for func_call attribute */
8521 merge_funcattr(&type.ref->f, &ad.f);
8522 } else if (!(type.t & VT_ARRAY)) {
8523 /* not lvalue if array */
8524 r |= VT_LVAL;
8526 has_init = (tok == '=');
8527 if (has_init && (type.t & VT_VLA))
8528 tcc_error("variable length array cannot be initialized");
8529 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8530 || (type.t & VT_BTYPE) == VT_FUNC
8531 /* as with GCC, uninitialized global arrays with no size
8532 are considered extern: */
8533 || ((type.t & VT_ARRAY) && !has_init
8534 && l == VT_CONST && type.ref->c < 0)
8536 /* external variable or function */
8537 type.t |= VT_EXTERN;
8538 sym = external_sym(v, &type, r, &ad);
8539 if (ad.alias_target) {
8540 /* Aliases need to be emitted when their target
8541 symbol is emitted, even if perhaps unreferenced.
8542 We only support the case where the base is
8543 already defined, otherwise we would need
8544 deferring to emit the aliases until the end of
8545 the compile unit. */
8546 Sym *alias_target = sym_find(ad.alias_target);
8547 ElfSym *esym = elfsym(alias_target);
8548 if (!esym)
8549 tcc_error("unsupported forward __alias__ attribute");
8550 put_extern_sym2(sym, esym->st_shndx,
8551 esym->st_value, esym->st_size, 1);
8553 } else {
8554 if (l == VT_CONST || (type.t & VT_STATIC))
8555 r |= VT_CONST;
8556 else
8557 r |= VT_LOCAL;
8558 if (has_init)
8559 next();
8560 else if (l == VT_CONST)
8561 /* uninitialized global variables may be overridden */
8562 type.t |= VT_EXTERN;
8563 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8566 if (tok != ',') {
8567 if (l == VT_JMP)
8568 return 1;
8569 skip(';');
8570 break;
8572 next();
8576 return 0;
8579 /* ------------------------------------------------------------------------- */
8580 #undef gjmp_addr
8581 #undef gjmp
8582 /* ------------------------------------------------------------------------- */