Fix null pointer constants
[tinycc.git] / tccgen.c
blob4b2c491222043f529e6d999f0c6a6c202b9299b2
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 ST_DATA char debug_modes;
49 ST_DATA SValue *vtop;
50 static SValue _vstack[1 + VSTACK_SIZE];
51 #define vstack (_vstack + 1)
53 ST_DATA int const_wanted; /* true if constant wanted */
54 ST_DATA int nocode_wanted; /* no code generation wanted */
55 #define unevalmask 0xffff /* unevaluated subexpression */
56 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
57 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 /* Automagical code suppression ----> */
60 #define CODE_OFF() (nocode_wanted |= 0x20000000)
61 #define CODE_ON() (nocode_wanted &= ~0x20000000)
63 /* Clear 'nocode_wanted' at label if it was used */
64 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
65 static int gind(int known_unreachable)
67 int t = ind;
68 if (!known_unreachable)
69 CODE_ON();
70 if (debug_modes)
71 tcc_tcov_block_begin(tcc_state);
72 return t;
75 /* Set 'nocode_wanted' after unconditional jumps */
76 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
77 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
79 /* These are #undef'd at the end of this file */
80 #define gjmp_addr gjmp_addr_acs
81 #define gjmp gjmp_acs
82 /* <---- */
84 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
85 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
86 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
87 ST_DATA int func_vc;
88 ST_DATA int func_ind;
89 ST_DATA const char *funcname;
90 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
91 static CString initstr;
93 #if PTR_SIZE == 4
94 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
95 #define VT_PTRDIFF_T VT_INT
96 #elif LONG_SIZE == 4
97 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
98 #define VT_PTRDIFF_T VT_LLONG
99 #else
100 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
101 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
102 #endif
104 static struct switch_t {
105 struct case_t {
106 int64_t v1, v2;
107 int sym;
108 } **p; int n; /* list of case ranges */
109 int def_sym; /* default symbol */
110 int nocode_wanted;
111 int *bsym;
112 struct scope *scope;
113 struct switch_t *prev;
114 SValue sv;
115 } *cur_switch; /* current switch */
117 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
118 /*list of temporary local variables on the stack in current function. */
119 static struct temp_local_variable {
120 int location; //offset on stack. Svalue.c.i
121 short size;
122 short align;
123 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
124 static int nb_temp_local_vars;
126 static struct scope {
127 struct scope *prev;
128 struct { int loc, locorig, num; } vla;
129 struct { Sym *s; int n; } cl;
130 int *bsym, *csym;
131 Sym *lstk, *llstk;
132 } *cur_scope, *loop_scope, *root_scope;
134 typedef struct {
135 Section *sec;
136 int local_offset;
137 Sym *flex_array_ref;
138 } init_params;
140 #if 1
141 #define precedence_parser
142 static void init_prec(void);
143 #endif
145 static void gen_cast(CType *type);
146 static void gen_cast_s(int t);
147 static inline CType *pointed_type(CType *type);
148 static int is_compatible_types(CType *type1, CType *type2);
149 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
150 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
151 static void parse_expr_type(CType *type);
152 static void init_putv(init_params *p, CType *type, unsigned long c);
153 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
154 static void block(int is_expr);
155 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
156 static void decl(int l);
157 static int decl0(int l, int is_for_loop_init, Sym *);
158 static void expr_eq(void);
159 static void vpush_type_size(CType *type, int *a);
160 static int is_compatible_unqualified_types(CType *type1, CType *type2);
161 static inline int64_t expr_const64(void);
162 static void vpush64(int ty, unsigned long long v);
163 static void vpush(CType *type);
164 static int gvtst(int inv, int t);
165 static void gen_inline_functions(TCCState *s);
166 static void free_inline_functions(TCCState *s);
167 static void skip_or_save_block(TokenString **str);
168 static void gv_dup(void);
169 static int get_temp_local_var(int size,int align);
170 static void clear_temp_local_var_list();
171 static void cast_error(CType *st, CType *dt);
173 /* ------------------------------------------------------------------------- */
175 ST_INLN int is_float(int t)
177 int bt = t & VT_BTYPE;
178 return bt == VT_LDOUBLE
179 || bt == VT_DOUBLE
180 || bt == VT_FLOAT
181 || bt == VT_QFLOAT;
184 static inline int is_integer_btype(int bt)
186 return bt == VT_BYTE
187 || bt == VT_BOOL
188 || bt == VT_SHORT
189 || bt == VT_INT
190 || bt == VT_LLONG;
193 static int btype_size(int bt)
195 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
196 bt == VT_SHORT ? 2 :
197 bt == VT_INT ? 4 :
198 bt == VT_LLONG ? 8 :
199 bt == VT_PTR ? PTR_SIZE : 0;
202 /* returns function return register from type */
203 static int R_RET(int t)
205 if (!is_float(t))
206 return REG_IRET;
207 #ifdef TCC_TARGET_X86_64
208 if ((t & VT_BTYPE) == VT_LDOUBLE)
209 return TREG_ST0;
210 #elif defined TCC_TARGET_RISCV64
211 if ((t & VT_BTYPE) == VT_LDOUBLE)
212 return REG_IRET;
213 #endif
214 return REG_FRET;
217 /* returns 2nd function return register, if any */
218 static int R2_RET(int t)
220 t &= VT_BTYPE;
221 #if PTR_SIZE == 4
222 if (t == VT_LLONG)
223 return REG_IRE2;
224 #elif defined TCC_TARGET_X86_64
225 if (t == VT_QLONG)
226 return REG_IRE2;
227 if (t == VT_QFLOAT)
228 return REG_FRE2;
229 #elif defined TCC_TARGET_RISCV64
230 if (t == VT_LDOUBLE)
231 return REG_IRE2;
232 #endif
233 return VT_CONST;
236 /* returns true for two-word types */
237 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
239 /* put function return registers to stack value */
240 static void PUT_R_RET(SValue *sv, int t)
242 sv->r = R_RET(t), sv->r2 = R2_RET(t);
245 /* returns function return register class for type t */
246 static int RC_RET(int t)
248 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
251 /* returns generic register class for type t */
252 static int RC_TYPE(int t)
254 if (!is_float(t))
255 return RC_INT;
256 #ifdef TCC_TARGET_X86_64
257 if ((t & VT_BTYPE) == VT_LDOUBLE)
258 return RC_ST0;
259 if ((t & VT_BTYPE) == VT_QFLOAT)
260 return RC_FRET;
261 #elif defined TCC_TARGET_RISCV64
262 if ((t & VT_BTYPE) == VT_LDOUBLE)
263 return RC_INT;
264 #endif
265 return RC_FLOAT;
268 /* returns 2nd register class corresponding to t and rc */
269 static int RC2_TYPE(int t, int rc)
271 if (!USING_TWO_WORDS(t))
272 return 0;
273 #ifdef RC_IRE2
274 if (rc == RC_IRET)
275 return RC_IRE2;
276 #endif
277 #ifdef RC_FRE2
278 if (rc == RC_FRET)
279 return RC_FRE2;
280 #endif
281 if (rc & RC_FLOAT)
282 return RC_FLOAT;
283 return RC_INT;
286 /* we use our own 'finite' function to avoid potential problems with
287 non standard math libs */
288 /* XXX: endianness dependent */
289 ST_FUNC int ieee_finite(double d)
291 int p[4];
292 memcpy(p, &d, sizeof(double));
293 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
296 /* compiling intel long double natively */
297 #if (defined __i386__ || defined __x86_64__) \
298 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
299 # define TCC_IS_NATIVE_387
300 #endif
302 ST_FUNC void test_lvalue(void)
304 if (!(vtop->r & VT_LVAL))
305 expect("lvalue");
308 ST_FUNC void check_vstack(void)
310 if (vtop != vstack - 1)
311 tcc_error("internal compiler error: vstack leak (%d)",
312 (int)(vtop - vstack + 1));
315 /* vstack debugging aid */
316 #if 0
317 void pv (const char *lbl, int a, int b)
319 int i;
320 for (i = a; i < a + b; ++i) {
321 SValue *p = &vtop[-i];
322 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
323 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
326 #endif
328 /* ------------------------------------------------------------------------- */
329 /* initialize vstack and types. This must be done also for tcc -E */
330 ST_FUNC void tccgen_init(TCCState *s1)
332 vtop = vstack - 1;
333 memset(vtop, 0, sizeof *vtop);
335 /* define some often used types */
336 int_type.t = VT_INT;
338 char_type.t = VT_BYTE;
339 if (s1->char_is_unsigned)
340 char_type.t |= VT_UNSIGNED;
341 char_pointer_type = char_type;
342 mk_pointer(&char_pointer_type);
344 func_old_type.t = VT_FUNC;
345 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
346 func_old_type.ref->f.func_call = FUNC_CDECL;
347 func_old_type.ref->f.func_type = FUNC_OLD;
348 #ifdef precedence_parser
349 init_prec();
350 #endif
351 cstr_new(&initstr);
354 ST_FUNC int tccgen_compile(TCCState *s1)
356 cur_text_section = NULL;
357 funcname = "";
358 func_ind = -1;
359 anon_sym = SYM_FIRST_ANOM;
360 const_wanted = 0;
361 nocode_wanted = 0x80000000;
362 local_scope = 0;
363 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
365 tcc_debug_start(s1);
366 tcc_tcov_start (s1);
367 #ifdef TCC_TARGET_ARM
368 arm_init(s1);
369 #endif
370 #ifdef INC_DEBUG
371 printf("%s: **** new file\n", file->filename);
372 #endif
373 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
374 next();
375 decl(VT_CONST);
376 gen_inline_functions(s1);
377 check_vstack();
378 /* end of translation unit info */
379 tcc_debug_end(s1);
380 tcc_tcov_end(s1);
381 return 0;
384 ST_FUNC void tccgen_finish(TCCState *s1)
386 cstr_free(&initstr);
387 free_inline_functions(s1);
388 sym_pop(&global_stack, NULL, 0);
389 sym_pop(&local_stack, NULL, 0);
390 /* free preprocessor macros */
391 free_defines(NULL);
392 /* free sym_pools */
393 dynarray_reset(&sym_pools, &nb_sym_pools);
394 sym_free_first = NULL;
397 /* ------------------------------------------------------------------------- */
398 ST_FUNC ElfSym *elfsym(Sym *s)
400 if (!s || !s->c)
401 return NULL;
402 return &((ElfSym *)symtab_section->data)[s->c];
405 /* apply storage attributes to Elf symbol */
406 ST_FUNC void update_storage(Sym *sym)
408 ElfSym *esym;
409 int sym_bind, old_sym_bind;
411 esym = elfsym(sym);
412 if (!esym)
413 return;
415 if (sym->a.visibility)
416 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
417 | sym->a.visibility;
419 if (sym->type.t & (VT_STATIC | VT_INLINE))
420 sym_bind = STB_LOCAL;
421 else if (sym->a.weak)
422 sym_bind = STB_WEAK;
423 else
424 sym_bind = STB_GLOBAL;
425 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
426 if (sym_bind != old_sym_bind) {
427 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
430 #ifdef TCC_TARGET_PE
431 if (sym->a.dllimport)
432 esym->st_other |= ST_PE_IMPORT;
433 if (sym->a.dllexport)
434 esym->st_other |= ST_PE_EXPORT;
435 #endif
437 #if 0
438 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
439 get_tok_str(sym->v, NULL),
440 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
441 sym->a.visibility,
442 sym->a.dllexport,
443 sym->a.dllimport
445 #endif
448 /* ------------------------------------------------------------------------- */
449 /* update sym->c so that it points to an external symbol in section
450 'section' with value 'value' */
452 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
453 addr_t value, unsigned long size,
454 int can_add_underscore)
456 int sym_type, sym_bind, info, other, t;
457 ElfSym *esym;
458 const char *name;
459 char buf1[256];
461 if (!sym->c) {
462 name = get_tok_str(sym->v, NULL);
463 t = sym->type.t;
464 if ((t & VT_BTYPE) == VT_FUNC) {
465 sym_type = STT_FUNC;
466 } else if ((t & VT_BTYPE) == VT_VOID) {
467 sym_type = STT_NOTYPE;
468 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
469 sym_type = STT_FUNC;
470 } else {
471 sym_type = STT_OBJECT;
473 if (t & (VT_STATIC | VT_INLINE))
474 sym_bind = STB_LOCAL;
475 else
476 sym_bind = STB_GLOBAL;
477 other = 0;
479 #ifdef TCC_TARGET_PE
480 if (sym_type == STT_FUNC && sym->type.ref) {
481 Sym *ref = sym->type.ref;
482 if (ref->a.nodecorate) {
483 can_add_underscore = 0;
485 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
486 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
487 name = buf1;
488 other |= ST_PE_STDCALL;
489 can_add_underscore = 0;
492 #endif
494 if (sym->asm_label) {
495 name = get_tok_str(sym->asm_label, NULL);
496 can_add_underscore = 0;
499 if (tcc_state->leading_underscore && can_add_underscore) {
500 buf1[0] = '_';
501 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
502 name = buf1;
505 info = ELFW(ST_INFO)(sym_bind, sym_type);
506 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
508 if (debug_modes)
509 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
511 } else {
512 esym = elfsym(sym);
513 esym->st_value = value;
514 esym->st_size = size;
515 esym->st_shndx = sh_num;
517 update_storage(sym);
520 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
522 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
523 return;
524 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
527 /* add a new relocation entry to symbol 'sym' in section 's' */
528 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
529 addr_t addend)
531 int c = 0;
533 if (nocode_wanted && s == cur_text_section)
534 return;
536 if (sym) {
537 if (0 == sym->c)
538 put_extern_sym(sym, NULL, 0, 0);
539 c = sym->c;
542 /* now we can add ELF relocation info */
543 put_elf_reloca(symtab_section, s, offset, type, c, addend);
546 #if PTR_SIZE == 4
547 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
549 greloca(s, sym, offset, type, 0);
551 #endif
553 /* ------------------------------------------------------------------------- */
554 /* symbol allocator */
555 static Sym *__sym_malloc(void)
557 Sym *sym_pool, *sym, *last_sym;
558 int i;
560 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
561 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
563 last_sym = sym_free_first;
564 sym = sym_pool;
565 for(i = 0; i < SYM_POOL_NB; i++) {
566 sym->next = last_sym;
567 last_sym = sym;
568 sym++;
570 sym_free_first = last_sym;
571 return last_sym;
574 static inline Sym *sym_malloc(void)
576 Sym *sym;
577 #ifndef SYM_DEBUG
578 sym = sym_free_first;
579 if (!sym)
580 sym = __sym_malloc();
581 sym_free_first = sym->next;
582 return sym;
583 #else
584 sym = tcc_malloc(sizeof(Sym));
585 return sym;
586 #endif
589 ST_INLN void sym_free(Sym *sym)
591 #ifndef SYM_DEBUG
592 sym->next = sym_free_first;
593 sym_free_first = sym;
594 #else
595 tcc_free(sym);
596 #endif
599 /* push, without hashing */
600 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
602 Sym *s;
604 s = sym_malloc();
605 memset(s, 0, sizeof *s);
606 s->v = v;
607 s->type.t = t;
608 s->c = c;
609 /* add in stack */
610 s->prev = *ps;
611 *ps = s;
612 return s;
615 /* find a symbol and return its associated structure. 's' is the top
616 of the symbol stack */
617 ST_FUNC Sym *sym_find2(Sym *s, int v)
619 while (s) {
620 if (s->v == v)
621 return s;
622 else if (s->v == -1)
623 return NULL;
624 s = s->prev;
626 return NULL;
629 /* structure lookup */
630 ST_INLN Sym *struct_find(int v)
632 v -= TOK_IDENT;
633 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
634 return NULL;
635 return table_ident[v]->sym_struct;
638 /* find an identifier */
639 ST_INLN Sym *sym_find(int v)
641 v -= TOK_IDENT;
642 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
643 return NULL;
644 return table_ident[v]->sym_identifier;
647 static int sym_scope(Sym *s)
649 if (IS_ENUM_VAL (s->type.t))
650 return s->type.ref->sym_scope;
651 else
652 return s->sym_scope;
655 /* push a given symbol on the symbol stack */
656 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
658 Sym *s, **ps;
659 TokenSym *ts;
661 if (local_stack)
662 ps = &local_stack;
663 else
664 ps = &global_stack;
665 s = sym_push2(ps, v, type->t, c);
666 s->type.ref = type->ref;
667 s->r = r;
668 /* don't record fields or anonymous symbols */
669 /* XXX: simplify */
670 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
671 /* record symbol in token array */
672 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
673 if (v & SYM_STRUCT)
674 ps = &ts->sym_struct;
675 else
676 ps = &ts->sym_identifier;
677 s->prev_tok = *ps;
678 *ps = s;
679 s->sym_scope = local_scope;
680 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
681 tcc_error("redeclaration of '%s'",
682 get_tok_str(v & ~SYM_STRUCT, NULL));
684 return s;
687 /* push a global identifier */
688 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
690 Sym *s, **ps;
691 s = sym_push2(&global_stack, v, t, c);
692 s->r = VT_CONST | VT_SYM;
693 /* don't record anonymous symbol */
694 if (v < SYM_FIRST_ANOM) {
695 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
696 /* modify the top most local identifier, so that sym_identifier will
697 point to 's' when popped; happens when called from inline asm */
698 while (*ps != NULL && (*ps)->sym_scope)
699 ps = &(*ps)->prev_tok;
700 s->prev_tok = *ps;
701 *ps = s;
703 return s;
706 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
707 pop them yet from the list, but do remove them from the token array. */
708 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
710 Sym *s, *ss, **ps;
711 TokenSym *ts;
712 int v;
714 s = *ptop;
715 while(s != b) {
716 ss = s->prev;
717 v = s->v;
718 /* remove symbol in token array */
719 /* XXX: simplify */
720 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
721 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
722 if (v & SYM_STRUCT)
723 ps = &ts->sym_struct;
724 else
725 ps = &ts->sym_identifier;
726 *ps = s->prev_tok;
728 if (!keep)
729 sym_free(s);
730 s = ss;
732 if (!keep)
733 *ptop = b;
736 /* ------------------------------------------------------------------------- */
737 static void vcheck_cmp(void)
739 /* cannot let cpu flags if other instruction are generated. Also
740 avoid leaving VT_JMP anywhere except on the top of the stack
741 because it would complicate the code generator.
743 Don't do this when nocode_wanted. vtop might come from
744 !nocode_wanted regions (see 88_codeopt.c) and transforming
745 it to a register without actually generating code is wrong
746 as their value might still be used for real. All values
747 we push under nocode_wanted will eventually be popped
748 again, so that the VT_CMP/VT_JMP value will be in vtop
749 when code is unsuppressed again. */
751 if (vtop->r == VT_CMP && !nocode_wanted)
752 gv(RC_INT);
755 static void vsetc(CType *type, int r, CValue *vc)
757 if (vtop >= vstack + (VSTACK_SIZE - 1))
758 tcc_error("memory full (vstack)");
759 vcheck_cmp();
760 vtop++;
761 vtop->type = *type;
762 vtop->r = r;
763 vtop->r2 = VT_CONST;
764 vtop->c = *vc;
765 vtop->sym = NULL;
768 ST_FUNC void vswap(void)
770 SValue tmp;
772 vcheck_cmp();
773 tmp = vtop[0];
774 vtop[0] = vtop[-1];
775 vtop[-1] = tmp;
778 /* pop stack value */
779 ST_FUNC void vpop(void)
781 int v;
782 v = vtop->r & VT_VALMASK;
783 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
784 /* for x86, we need to pop the FP stack */
785 if (v == TREG_ST0) {
786 o(0xd8dd); /* fstp %st(0) */
787 } else
788 #endif
789 if (v == VT_CMP) {
790 /* need to put correct jump if && or || without test */
791 gsym(vtop->jtrue);
792 gsym(vtop->jfalse);
794 vtop--;
797 /* push constant of type "type" with useless value */
798 static void vpush(CType *type)
800 vset(type, VT_CONST, 0);
803 /* push arbitrary 64bit constant */
804 static void vpush64(int ty, unsigned long long v)
806 CValue cval;
807 CType ctype;
808 ctype.t = ty;
809 ctype.ref = NULL;
810 cval.i = v;
811 vsetc(&ctype, VT_CONST, &cval);
814 /* push integer constant */
815 ST_FUNC void vpushi(int v)
817 vpush64(VT_INT, v);
820 /* push a pointer sized constant */
821 static void vpushs(addr_t v)
823 vpush64(VT_SIZE_T, v);
826 /* push long long constant */
827 static inline void vpushll(long long v)
829 vpush64(VT_LLONG, v);
832 ST_FUNC void vset(CType *type, int r, int v)
834 CValue cval;
835 cval.i = v;
836 vsetc(type, r, &cval);
839 static void vseti(int r, int v)
841 CType type;
842 type.t = VT_INT;
843 type.ref = NULL;
844 vset(&type, r, v);
847 ST_FUNC void vpushv(SValue *v)
849 if (vtop >= vstack + (VSTACK_SIZE - 1))
850 tcc_error("memory full (vstack)");
851 vtop++;
852 *vtop = *v;
855 static void vdup(void)
857 vpushv(vtop);
860 /* rotate n first stack elements to the bottom
861 I1 ... In -> I2 ... In I1 [top is right]
863 ST_FUNC void vrotb(int n)
865 int i;
866 SValue tmp;
868 vcheck_cmp();
869 tmp = vtop[-n + 1];
870 for(i=-n+1;i!=0;i++)
871 vtop[i] = vtop[i+1];
872 vtop[0] = tmp;
875 /* rotate the n elements before entry e towards the top
876 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
878 ST_FUNC void vrote(SValue *e, int n)
880 int i;
881 SValue tmp;
883 vcheck_cmp();
884 tmp = *e;
885 for(i = 0;i < n - 1; i++)
886 e[-i] = e[-i - 1];
887 e[-n + 1] = tmp;
890 /* rotate n first stack elements to the top
891 I1 ... In -> In I1 ... I(n-1) [top is right]
893 ST_FUNC void vrott(int n)
895 vrote(vtop, n);
898 /* ------------------------------------------------------------------------- */
899 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
901 /* called from generators to set the result from relational ops */
902 ST_FUNC void vset_VT_CMP(int op)
904 vtop->r = VT_CMP;
905 vtop->cmp_op = op;
906 vtop->jfalse = 0;
907 vtop->jtrue = 0;
910 /* called once before asking generators to load VT_CMP to a register */
911 static void vset_VT_JMP(void)
913 int op = vtop->cmp_op;
915 if (vtop->jtrue || vtop->jfalse) {
916 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
917 int inv = op & (op < 2); /* small optimization */
918 vseti(VT_JMP+inv, gvtst(inv, 0));
919 } else {
920 /* otherwise convert flags (rsp. 0/1) to register */
921 vtop->c.i = op;
922 if (op < 2) /* doesn't seem to happen */
923 vtop->r = VT_CONST;
927 /* Set CPU Flags, doesn't yet jump */
928 static void gvtst_set(int inv, int t)
930 int *p;
932 if (vtop->r != VT_CMP) {
933 vpushi(0);
934 gen_op(TOK_NE);
935 if (vtop->r != VT_CMP) /* must be VT_CONST then */
936 vset_VT_CMP(vtop->c.i != 0);
939 p = inv ? &vtop->jfalse : &vtop->jtrue;
940 *p = gjmp_append(*p, t);
943 /* Generate value test
945 * Generate a test for any value (jump, comparison and integers) */
946 static int gvtst(int inv, int t)
948 int op, x, u;
950 gvtst_set(inv, t);
951 t = vtop->jtrue, u = vtop->jfalse;
952 if (inv)
953 x = u, u = t, t = x;
954 op = vtop->cmp_op;
956 /* jump to the wanted target */
957 if (op > 1)
958 t = gjmp_cond(op ^ inv, t);
959 else if (op != inv)
960 t = gjmp(t);
961 /* resolve complementary jumps to here */
962 gsym(u);
964 vtop--;
965 return t;
968 /* generate a zero or nozero test */
969 static void gen_test_zero(int op)
971 if (vtop->r == VT_CMP) {
972 int j;
973 if (op == TOK_EQ) {
974 j = vtop->jfalse;
975 vtop->jfalse = vtop->jtrue;
976 vtop->jtrue = j;
977 vtop->cmp_op ^= 1;
979 } else {
980 vpushi(0);
981 gen_op(op);
985 /* ------------------------------------------------------------------------- */
986 /* push a symbol value of TYPE */
987 ST_FUNC void vpushsym(CType *type, Sym *sym)
989 CValue cval;
990 cval.i = 0;
991 vsetc(type, VT_CONST | VT_SYM, &cval);
992 vtop->sym = sym;
995 /* Return a static symbol pointing to a section */
996 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
998 int v;
999 Sym *sym;
1001 v = anon_sym++;
1002 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1003 sym->type.t |= VT_STATIC;
1004 put_extern_sym(sym, sec, offset, size);
1005 return sym;
1008 /* push a reference to a section offset by adding a dummy symbol */
1009 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1011 vpushsym(type, get_sym_ref(type, sec, offset, size));
1014 /* define a new external reference to a symbol 'v' of type 'u' */
1015 ST_FUNC Sym *external_global_sym(int v, CType *type)
1017 Sym *s;
1019 s = sym_find(v);
1020 if (!s) {
1021 /* push forward reference */
1022 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1023 s->type.ref = type->ref;
1024 } else if (IS_ASM_SYM(s)) {
1025 s->type.t = type->t | (s->type.t & VT_EXTERN);
1026 s->type.ref = type->ref;
1027 update_storage(s);
1029 return s;
1032 /* create an external reference with no specific type similar to asm labels.
1033 This avoids type conflicts if the symbol is used from C too */
1034 ST_FUNC Sym *external_helper_sym(int v)
1036 CType ct = { VT_ASM_FUNC, NULL };
1037 return external_global_sym(v, &ct);
1040 /* push a reference to an helper function (such as memmove) */
1041 ST_FUNC void vpush_helper_func(int v)
1043 vpushsym(&func_old_type, external_helper_sym(v));
1046 /* Merge symbol attributes. */
1047 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1049 if (sa1->aligned && !sa->aligned)
1050 sa->aligned = sa1->aligned;
1051 sa->packed |= sa1->packed;
1052 sa->weak |= sa1->weak;
1053 if (sa1->visibility != STV_DEFAULT) {
1054 int vis = sa->visibility;
1055 if (vis == STV_DEFAULT
1056 || vis > sa1->visibility)
1057 vis = sa1->visibility;
1058 sa->visibility = vis;
1060 sa->dllexport |= sa1->dllexport;
1061 sa->nodecorate |= sa1->nodecorate;
1062 sa->dllimport |= sa1->dllimport;
1065 /* Merge function attributes. */
1066 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1068 if (fa1->func_call && !fa->func_call)
1069 fa->func_call = fa1->func_call;
1070 if (fa1->func_type && !fa->func_type)
1071 fa->func_type = fa1->func_type;
1072 if (fa1->func_args && !fa->func_args)
1073 fa->func_args = fa1->func_args;
1074 if (fa1->func_noreturn)
1075 fa->func_noreturn = 1;
1076 if (fa1->func_ctor)
1077 fa->func_ctor = 1;
1078 if (fa1->func_dtor)
1079 fa->func_dtor = 1;
1082 /* Merge attributes. */
1083 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1085 merge_symattr(&ad->a, &ad1->a);
1086 merge_funcattr(&ad->f, &ad1->f);
1088 if (ad1->section)
1089 ad->section = ad1->section;
1090 if (ad1->alias_target)
1091 ad->alias_target = ad1->alias_target;
1092 if (ad1->asm_label)
1093 ad->asm_label = ad1->asm_label;
1094 if (ad1->attr_mode)
1095 ad->attr_mode = ad1->attr_mode;
1098 /* Merge some type attributes. */
1099 static void patch_type(Sym *sym, CType *type)
1101 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1102 if (!(sym->type.t & VT_EXTERN))
1103 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1104 sym->type.t &= ~VT_EXTERN;
1107 if (IS_ASM_SYM(sym)) {
1108 /* stay static if both are static */
1109 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1110 sym->type.ref = type->ref;
1113 if (!is_compatible_types(&sym->type, type)) {
1114 tcc_error("incompatible types for redefinition of '%s'",
1115 get_tok_str(sym->v, NULL));
1117 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1118 int static_proto = sym->type.t & VT_STATIC;
1119 /* warn if static follows non-static function declaration */
1120 if ((type->t & VT_STATIC) && !static_proto
1121 /* XXX this test for inline shouldn't be here. Until we
1122 implement gnu-inline mode again it silences a warning for
1123 mingw caused by our workarounds. */
1124 && !((type->t | sym->type.t) & VT_INLINE))
1125 tcc_warning("static storage ignored for redefinition of '%s'",
1126 get_tok_str(sym->v, NULL));
1128 /* set 'inline' if both agree or if one has static */
1129 if ((type->t | sym->type.t) & VT_INLINE) {
1130 if (!((type->t ^ sym->type.t) & VT_INLINE)
1131 || ((type->t | sym->type.t) & VT_STATIC))
1132 static_proto |= VT_INLINE;
1135 if (0 == (type->t & VT_EXTERN)) {
1136 struct FuncAttr f = sym->type.ref->f;
1137 /* put complete type, use static from prototype */
1138 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1139 sym->type.ref = type->ref;
1140 merge_funcattr(&sym->type.ref->f, &f);
1141 } else {
1142 sym->type.t &= ~VT_INLINE | static_proto;
1145 if (sym->type.ref->f.func_type == FUNC_OLD
1146 && type->ref->f.func_type != FUNC_OLD) {
1147 sym->type.ref = type->ref;
1150 } else {
1151 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1152 /* set array size if it was omitted in extern declaration */
1153 sym->type.ref->c = type->ref->c;
1155 if ((type->t ^ sym->type.t) & VT_STATIC)
1156 tcc_warning("storage mismatch for redefinition of '%s'",
1157 get_tok_str(sym->v, NULL));
1161 /* Merge some storage attributes. */
1162 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1164 if (type)
1165 patch_type(sym, type);
1167 #ifdef TCC_TARGET_PE
1168 if (sym->a.dllimport != ad->a.dllimport)
1169 tcc_error("incompatible dll linkage for redefinition of '%s'",
1170 get_tok_str(sym->v, NULL));
1171 #endif
1172 merge_symattr(&sym->a, &ad->a);
1173 if (ad->asm_label)
1174 sym->asm_label = ad->asm_label;
1175 update_storage(sym);
1178 /* copy sym to other stack */
1179 static Sym *sym_copy(Sym *s0, Sym **ps)
1181 Sym *s;
1182 s = sym_malloc(), *s = *s0;
1183 s->prev = *ps, *ps = s;
1184 if (s->v < SYM_FIRST_ANOM) {
1185 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1186 s->prev_tok = *ps, *ps = s;
1188 return s;
1191 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1192 static void sym_copy_ref(Sym *s, Sym **ps)
1194 int bt = s->type.t & VT_BTYPE;
1195 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1196 Sym **sp = &s->type.ref;
1197 for (s = *sp, *sp = NULL; s; s = s->next) {
1198 Sym *s2 = sym_copy(s, ps);
1199 sp = &(*sp = s2)->next;
1200 sym_copy_ref(s2, ps);
1205 /* define a new external reference to a symbol 'v' */
1206 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1208 Sym *s;
1210 /* look for global symbol */
1211 s = sym_find(v);
1212 while (s && s->sym_scope)
1213 s = s->prev_tok;
1215 if (!s) {
1216 /* push forward reference */
1217 s = global_identifier_push(v, type->t, 0);
1218 s->r |= r;
1219 s->a = ad->a;
1220 s->asm_label = ad->asm_label;
1221 s->type.ref = type->ref;
1222 /* copy type to the global stack */
1223 if (local_stack)
1224 sym_copy_ref(s, &global_stack);
1225 } else {
1226 patch_storage(s, ad, type);
1228 /* push variables on local_stack if any */
1229 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1230 s = sym_copy(s, &local_stack);
1231 return s;
1234 /* save registers up to (vtop - n) stack entry */
1235 ST_FUNC void save_regs(int n)
1237 SValue *p, *p1;
1238 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1239 save_reg(p->r);
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    /* n = 0: consider every entry on the value stack */
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
/* The value is stored once into a temporary stack slot; every stack
   entry that referenced r (as primary or secondary register) is then
   redirected to that slot. */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;                     /* not a machine register */
    if (nocode_wanted)
        return;
    l = 0;                          /* stack offset; 0 = not spilled yet */
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* an lvalue holds a pointer in the register; a function
                   designator is also an address */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
1303 #ifdef TCC_TARGET_ARM
1304 /* find a register of class 'rc2' with at most one reference on stack.
1305 * If none, call get_reg(rc) */
1306 ST_FUNC int get_reg_ex(int rc, int rc2)
1308 int r;
1309 SValue *p;
1311 for(r=0;r<NB_REGS;r++) {
1312 if (reg_classes[r] & rc2) {
1313 int n;
1314 n=0;
1315 for(p = vstack; p <= vtop; p++) {
1316 if ((p->r & VT_VALMASK) == r ||
1317 p->r2 == r)
1318 n++;
1320 if (n <= 1)
1321 return r;
1324 return get_reg(rc);
1326 #endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            if (nocode_wanted)
                return r;       /* no code emitted: any register will do */
            /* a register is free iff no value-stack entry references it */
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1368 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1369 static int get_temp_local_var(int size,int align){
1370 int i;
1371 struct temp_local_variable *temp_var;
1372 int found_var;
1373 SValue *p;
1374 int r;
1375 char free;
1376 char found;
1377 found=0;
1378 for(i=0;i<nb_temp_local_vars;i++){
1379 temp_var=&arr_temp_local_vars[i];
1380 if(temp_var->size<size||align!=temp_var->align){
1381 continue;
1383 /*check if temp_var is free*/
1384 free=1;
1385 for(p=vstack;p<=vtop;p++) {
1386 r=p->r&VT_VALMASK;
1387 if(r==VT_LOCAL||r==VT_LLOCAL){
1388 if(p->c.i==temp_var->location){
1389 free=0;
1390 break;
1394 if(free){
1395 found_var=temp_var->location;
1396 found=1;
1397 break;
1400 if(!found){
1401 loc = (loc - size) & -align;
1402 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1403 temp_var=&arr_temp_local_vars[i];
1404 temp_var->location=loc;
1405 temp_var->size=size;
1406 temp_var->align=align;
1407 nb_temp_local_vars++;
1409 found_var=loc;
1411 return found_var;
1414 static void clear_temp_local_var_list(){
1415 nb_temp_local_vars=0;
1418 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1419 if needed */
1420 static void move_reg(int r, int s, int t)
1422 SValue sv;
1424 if (r != s) {
1425 save_reg(r);
1426 sv.type.t = t;
1427 sv.type.ref = NULL;
1428 sv.r = s;
1429 sv.c.i = 0;
1430 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
1443 #ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition */
/* Calls __bound_ptr_add(ptr, offset) and leaves the checked pointer
   (REG_IRET) on vtop, remembering the call's relocation offset in
   vtop->c.i so gen_bounded_ptr_deref() can patch it later. */
static void gen_bounded_ptr_add(void)
{
    /* a VT_LOCAL base must be duplicated: gfunc_call consumes it */
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
/* Rewrites the relocation recorded by gen_bounded_ptr_add() to target
   the size-specific __bound_ptr_indirN helper instead. */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    size = type_size(&vtop->type, &align);
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);              /* offset 0: just bound-check the address */
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check every argument that still needs it, rotating each
       one to the top, checking, and rotating it back */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    /* special-case some callees known to the bounds runtime */
    sv = vtop - nb_args;
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            /* register the jmp_buf with the bounds runtime so a later
               longjmp can unwind the bounds stack */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev) */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        /* skip anonymous symbols and anything not living on the frame */
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info: (frame offset, size) pair */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;
            bounds_ptr[1] = size;
        }
    }
}
1581 #endif
/* Wrapper around sym_pop, that potentially also registers local bounds. */
/* Pops local symbols down to 'b'; when 'keep' is set the symbols stay
   allocated (used for inline functions). */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* Advance the address on vtop by 'o' bytes and turn it into an
   unsigned-char lvalue (helper for single-byte bitfield access). */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushs(o);
    gen_op('+');
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    vtop->r |= VT_LVAL;
}
/* single-byte load mode for packed or otherwise unaligned bitfields */
/* Assembles the field byte by byte into an accumulator of type 'type',
   then sign-extends if the field is signed.  The trailing comments
   show the value-stack shape (B = accumulator, X/Y = temporaries). */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        /* n = number of field bits present in the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign extend: shift the field to the top, arithmetic-shift back */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields */
/* Writes the field byte by byte, merging each partial byte with the
   bits that must be preserved.  Trailing comments show the value-stack
   shape (X = value, B = address, V = per-byte value). */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* constant source values can be re-pushed cheaply with vdup() */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        /* n = number of field bits that land in the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* partial byte: merge new bits with the preserved ones */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1673 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1675 int t;
1676 if (0 == sv->type.ref)
1677 return 0;
1678 t = sv->type.ref->auxtype;
1679 if (t != -1 && t != VT_STRUCT) {
1680 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1681 sv->r |= VT_LVAL;
1683 return t;
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
/* Returns the register holding the value.  Bitfields are first
   extracted (shift/mask or packed byte loads), float constants are
   materialized in rodata, and types needing two words get a second
   register in class rc2. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* unaligned/packed field: byte-wise extraction */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {
            if (!r_ok)
                r = get_reg(rc);
            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = VT_PTRDIFF_T;
                    gaddrof();
                    vpushs(PTR_SIZE);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            /* gv(rc2) spilled the first value: regenerate it */
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
1864 #if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
/* On exit the two top stack entries hold the low word (vtop[-1]) and
   high word (vtop[0]) as VT_INT values with the original signedness. */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: high word is just the upper 32 bits */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* simple memory lvalue: high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* in registers: split the r/r2 pair into two one-word values */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
1885 #endif
1887 #if PTR_SIZE == 4
/* build a long long from two ints */
/* vtop[-1] = low word, vtop[0] = high word; result of type 't'. */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
1896 #endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* force bitfield extraction before splitting into words */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate word by word, then rebuild both long longs */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
1936 #if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
/* 32-bit targets only: each 64-bit operand is handled as a (low,high)
   word pair; division/modulo and variable shifts call libgcc-style
   helper functions instead. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod returns the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: low = umull(L1,L2), high gets the two
               cross products added in */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops: low and high halves are independent */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* SAR fills the vacated word with the sign */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
2167 #endif
/* Signed 64-bit division performed on unsigned representations:
   divide the magnitudes, then restore the quotient's sign.  Avoids
   signed-overflow UB in the compiler itself (e.g. INT64_MIN / -1). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? (uint64_t)-a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? (uint64_t)-b : b;   /* |b| */
    uint64_t q = ua / ub;
    /* result is negative iff operand signs differ */
    return ((a ^ b) >> 63) ? (uint64_t)-q : q;
}
/* Signed 64-bit "less than" on unsigned representations: flipping the
   sign bit maps the signed ordering onto the unsigned one. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt */
/* Folds const OP const completely; otherwise applies algebraic
   simplifications (nop operands, strength reduction to shifts,
   symbol+constant addends) before falling back to the backend's
   gen_opi()/gen_opl(). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int nonconst = (v1->r | v2->r) & VT_NONCONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31; /* shift-count mask per width */

    /* normalize 32-bit operands: truncate and sign-extend as needed */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted && !(nocode_wanted & unevalmask))
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift emulated on the unsigned value */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded result to the operand width */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        nonconst = VT_NONCONST;
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                /* power of two: compute log2 and swap the operator */
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31. Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
    if (vtop->r == VT_CONST)
        vtop->r |= nonconst;
}
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
/* x86 backends implement FP negation inside gen_opf() */
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation. We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    /* XOR 0x80 into the last byte of the in-memory value.
       NOTE(review): assumes the sign bit lives in the highest-addressed
       byte (little-endian layout) -- confirm for big-endian targets. */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    if (op == TOK_NEG)
        v1 = v2;        /* unary: single operand on vtop */

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        /* NOTE(review): the condition below reduces to
           (!finite(f1) && finite(f2)); a symmetric check of both
           operands may have been intended -- confirm against upstream. */
        if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                union { float f; unsigned u; } x1, x2, y;
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold. */
                if (!const_wanted)
                    goto general_case;
                /* the run-time result of 0.0/0.0 on x87, also of other compilers
                   when used to compile the f1 /= f2 below, would be -nan */
                x1.f = f1, x2.f = f2;
                if (f1 == 0.0)
                    y.u = 0x7fc00000; /* nan */
                else
                    y.u = 0x7f800000; /* infinity */
                y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
                f1 = y.f;
                break;
            }
            f1 /= f2;
            break;
        case TOK_NEG:
            f1 = -f1;
            goto unary_result;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        vtop--;     /* binary ops consume one operand */
    unary_result:
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
    } else {
    general_case:
        if (op == TOK_NEG) {
            gen_negf(op);
        } else {
            gen_opf(op);
        }
    }
}
2453 /* print a type. If 'varstr' is not NULL, then the variable is also
2454 printed in the type */
2455 /* XXX: union */
2456 /* XXX: add array and function pointers */
/* Renders a human-readable C declaration for 'type' into buf[buf_size],
   optionally embedding the declarator text 'varstr'.  Used for diagnostics.
   NOTE(review): this chunk is a web scrape with line-number pollution and
   missing brace-only lines; code left byte-identical. */
2457 static void type_to_str(char *buf, int buf_size,
2458 CType *type, const char *varstr)
2460 int bt, v, t;
2461 Sym *s, *sa;
2462 char buf1[256];
2463 const char *tstr;
2465 t = type->t;
2466 bt = t & VT_BTYPE;
2467 buf[0] = '\0';
/* storage-class prefixes first */
2469 if (t & VT_EXTERN)
2470 pstrcat(buf, buf_size, "extern ");
2471 if (t & VT_STATIC)
2472 pstrcat(buf, buf_size, "static ");
2473 if (t & VT_TYPEDEF)
2474 pstrcat(buf, buf_size, "typedef ");
2475 if (t & VT_INLINE)
2476 pstrcat(buf, buf_size, "inline ");
/* for pointers, const/volatile are printed on the pointed-to part below */
2477 if (bt != VT_PTR) {
2478 if (t & VT_VOLATILE)
2479 pstrcat(buf, buf_size, "volatile ");
2480 if (t & VT_CONSTANT)
2481 pstrcat(buf, buf_size, "const ");
/* explicit signedness only printed when meaningful (char, or unsigned ints) */
2483 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2484 || ((t & VT_UNSIGNED)
2485 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2486 && !IS_ENUM(t)
2488 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
/* advance past the prefix so later pstrcat calls see remaining space */
2490 buf_size -= strlen(buf);
2491 buf += strlen(buf);
2493 switch(bt) {
2494 case VT_VOID:
2495 tstr = "void";
2496 goto add_tstr;
2497 case VT_BOOL:
2498 tstr = "_Bool";
2499 goto add_tstr;
2500 case VT_BYTE:
2501 tstr = "char";
2502 goto add_tstr;
2503 case VT_SHORT:
2504 tstr = "short";
2505 goto add_tstr;
2506 case VT_INT:
2507 tstr = "int";
2508 goto maybe_long;
2509 case VT_LLONG:
2510 tstr = "long long";
2511 maybe_long:
2512 if (t & VT_LONG)
2513 tstr = "long";
2514 if (!IS_ENUM(t))
2515 goto add_tstr;
2516 tstr = "enum ";
2517 goto tstruct;
2518 case VT_FLOAT:
2519 tstr = "float";
2520 goto add_tstr;
2521 case VT_DOUBLE:
2522 tstr = "double";
2523 if (!(t & VT_LONG))
2524 goto add_tstr;
/* fallthrough: double + VT_LONG is long double */
2525 case VT_LDOUBLE:
2526 tstr = "long double";
2527 add_tstr:
2528 pstrcat(buf, buf_size, tstr);
2529 break;
2530 case VT_STRUCT:
2531 tstr = "struct ";
2532 if (IS_UNION(t))
2533 tstr = "union ";
2534 tstruct:
2535 pstrcat(buf, buf_size, tstr);
2536 v = type->ref->v & ~SYM_STRUCT;
2537 if (v >= SYM_FIRST_ANOM)
2538 pstrcat(buf, buf_size, "<anonymous>");
2539 else
2540 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2541 break;
2542 case VT_FUNC:
/* recurse: build "(args)" declarator then print the return type around it */
2543 s = type->ref;
2544 buf1[0]=0;
2545 if (varstr && '*' == *varstr) {
2546 pstrcat(buf1, sizeof(buf1), "(");
2547 pstrcat(buf1, sizeof(buf1), varstr);
2548 pstrcat(buf1, sizeof(buf1), ")");
2550 pstrcat(buf1, buf_size, "(");
2551 sa = s->next;
2552 while (sa != NULL) {
2553 char buf2[256];
2554 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2555 pstrcat(buf1, sizeof(buf1), buf2);
2556 sa = sa->next;
2557 if (sa)
2558 pstrcat(buf1, sizeof(buf1), ", ");
2560 if (s->f.func_type == FUNC_ELLIPSIS)
2561 pstrcat(buf1, sizeof(buf1), ", ...");
2562 pstrcat(buf1, sizeof(buf1), ")");
2563 type_to_str(buf, buf_size, &s->type, buf1);
2564 goto no_var;
2565 case VT_PTR:
2566 s = type->ref;
2567 if (t & (VT_ARRAY|VT_VLA)) {
2568 if (varstr && '*' == *varstr)
2569 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c)
2570 else
2571 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2572 type_to_str(buf, buf_size, &s->type, buf1);
2573 goto no_var;
2575 pstrcpy(buf1, sizeof(buf1), "*");
2576 if (t & VT_CONSTANT)
2577 pstrcat(buf1, buf_size, "const ");
2578 if (t & VT_VOLATILE)
2579 pstrcat(buf1, buf_size, "volatile ");
2580 if (varstr)
2581 pstrcat(buf1, sizeof(buf1), varstr);
2582 type_to_str(buf, buf_size, &s->type, buf1);
2583 goto no_var;
2585 if (varstr) {
2586 pstrcat(buf, buf_size, " ");
2587 pstrcat(buf, buf_size, varstr);
2589 no_var: ;
2592 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2594 char buf1[256], buf2[256];
2595 type_to_str(buf1, sizeof(buf1), st, NULL);
2596 type_to_str(buf2, sizeof(buf2), dt, NULL);
2597 tcc_error(fmt, buf1, buf2);
2600 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2602 char buf1[256], buf2[256];
2603 type_to_str(buf1, sizeof(buf1), st, NULL);
2604 type_to_str(buf2, sizeof(buf2), dt, NULL);
2605 tcc_warning(fmt, buf1, buf2);
2608 static int pointed_size(CType *type)
2610 int align;
2611 return type_size(pointed_type(type), &align);
2614 static inline int is_null_pointer(SValue *p)
2616 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2617 return 0;
2618 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2619 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2620 ((p->type.t & VT_BTYPE) == VT_PTR &&
2621 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2622 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2623 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2627 /* compare function types. OLD functions match any new functions */
2628 static int is_compatible_func(CType *type1, CType *type2)
2630 Sym *s1, *s2;
2632 s1 = type1->ref;
2633 s2 = type2->ref;
2634 if (s1->f.func_call != s2->f.func_call)
2635 return 0;
2636 if (s1->f.func_type != s2->f.func_type
2637 && s1->f.func_type != FUNC_OLD
2638 && s2->f.func_type != FUNC_OLD)
2639 return 0;
2640 for (;;) {
2641 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2642 return 0;
2643 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2644 return 1;
2645 s1 = s1->next;
2646 s2 = s2->next;
2647 if (!s1)
2648 return !s2;
2649 if (!s2)
2650 return 0;
2654 /* return true if type1 and type2 are the same. If unqualified is
2655 true, qualifiers on the types are ignored.
2657 static int compare_types(CType *type1, CType *type2, int unqualified)
2659 int bt1, t1, t2;
2661 t1 = type1->t & VT_TYPE;
2662 t2 = type2->t & VT_TYPE;
2663 if (unqualified) {
2664 /* strip qualifiers before comparing */
2665 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2666 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2669 /* Default Vs explicit signedness only matters for char */
2670 if ((t1 & VT_BTYPE) != VT_BYTE) {
2671 t1 &= ~VT_DEFSIGN;
2672 t2 &= ~VT_DEFSIGN;
2674 /* XXX: bitfields ? */
2675 if (t1 != t2)
2676 return 0;
2678 if ((t1 & VT_ARRAY)
2679 && !(type1->ref->c < 0
2680 || type2->ref->c < 0
2681 || type1->ref->c == type2->ref->c))
2682 return 0;
2684 /* test more complicated cases */
2685 bt1 = t1 & VT_BTYPE;
2686 if (bt1 == VT_PTR) {
2687 type1 = pointed_type(type1);
2688 type2 = pointed_type(type2);
2689 return is_compatible_types(type1, type2);
2690 } else if (bt1 == VT_STRUCT) {
2691 return (type1->ref == type2->ref);
2692 } else if (bt1 == VT_FUNC) {
2693 return is_compatible_func(type1, type2);
2694 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2695 /* If both are enums then they must be the same, if only one is then
2696 t1 and t2 must be equal, which was checked above already. */
2697 return type1->ref == type2->ref;
2698 } else {
2699 return 1;
2703 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2704 type is stored in DEST if non-null (except for pointer plus/minus) . */
/* Returns 1 when the operand types are combinable for OP, 0 otherwise.
   NOTE(review): scraped text is missing some brace-only lines (gaps in the
   embedded numbering); the surviving code is left byte-identical. */
2705 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
2707 CType *type1 = &op1->type, *type2 = &op2->type, type;
2708 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
2709 int ret = 1;
2711 type.t = VT_VOID;
2712 type.ref = NULL;
2714 if (bt1 == VT_VOID || bt2 == VT_VOID) {
2715 ret = op == '?' ? 1 : 0;
2716 /* NOTE: as an extension, we accept void on only one side */
2717 type.t = VT_VOID;
2718 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2719 if (op == '+') ; /* Handled in caller */
2720 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2721 /* If one is a null ptr constant the result type is the other. */
2722 else if (is_null_pointer (op2)) type = *type1;
2723 else if (is_null_pointer (op1)) type = *type2;
2724 else if (bt1 != bt2) {
2725 /* accept comparison or cond-expr between pointer and integer
2726 with a warning */
2727 if ((op == '?' || TOK_ISCOND(op))
2728 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
2729 tcc_warning("pointer/integer mismatch in %s",
2730 op == '?' ? "conditional expression" : "comparison");
2731 else if (op != '-' || !is_integer_btype(bt2))
2732 ret = 0;
2733 type = *(bt1 == VT_PTR ? type1 : type2);
2734 } else {
/* both operands are pointers: check/merge the pointed-to types */
2735 CType *pt1 = pointed_type(type1);
2736 CType *pt2 = pointed_type(type2);
2737 int pbt1 = pt1->t & VT_BTYPE;
2738 int pbt2 = pt2->t & VT_BTYPE;
2739 int newquals, copied = 0;
2740 if (pbt1 != VT_VOID && pbt2 != VT_VOID
2741 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
2742 if (op != '?' && !TOK_ISCOND(op))
2743 ret = 0;
2744 else
2745 type_incompatibility_warning(type1, type2,
2746 op == '?'
2747 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2748 : "pointer type mismatch in comparison('%s' and '%s')");
2750 if (op == '?') {
2751 /* pointers to void get preferred, otherwise the
2752 pointed to types minus qualifs should be compatible */
2753 type = *((pbt1 == VT_VOID) ? type1 : type2);
2754 /* combine qualifs */
2755 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
2756 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
2757 & newquals)
2759 /* copy the pointer target symbol */
2760 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2761 0, type.ref->c);
2762 copied = 1;
2763 pointed_type(&type)->t |= newquals;
2765 /* pointers to incomplete arrays get converted to
2766 pointers to completed ones if possible */
2767 if (pt1->t & VT_ARRAY
2768 && pt2->t & VT_ARRAY
2769 && pointed_type(&type)->ref->c < 0
2770 && (pt1->ref->c > 0 || pt2->ref->c > 0))
2772 if (!copied)
2773 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2774 0, type.ref->c);
2775 pointed_type(&type)->ref =
2776 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
2777 0, pointed_type(&type)->ref->c);
2778 pointed_type(&type)->ref->c =
2779 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
/* pointer comparison results are an integer of pointer width */
2783 if (TOK_ISCOND(op))
2784 type.t = VT_SIZE_T;
2785 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2786 if (op != '?' || !compare_types(type1, type2, 1))
2787 ret = 0;
2788 type = *type1;
2789 } else if (is_float(bt1) || is_float(bt2)) {
2790 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2791 type.t = VT_LDOUBLE;
2792 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2793 type.t = VT_DOUBLE;
2794 } else {
2795 type.t = VT_FLOAT;
2797 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2798 /* cast to biggest op */
2799 type.t = VT_LLONG | VT_LONG;
2800 if (bt1 == VT_LLONG)
2801 type.t &= t1;
2802 if (bt2 == VT_LLONG)
2803 type.t &= t2;
2804 /* convert to unsigned if it does not fit in a long long */
2805 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2806 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2807 type.t |= VT_UNSIGNED;
2808 } else {
2809 /* integer operations */
2810 type.t = VT_INT | (VT_LONG & (t1 | t2));
2811 /* convert to unsigned if it does not fit in an integer */
2812 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2813 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2814 type.t |= VT_UNSIGNED;
2816 if (dest)
2817 *dest = type;
2818 return ret;
2821 /* generic gen_op: handles types problems */
/* Pops the two top stack values, applies binary operator 'op' with the
   usual C conversions (pointer arithmetic, arithmetic promotions), and
   pushes the result.  NOTE(review): scrape is missing brace-only lines;
   code left byte-identical. */
2822 ST_FUNC void gen_op(int op)
2824 int t1, t2, bt1, bt2, t;
2825 CType type1, combtype;
2827 redo:
2828 t1 = vtop[-1].type.t;
2829 t2 = vtop[0].type.t;
2830 bt1 = t1 & VT_BTYPE;
2831 bt2 = t2 & VT_BTYPE;
/* functions decay to pointers before any operation */
2833 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2834 if (bt2 == VT_FUNC) {
2835 mk_pointer(&vtop->type);
2836 gaddrof();
2838 if (bt1 == VT_FUNC) {
2839 vswap();
2840 mk_pointer(&vtop->type);
2841 gaddrof();
2842 vswap();
2844 goto redo;
2845 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
2846 tcc_error_noabort("invalid operand types for binary operation");
2847 vpop();
2848 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2849 /* at least one operand is a pointer */
2850 /* relational op: must be both pointers */
2851 int align;
2852 if (TOK_ISCOND(op))
2853 goto std_op;
2854 /* if both pointers, then it must be the '-' op */
2855 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2856 if (op != '-')
2857 tcc_error("cannot use pointers here");
/* ptr - ptr: subtract then divide by the element size */
2858 vpush_type_size(pointed_type(&vtop[-1].type), &align);
2859 vrott(3);
2860 gen_opic(op);
2861 vtop->type.t = VT_PTRDIFF_T;
2862 vswap();
2863 gen_op(TOK_PDIV);
2864 } else {
2865 /* exactly one pointer : must be '+' or '-'. */
2866 if (op != '-' && op != '+')
2867 tcc_error("cannot use pointers here");
2868 /* Put pointer as first operand */
2869 if (bt2 == VT_PTR) {
2870 vswap();
2871 t = t1, t1 = t2, t2 = t;
2873 #if PTR_SIZE == 4
2874 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2875 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2876 gen_cast_s(VT_INT);
2877 #endif
2878 type1 = vtop[-1].type;
/* scale the integer operand by the pointed-to size */
2879 vpush_type_size(pointed_type(&vtop[-1].type), &align);
2880 gen_op('*');
2881 #ifdef CONFIG_TCC_BCHECK
2882 if (tcc_state->do_bounds_check && !const_wanted) {
2883 /* if bounded pointers, we generate a special code to
2884 test bounds */
2885 if (op == '-') {
2886 vpushi(0);
2887 vswap();
2888 gen_op('-');
2890 gen_bounded_ptr_add();
2891 } else
2892 #endif
2894 gen_opic(op);
2896 type1.t &= ~(VT_ARRAY|VT_VLA);
2897 /* put again type if gen_opic() swaped operands */
2898 vtop->type = type1;
2900 } else {
2901 /* floats can only be used for a few operations */
2902 if (is_float(combtype.t)
2903 && op != '+' && op != '-' && op != '*' && op != '/'
2904 && !TOK_ISCOND(op))
2905 tcc_error("invalid operands for binary operation");
2906 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
/* shift result type depends only on the left operand */
2907 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2908 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2909 t |= VT_UNSIGNED;
2910 t |= (VT_LONG & t1);
2911 combtype.t = t;
2913 std_op:
2914 t = t2 = combtype.t;
2915 /* XXX: currently, some unsigned operations are explicit, so
2916 we modify them here */
2917 if (t & VT_UNSIGNED) {
2918 if (op == TOK_SAR)
2919 op = TOK_SHR;
2920 else if (op == '/')
2921 op = TOK_UDIV;
2922 else if (op == '%')
2923 op = TOK_UMOD;
2924 else if (op == TOK_LT)
2925 op = TOK_ULT;
2926 else if (op == TOK_GT)
2927 op = TOK_UGT;
2928 else if (op == TOK_LE)
2929 op = TOK_ULE;
2930 else if (op == TOK_GE)
2931 op = TOK_UGE;
2933 vswap();
2934 gen_cast_s(t);
2935 vswap();
2936 /* special case for shifts and long long: we keep the shift as
2937 an integer */
2938 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2939 t2 = VT_INT;
2940 gen_cast_s(t2);
2941 if (is_float(t))
2942 gen_opif(op);
2943 else
2944 gen_opic(op);
2945 if (TOK_ISCOND(op)) {
2946 /* relational op: the result is an int */
2947 vtop->type.t = VT_INT;
2948 } else {
2949 vtop->type.t = t;
2952 // Make sure that we have converted to an rvalue:
2953 if (vtop->r & VT_LVAL)
2954 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2957 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2958 #define gen_cvt_itof1 gen_cvt_itof
2959 #else
2960 /* generic itof for unsigned long long case */
2961 static void gen_cvt_itof1(int t)
2963 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2964 (VT_LLONG | VT_UNSIGNED)) {
2966 if (t == VT_FLOAT)
2967 vpush_helper_func(TOK___floatundisf);
2968 #if LDOUBLE_SIZE != 8
2969 else if (t == VT_LDOUBLE)
2970 vpush_helper_func(TOK___floatundixf);
2971 #endif
2972 else
2973 vpush_helper_func(TOK___floatundidf);
2974 vrott(2);
2975 gfunc_call(1);
2976 vpushi(0);
2977 PUT_R_RET(vtop, t);
2978 } else {
2979 gen_cvt_itof(t);
2982 #endif
2984 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2985 #define gen_cvt_ftoi1 gen_cvt_ftoi
2986 #else
2987 /* generic ftoi for unsigned long long case */
2988 static void gen_cvt_ftoi1(int t)
2990 int st;
2991 if (t == (VT_LLONG | VT_UNSIGNED)) {
2992 /* not handled natively */
2993 st = vtop->type.t & VT_BTYPE;
2994 if (st == VT_FLOAT)
2995 vpush_helper_func(TOK___fixunssfdi);
2996 #if LDOUBLE_SIZE != 8
2997 else if (st == VT_LDOUBLE)
2998 vpush_helper_func(TOK___fixunsxfdi);
2999 #endif
3000 else
3001 vpush_helper_func(TOK___fixunsdfdi);
3002 vrott(2);
3003 gfunc_call(1);
3004 vpushi(0);
3005 PUT_R_RET(vtop, t);
3006 } else {
3007 gen_cvt_ftoi(t);
3010 #endif
3012 /* special delayed cast for char/short */
3013 static void force_charshort_cast(void)
3015 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3016 int dbt = vtop->type.t;
3017 vtop->r &= ~VT_MUSTCAST;
3018 vtop->type.t = sbt;
3019 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3020 vtop->type.t = dbt;
3023 static void gen_cast_s(int t)
3025 CType type;
3026 type.t = t;
3027 type.ref = NULL;
3028 gen_cast(&type);
3031 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
/* Performs constant folding when the value is a compile-time constant,
   otherwise emits conversion code.  NOTE(review): scraped text is missing
   some brace-only lines; code left byte-identical. */
3032 static void gen_cast(CType *type)
3034 int sbt, dbt, sf, df, c;
3035 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3037 /* special delayed cast for char/short */
3038 if (vtop->r & VT_MUSTCAST)
3039 force_charshort_cast();
3041 /* bitfields first get cast to ints */
3042 if (vtop->type.t & VT_BITFIELD)
3043 gv(RC_INT);
3045 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3046 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3047 if (sbt == VT_FUNC)
3048 sbt = VT_PTR;
3050 again:
3051 if (sbt != dbt) {
3052 sf = is_float(sbt);
3053 df = is_float(dbt);
3054 dbt_bt = dbt & VT_BTYPE;
3055 sbt_bt = sbt & VT_BTYPE;
3056 if (dbt_bt == VT_VOID)
3057 goto done;
3058 if (sbt_bt == VT_VOID) {
3059 error:
3060 cast_error(&vtop->type, type);
/* 'c' is true when the value is a plain compile-time constant */
3063 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3064 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3065 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3066 #endif
3067 if (c) {
3068 /* constant case: we can do it now */
3069 /* XXX: in ISOC, cannot do it if error in convert */
3070 if (sbt == VT_FLOAT)
3071 vtop->c.ld = vtop->c.f;
3072 else if (sbt == VT_DOUBLE)
3073 vtop->c.ld = vtop->c.d;
3075 if (df) {
3076 if (sbt_bt == VT_LLONG) {
3077 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3078 vtop->c.ld = vtop->c.i;
3079 else
3080 vtop->c.ld = -(long double)-vtop->c.i;
3081 } else if(!sf) {
3082 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3083 vtop->c.ld = (uint32_t)vtop->c.i;
3084 else
3085 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3088 if (dbt == VT_FLOAT)
3089 vtop->c.f = (float)vtop->c.ld;
3090 else if (dbt == VT_DOUBLE)
3091 vtop->c.d = (double)vtop->c.ld;
3092 } else if (sf && dbt == VT_BOOL) {
3093 vtop->c.i = (vtop->c.ld != 0);
3094 } else {
3095 if(sf)
3096 vtop->c.i = vtop->c.ld;
3097 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3099 else if (sbt & VT_UNSIGNED)
3100 vtop->c.i = (uint32_t)vtop->c.i;
3101 else
3102 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3104 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3106 else if (dbt == VT_BOOL)
3107 vtop->c.i = (vtop->c.i != 0);
3108 else {
/* mask to target width, then sign-extend when the target is signed */
3109 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3110 dbt_bt == VT_SHORT ? 0xffff :
3111 0xffffffff;
3112 vtop->c.i &= m;
3113 if (!(dbt & VT_UNSIGNED))
3114 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3117 goto done;
3119 } else if (dbt == VT_BOOL
3120 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3121 == (VT_CONST | VT_SYM)) {
3122 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3123 vtop->r = VT_CONST;
3124 vtop->c.i = 1;
3125 goto done;
3128 /* cannot generate code for global or static initializers */
3129 if (STATIC_DATA_WANTED)
3130 goto done;
3132 /* non constant case: generate code */
3133 if (dbt == VT_BOOL) {
3134 gen_test_zero(TOK_NE);
3135 goto done;
3138 if (sf || df) {
3139 if (sf && df) {
3140 /* convert from fp to fp */
3141 gen_cvt_ftof(dbt);
3142 } else if (df) {
3143 /* convert int to fp */
3144 gen_cvt_itof1(dbt);
3145 } else {
3146 /* convert fp to int */
3147 sbt = dbt;
3148 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3149 sbt = VT_INT;
3150 gen_cvt_ftoi1(sbt);
3151 goto again; /* may need char/short cast */
3153 goto done;
3156 ds = btype_size(dbt_bt);
3157 ss = btype_size(sbt_bt);
3158 if (ds == 0 || ss == 0)
3159 goto error;
3161 if (IS_ENUM(type->t) && type->ref->c < 0)
3162 tcc_error("cast to incomplete type");
3164 /* same size and no sign conversion needed */
3165 if (ds == ss && ds >= 4)
3166 goto done;
3167 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3168 tcc_warning("cast between pointer and integer of different size");
3169 if (sbt_bt == VT_PTR) {
3170 /* put integer type to allow logical operations below */
3171 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3175 /* processor allows { int a = 0, b = *(char*)&a; }
3176 That means that if we cast to less width, we can just
3177 change the type and read it still later. */
3178 #define ALLOW_SUBTYPE_ACCESS 1
3180 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3181 /* value still in memory */
3182 if (ds <= ss)
3183 goto done;
3184 /* ss <= 4 here */
3185 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3186 gv(RC_INT);
3187 goto done; /* no 64bit envolved */
3190 gv(RC_INT);
3192 trunc = 0;
3193 #if PTR_SIZE == 4
3194 if (ds == 8) {
3195 /* generate high word */
3196 if (sbt & VT_UNSIGNED) {
3197 vpushi(0);
3198 gv(RC_INT);
3199 } else {
3200 gv_dup();
3201 vpushi(31);
3202 gen_op(TOK_SAR);
3204 lbuild(dbt);
3205 } else if (ss == 8) {
3206 /* from long long: just take low order word */
3207 lexpand();
3208 vpop();
3210 ss = 4;
3212 #elif PTR_SIZE == 8
3213 if (ds == 8) {
3214 /* need to convert from 32bit to 64bit */
3215 if (sbt & VT_UNSIGNED) {
3216 #if defined(TCC_TARGET_RISCV64)
3217 /* RISC-V keeps 32bit vals in registers sign-extended.
3218 So here we need a zero-extension. */
3219 trunc = 32;
3220 #else
3221 goto done;
3222 #endif
3223 } else {
3224 gen_cvt_sxtw();
3225 goto done;
3227 ss = ds, ds = 4, dbt = sbt;
3228 } else if (ss == 8) {
3229 /* RISC-V keeps 32bit vals in registers sign-extended.
3230 So here we need a sign-extension for signed types and
3231 zero-extension. for unsigned types. */
3232 #if !defined(TCC_TARGET_RISCV64)
3233 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3234 #endif
3235 } else {
3236 ss = 4;
3238 #endif
3240 if (ds >= ss)
3241 goto done;
3242 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3243 if (ss == 4) {
3244 gen_cvt_csti(dbt);
3245 goto done;
3247 #endif
/* generic narrowing: shift left then arithmetic/logical shift right */
3248 bits = (ss - ds) * 8;
3249 /* for unsigned, gen_op will convert SAR to SHR */
3250 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3251 vpushi(bits);
3252 gen_op(TOK_SHL);
3253 vpushi(bits - trunc);
3254 gen_op(TOK_SAR);
3255 vpushi(trunc);
3256 gen_op(TOK_SHR);
3258 done:
3259 vtop->type = *type;
3260 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3263 /* return type size as known at compile time. Put alignment at 'a' */
3264 ST_FUNC int type_size(CType *type, int *a)
3266 Sym *s;
3267 int bt;
3269 bt = type->t & VT_BTYPE;
3270 if (bt == VT_STRUCT) {
3271 /* struct/union */
3272 s = type->ref;
3273 *a = s->r;
3274 return s->c;
3275 } else if (bt == VT_PTR) {
3276 if (type->t & VT_ARRAY) {
3277 int ts;
3279 s = type->ref;
3280 ts = type_size(&s->type, a);
3282 if (ts < 0 && s->c < 0)
3283 ts = -ts;
3285 return ts * s->c;
3286 } else {
3287 *a = PTR_SIZE;
3288 return PTR_SIZE;
3290 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3291 *a = 0;
3292 return -1; /* incomplete enum */
3293 } else if (bt == VT_LDOUBLE) {
3294 *a = LDOUBLE_ALIGN;
3295 return LDOUBLE_SIZE;
3296 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3297 #ifdef TCC_TARGET_I386
3298 #ifdef TCC_TARGET_PE
3299 *a = 8;
3300 #else
3301 *a = 4;
3302 #endif
3303 #elif defined(TCC_TARGET_ARM)
3304 #ifdef TCC_ARM_EABI
3305 *a = 8;
3306 #else
3307 *a = 4;
3308 #endif
3309 #else
3310 *a = 8;
3311 #endif
3312 return 8;
3313 } else if (bt == VT_INT || bt == VT_FLOAT) {
3314 *a = 4;
3315 return 4;
3316 } else if (bt == VT_SHORT) {
3317 *a = 2;
3318 return 2;
3319 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3320 *a = 8;
3321 return 16;
3322 } else {
3323 /* char, void, function, _Bool */
3324 *a = 1;
3325 return 1;
3329 /* push type size as known at runtime time on top of value stack. Put
3330 alignment at 'a' */
3331 static void vpush_type_size(CType *type, int *a)
3333 if (type->t & VT_VLA) {
3334 type_size(&type->ref->type, a);
3335 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3336 } else {
3337 int size = type_size(type, a);
3338 if (size < 0)
3339 tcc_error("unknown type size");
3340 #if PTR_SIZE == 8
3341 vpushll(size);
3342 #else
3343 vpushi(size);
3344 #endif
3348 /* return the pointed type of t */
3349 static inline CType *pointed_type(CType *type)
3351 return &type->ref->type;
3354 /* modify type so that its it is a pointer to type. */
3355 ST_FUNC void mk_pointer(CType *type)
3357 Sym *s;
3358 s = sym_push(SYM_FIELD, type, 0, -1);
3359 type->t = VT_PTR | (type->t & VT_STORAGE);
3360 type->ref = s;
3363 /* return true if type1 and type2 are exactly the same (including
3364 qualifiers).
3366 static int is_compatible_types(CType *type1, CType *type2)
3368 return compare_types(type1,type2,0);
3371 /* return true if type1 and type2 are the same (ignoring qualifiers).
3373 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3375 return compare_types(type1,type2,1);
3378 static void cast_error(CType *st, CType *dt)
3380 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3383 /* verify type compatibility to store vtop in 'dt' type */
/* Emits warnings/errors only; performs no conversion itself.
   NOTE(review): scraped text is missing brace-only lines; code left
   byte-identical. */
3384 static void verify_assign_cast(CType *dt)
3386 CType *st, *type1, *type2;
3387 int dbt, sbt, qualwarn, lvl;
3389 st = &vtop->type; /* source type */
3390 dbt = dt->t & VT_BTYPE;
3391 sbt = st->t & VT_BTYPE;
3392 if (dt->t & VT_CONSTANT)
3393 tcc_warning("assignment of read-only location");
3394 switch(dbt) {
3395 case VT_VOID:
3396 if (sbt != dbt)
3397 tcc_error("assignment to void expression");
3398 break;
3399 case VT_PTR:
3400 /* special cases for pointers */
3401 /* '0' can also be a pointer */
3402 if (is_null_pointer(vtop))
3403 break;
3404 /* accept implicit pointer to integer cast with warning */
3405 if (is_integer_btype(sbt)) {
3406 tcc_warning("assignment makes pointer from integer without a cast");
3407 break;
3409 type1 = pointed_type(dt);
3410 if (sbt == VT_PTR)
3411 type2 = pointed_type(st);
3412 else if (sbt == VT_FUNC)
3413 type2 = st; /* a function is implicitly a function pointer */
3414 else
3415 goto error;
3416 if (is_compatible_types(type1, type2))
3417 break;
/* walk down matching pointer levels, tracking discarded qualifiers */
3418 for (qualwarn = lvl = 0;; ++lvl) {
3419 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3420 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3421 qualwarn = 1;
3422 dbt = type1->t & (VT_BTYPE|VT_LONG);
3423 sbt = type2->t & (VT_BTYPE|VT_LONG);
3424 if (dbt != VT_PTR || sbt != VT_PTR)
3425 break;
3426 type1 = pointed_type(type1);
3427 type2 = pointed_type(type2);
3429 if (!is_compatible_unqualified_types(type1, type2)) {
3430 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3431 /* void * can match anything */
3432 } else if (dbt == sbt
3433 && is_integer_btype(sbt & VT_BTYPE)
3434 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3435 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3436 /* Like GCC don't warn by default for merely changes
3437 in pointer target signedness. Do warn for different
3438 base types, though, in particular for unsigned enums
3439 and signed int targets. */
3440 } else {
3441 tcc_warning("assignment from incompatible pointer type");
3442 break;
3445 if (qualwarn)
3446 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
3447 break;
3448 case VT_BYTE:
3449 case VT_SHORT:
3450 case VT_INT:
3451 case VT_LLONG:
3452 if (sbt == VT_PTR || sbt == VT_FUNC) {
3453 tcc_warning("assignment makes integer from pointer without a cast");
3454 } else if (sbt == VT_STRUCT) {
3455 goto case_VT_STRUCT;
3457 /* XXX: more tests */
3458 break;
3459 case VT_STRUCT:
3460 case_VT_STRUCT:
3461 if (!is_compatible_unqualified_types(dt, st)) {
3462 error:
3463 cast_error(st, dt);
3465 break;
3469 static void gen_assign_cast(CType *dt)
3471 verify_assign_cast(dt);
3472 gen_cast(dt);
3475 /* store vtop in lvalue pushed on stack */
/* Stack layout: vtop[-1] is the destination lvalue, vtop[0] the value.
   On return the stored value remains as the expression result.
   NOTE(review): scraped text is missing brace-only lines; code left
   byte-identical. */
3476 ST_FUNC void vstore(void)
3478 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3480 ft = vtop[-1].type.t;
3481 sbt = vtop->type.t & VT_BTYPE;
3482 dbt = ft & VT_BTYPE;
3483 verify_assign_cast(&vtop[-1].type);
3485 if (sbt == VT_STRUCT) {
3486 /* if structure, only generate pointer */
3487 /* structure assignment : generate memcpy */
3488 size = type_size(&vtop->type, &align);
3489 /* destination, keep on stack() as result */
3490 vpushv(vtop - 1);
3491 #ifdef CONFIG_TCC_BCHECK
3492 if (vtop->r & VT_MUSTBOUND)
3493 gbound(); /* check would be wrong after gaddrof() */
3494 #endif
3495 vtop->type.t = VT_PTR;
3496 gaddrof();
3497 /* source */
3498 vswap();
3499 #ifdef CONFIG_TCC_BCHECK
3500 if (vtop->r & VT_MUSTBOUND)
3501 gbound();
3502 #endif
3503 vtop->type.t = VT_PTR;
3504 gaddrof();
3506 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3507 if (1
3508 #ifdef CONFIG_TCC_BCHECK
3509 && !tcc_state->do_bounds_check
3510 #endif
3512 gen_struct_copy(size);
3513 } else
3514 #endif
3516 /* type size */
3517 vpushi(size);
3518 /* Use memmove, rather than memcpy, as dest and src may be same: */
3519 #ifdef TCC_ARM_EABI
3520 if(!(align & 7))
3521 vpush_helper_func(TOK_memmove8);
3522 else if(!(align & 3))
3523 vpush_helper_func(TOK_memmove4);
3524 else
3525 #endif
3526 vpush_helper_func(TOK_memmove);
3527 vrott(4);
3528 gfunc_call(3);
3531 } else if (ft & VT_BITFIELD) {
3532 /* bitfield store handling */
3534 /* save lvalue as expression result (example: s.b = s.a = n;) */
3535 vdup(), vtop[-1] = vtop[-2];
3537 bit_pos = BIT_POS(ft);
3538 bit_size = BIT_SIZE(ft);
3539 /* remove bit field info to avoid loops */
3540 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3542 if (dbt == VT_BOOL) {
3543 gen_cast(&vtop[-1].type);
3544 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3546 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3547 if (dbt != VT_BOOL) {
3548 gen_cast(&vtop[-1].type);
3549 dbt = vtop[-1].type.t & VT_BTYPE;
3551 if (r == VT_STRUCT) {
3552 store_packed_bf(bit_pos, bit_size);
3553 } else {
/* read-modify-write: mask, shift, merge into the destination word */
3554 unsigned long long mask = (1ULL << bit_size) - 1;
3555 if (dbt != VT_BOOL) {
3556 /* mask source */
3557 if (dbt == VT_LLONG)
3558 vpushll(mask);
3559 else
3560 vpushi((unsigned)mask);
3561 gen_op('&');
3563 /* shift source */
3564 vpushi(bit_pos);
3565 gen_op(TOK_SHL);
3566 vswap();
3567 /* duplicate destination */
3568 vdup();
3569 vrott(3);
3570 /* load destination, mask and or with source */
3571 if (dbt == VT_LLONG)
3572 vpushll(~(mask << bit_pos));
3573 else
3574 vpushi(~((unsigned)mask << bit_pos));
3575 gen_op('&');
3576 gen_op('|');
3577 /* store result */
3578 vstore();
3579 /* ... and discard */
3580 vpop();
3582 } else if (dbt == VT_VOID) {
3583 --vtop;
3584 } else {
3585 /* optimize char/short casts */
3586 delayed_cast = 0;
3587 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3588 && is_integer_btype(sbt)
3590 if ((vtop->r & VT_MUSTCAST)
3591 && btype_size(dbt) > btype_size(sbt)
3593 force_charshort_cast();
3594 delayed_cast = 1;
3595 } else {
3596 gen_cast(&vtop[-1].type);
3599 #ifdef CONFIG_TCC_BCHECK
3600 /* bound check case */
3601 if (vtop[-1].r & VT_MUSTBOUND) {
3602 vswap();
3603 gbound();
3604 vswap();
3606 #endif
3607 gv(RC_TYPE(dbt)); /* generate value */
3609 if (delayed_cast) {
3610 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3611 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3612 vtop->type.t = ft & VT_TYPE;
3615 /* if lvalue was saved on stack, must read it */
3616 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3617 SValue sv;
3618 r = get_reg(RC_INT);
3619 sv.type.t = VT_PTRDIFF_T;
3620 sv.r = VT_LOCAL | VT_LVAL;
3621 sv.c.i = vtop[-1].c.i;
3622 load(r, &sv);
3623 vtop[-1].r = r | VT_LVAL;
3626 r = vtop->r & VT_VALMASK;
3627 /* two word case handling :
3628 store second register at word + 4 (or +8 for x86-64) */
3629 if (USING_TWO_WORDS(dbt)) {
3630 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3631 vtop[-1].type.t = load_type;
3632 store(r, vtop - 1);
3633 vswap();
3634 /* convert to int to increment easily */
3635 vtop->type.t = VT_PTRDIFF_T;
3636 gaddrof();
3637 vpushs(PTR_SIZE);
3638 gen_op('+');
3639 vtop->r |= VT_LVAL;
3640 vswap();
3641 vtop[-1].type.t = load_type;
3642 /* XXX: it works because r2 is spilled last ! */
3643 store(vtop->r2, vtop - 1);
3644 } else {
3645 /* single word */
3646 store(r, vtop - 1);
3648 vswap();
3649 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3653 /* post defines POST/PRE add. c is the token ++ or -- */
3654 ST_FUNC void inc(int post, int c)
3656 test_lvalue();
3657 vdup(); /* save lvalue */
3658 if (post) {
3659 gv_dup(); /* duplicate value */
3660 vrotb(3);
3661 vrotb(3);
3663 /* add constant */
3664 vpushi(c - TOK_MID);
3665 gen_op('+');
3666 vstore(); /* store value */
3667 if (post)
3668 vpop(); /* if post op, return saved value */
3671 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3673 /* read the string */
3674 if (tok != TOK_STR)
3675 expect(msg);
3676 cstr_new(astr);
3677 while (tok == TOK_STR) {
3678 /* XXX: add \0 handling too ? */
3679 cstr_cat(astr, tokc.str.data, -1);
3680 next();
3682 cstr_ccat(astr, '\0');
3685 /* If I is >= 1 and a power of two, returns log2(i)+1.
3686 If I is 0 returns 0. */
3687 ST_FUNC int exact_log2p1(int i)
3689 int ret;
3690 if (!i)
3691 return 0;
3692 for (ret = 1; i >= 1 << 8; ret += 8)
3693 i >>= 8;
3694 if (i >= 1 << 4)
3695 ret += 4, i >>= 4;
3696 if (i >= 1 << 2)
3697 ret += 2, i >>= 2;
3698 if (i >= 1 << 1)
3699 ret++;
3700 return ret;
3703 /* Parse __attribute__((...)) GNUC extension. */
/* Consumes any number of consecutive __attribute__((...)) groups (see
   the trailing 'goto redo') and records every recognized attribute into
   'ad'.  Unrecognized attributes produce a warning and their optional
   parenthesized arguments are skipped with paren balancing. */
3704 static void parse_attribute(AttributeDef *ad)
3706 int t, n;
3707 CString astr;
3709 redo:
3710 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3711 return;
3712 next();
3713 skip('(');
3714 skip('(');
3715 while (tok != ')') {
3716 if (tok < TOK_IDENT)
3717 expect("attribute name");
3718 t = tok;
3719 next();
3720 switch(t) {
3721 case TOK_CLEANUP1:
3722 case TOK_CLEANUP2:
/* cleanup(func): 'func' must be a function; declare it implicitly with
   the old-style prototype when it is still unknown. */
3724 Sym *s;
3726 skip('(');
3727 s = sym_find(tok);
3728 if (!s) {
3729 tcc_warning_c(warn_implicit_function_declaration)(
3730 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
3731 s = external_global_sym(tok, &func_old_type);
3732 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
3733 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
3734 ad->cleanup_func = s;
3735 next();
3736 skip(')');
3737 break;
3739 case TOK_CONSTRUCTOR1:
3740 case TOK_CONSTRUCTOR2:
3741 ad->f.func_ctor = 1;
3742 break;
3743 case TOK_DESTRUCTOR1:
3744 case TOK_DESTRUCTOR2:
3745 ad->f.func_dtor = 1;
3746 break;
3747 case TOK_ALWAYS_INLINE1:
3748 case TOK_ALWAYS_INLINE2:
3749 ad->f.func_alwinl = 1;
3750 break;
3751 case TOK_SECTION1:
3752 case TOK_SECTION2:
3753 skip('(');
3754 parse_mult_str(&astr, "section name");
3755 ad->section = find_section(tcc_state, (char *)astr.data);
3756 skip(')');
3757 cstr_free(&astr);
3758 break;
3759 case TOK_ALIAS1:
3760 case TOK_ALIAS2:
3761 skip('(');
3762 parse_mult_str(&astr, "alias(\"target\")");
3763 ad->alias_target = /* save string as token, for later */
3764 tok_alloc((char*)astr.data, astr.size-1)->tok;
3765 skip(')');
3766 cstr_free(&astr);
3767 break;
3768 case TOK_VISIBILITY1:
3769 case TOK_VISIBILITY2:
3770 skip('(');
3771 parse_mult_str(&astr,
3772 "visibility(\"default|hidden|internal|protected\")");
3773 if (!strcmp (astr.data, "default"))
3774 ad->a.visibility = STV_DEFAULT;
3775 else if (!strcmp (astr.data, "hidden"))
3776 ad->a.visibility = STV_HIDDEN;
3777 else if (!strcmp (astr.data, "internal"))
3778 ad->a.visibility = STV_INTERNAL;
3779 else if (!strcmp (astr.data, "protected"))
3780 ad->a.visibility = STV_PROTECTED;
3781 else
3782 expect("visibility(\"default|hidden|internal|protected\")");
3783 skip(')');
3784 cstr_free(&astr);
3785 break;
/* aligned or aligned(n): without an argument, use MAX_ALIGN; the value
   is stored as log2(n)+1 (see exact_log2p1). */
3786 case TOK_ALIGNED1:
3787 case TOK_ALIGNED2:
3788 if (tok == '(') {
3789 next();
3790 n = expr_const();
3791 if (n <= 0 || (n & (n - 1)) != 0)
3792 tcc_error("alignment must be a positive power of two");
3793 skip(')');
3794 } else {
3795 n = MAX_ALIGN;
3797 ad->a.aligned = exact_log2p1(n);
3798 if (n != 1 << (ad->a.aligned - 1))
3799 tcc_error("alignment of %d is larger than implemented", n);
3800 break;
3801 case TOK_PACKED1:
3802 case TOK_PACKED2:
3803 ad->a.packed = 1;
3804 break;
3805 case TOK_WEAK1:
3806 case TOK_WEAK2:
3807 ad->a.weak = 1;
3808 break;
3809 case TOK_UNUSED1:
3810 case TOK_UNUSED2:
3811 /* currently, no need to handle it because tcc does not
3812 track unused objects */
3813 break;
3814 case TOK_NORETURN1:
3815 case TOK_NORETURN2:
3816 ad->f.func_noreturn = 1;
3817 break;
3818 case TOK_CDECL1:
3819 case TOK_CDECL2:
3820 case TOK_CDECL3:
3821 ad->f.func_call = FUNC_CDECL;
3822 break;
3823 case TOK_STDCALL1:
3824 case TOK_STDCALL2:
3825 case TOK_STDCALL3:
3826 ad->f.func_call = FUNC_STDCALL;
3827 break;
3828 #ifdef TCC_TARGET_I386
3829 case TOK_REGPARM1:
3830 case TOK_REGPARM2:
3831 skip('(');
3832 n = expr_const();
3833 if (n > 3)
3834 n = 3;
3835 else if (n < 0)
3836 n = 0;
3837 if (n > 0)
3838 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3839 skip(')');
3840 break;
3841 case TOK_FASTCALL1:
3842 case TOK_FASTCALL2:
3843 case TOK_FASTCALL3:
3844 ad->f.func_call = FUNC_FASTCALLW;
3845 break;
3846 #endif
/* __mode__(..): stored as VT_* basic type plus one so that 0 still
   means "no mode attribute seen". */
3847 case TOK_MODE:
3848 skip('(');
3849 switch(tok) {
3850 case TOK_MODE_DI:
3851 ad->attr_mode = VT_LLONG + 1;
3852 break;
3853 case TOK_MODE_QI:
3854 ad->attr_mode = VT_BYTE + 1;
3855 break;
3856 case TOK_MODE_HI:
3857 ad->attr_mode = VT_SHORT + 1;
3858 break;
3859 case TOK_MODE_SI:
3860 case TOK_MODE_word:
3861 ad->attr_mode = VT_INT + 1;
3862 break;
3863 default:
3864 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3865 break;
3867 next();
3868 skip(')');
3869 break;
3870 case TOK_DLLEXPORT:
3871 ad->a.dllexport = 1;
3872 break;
3873 case TOK_NODECORATE:
3874 ad->a.nodecorate = 1;
3875 break;
3876 case TOK_DLLIMPORT:
3877 ad->a.dllimport = 1;
3878 break;
/* unknown attribute: warn once, then skip a balanced parenthesized
   argument list if present (stops at EOF, tok == -1). */
3879 default:
3880 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
3881 /* skip parameters */
3882 if (tok == '(') {
3883 int parenthesis = 0;
3884 do {
3885 if (tok == '(')
3886 parenthesis++;
3887 else if (tok == ')')
3888 parenthesis--;
3889 next();
3890 } while (parenthesis && tok != -1);
3892 break;
3894 if (tok != ',')
3895 break;
3896 next();
3898 skip(')');
3899 skip(')');
3900 goto redo;
3903 static Sym * find_field (CType *type, int v, int *cumofs)
3905 Sym *s = type->ref;
3906 v |= SYM_FIELD;
3907 while ((s = s->next) != NULL) {
3908 if ((s->v & SYM_FIELD) &&
3909 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3910 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3911 Sym *ret = find_field (&s->type, v, cumofs);
3912 if (ret) {
3913 *cumofs += s->c;
3914 return ret;
3917 if (s->v == v)
3918 break;
3920 return s;
3923 static void check_fields (CType *type, int check)
3925 Sym *s = type->ref;
3927 while ((s = s->next) != NULL) {
3928 int v = s->v & ~SYM_FIELD;
3929 if (v < SYM_FIRST_ANOM) {
3930 TokenSym *ts = table_ident[v - TOK_IDENT];
3931 if (check && (ts->tok & SYM_FIELD))
3932 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
3933 ts->tok ^= SYM_FIELD;
3934 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
3935 check_fields (&s->type, check);
/* Compute offsets, sizes and alignment for every member of the
   struct/union 'type', honoring pragma pack, aligned/packed attributes
   and two bit-field models: PCC/GCC-compatible ('pcc') and MS
   bitfields.  Results are stored back into the member symbols (f->c,
   bit position bits of f->type.t) and into type->ref (r = alignment,
   c = total size). */
3939 static void struct_layout(CType *type, AttributeDef *ad)
3941 int size, align, maxalign, offset, c, bit_pos, bit_size;
3942 int packed, a, bt, prevbt, prev_bit_size;
3943 int pcc = !tcc_state->ms_bitfields;
3944 int pragma_pack = *tcc_state->pack_stack_ptr;
3945 Sym *f;
3947 maxalign = 1;
3948 offset = 0;
3949 c = 0;
3950 bit_pos = 0;
3951 prevbt = VT_STRUCT; /* make it never match */
3952 prev_bit_size = 0;
3954 //#define BF_DEBUG
/* first pass: place each member ('c' = running byte size, 'bit_pos' =
   bit offset inside the current bit-field container) */
3956 for (f = type->ref->next; f; f = f->next) {
3957 if (f->type.t & VT_BITFIELD)
3958 bit_size = BIT_SIZE(f->type.t);
3959 else
3960 bit_size = -1;
3961 size = type_size(&f->type, &align);
3962 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3963 packed = 0;
3965 if (pcc && bit_size == 0) {
3966 /* in pcc mode, packing does not affect zero-width bitfields */
3968 } else {
3969 /* in pcc mode, attribute packed overrides if set. */
3970 if (pcc && (f->a.packed || ad->a.packed))
3971 align = packed = 1;
3973 /* pragma pack overrides align if lesser and packs bitfields always */
3974 if (pragma_pack) {
3975 packed = 1;
3976 if (pragma_pack < align)
3977 align = pragma_pack;
3978 /* in pcc mode pragma pack also overrides individual align */
3979 if (pcc && pragma_pack < a)
3980 a = 0;
3983 /* some individual align was specified */
3984 if (a)
3985 align = a;
3987 if (type->ref->type.t == VT_UNION) {
3988 if (pcc && bit_size >= 0)
3989 size = (bit_size + 7) >> 3;
3990 offset = 0;
3991 if (size > c)
3992 c = size;
3994 } else if (bit_size < 0) {
3995 if (pcc)
3996 c += (bit_pos + 7) >> 3;
3997 c = (c + align - 1) & -align;
3998 offset = c;
3999 if (size > 0)
4000 c += size;
4001 bit_pos = 0;
4002 prevbt = VT_STRUCT;
4003 prev_bit_size = 0;
4005 } else {
4006 /* A bit-field. Layout is more complicated. There are two
4007 options: PCC (GCC) compatible and MS compatible */
4008 if (pcc) {
4009 /* In PCC layout a bit-field is placed adjacent to the
4010 preceding bit-fields, except if:
4011 - it has zero-width
4012 - an individual alignment was given
4013 - it would overflow its base type container and
4014 there is no packing */
4015 if (bit_size == 0) {
4016 new_field:
4017 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4018 bit_pos = 0;
4019 } else if (f->a.aligned) {
4020 goto new_field;
4021 } else if (!packed) {
4022 int a8 = align * 8;
4023 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4024 if (ofs > size / align)
4025 goto new_field;
4028 /* in pcc mode, long long bitfields have type int if they fit */
4029 if (size == 8 && bit_size <= 32)
4030 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4032 while (bit_pos >= align * 8)
4033 c += align, bit_pos -= align * 8;
4034 offset = c;
4036 /* In PCC layout named bit-fields influence the alignment
4037 of the containing struct using the base types alignment,
4038 except for packed fields (which here have correct align). */
4039 if (f->v & SYM_FIRST_ANOM
4040 // && bit_size // ??? gcc on ARM/rpi does that
4042 align = 1;
4044 } else {
4045 bt = f->type.t & VT_BTYPE;
4046 if ((bit_pos + bit_size > size * 8)
4047 || (bit_size > 0) == (bt != prevbt)
4049 c = (c + align - 1) & -align;
4050 offset = c;
4051 bit_pos = 0;
4052 /* In MS bitfield mode a bit-field run always uses
4053 at least as many bits as the underlying type.
4054 To start a new run it's also required that this
4055 or the last bit-field had non-zero width. */
4056 if (bit_size || prev_bit_size)
4057 c += size;
4059 /* In MS layout the records alignment is normally
4060 influenced by the field, except for a zero-width
4061 field at the start of a run (but by further zero-width
4062 fields it is again). */
4063 if (bit_size == 0 && prevbt != bt)
4064 align = 1;
4065 prevbt = bt;
4066 prev_bit_size = bit_size;
4069 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4070 | (bit_pos << VT_STRUCT_SHIFT);
4071 bit_pos += bit_size;
4073 if (align > maxalign)
4074 maxalign = align;
4076 #ifdef BF_DEBUG
4077 printf("set field %s offset %-2d size %-2d align %-2d",
4078 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4079 if (f->type.t & VT_BITFIELD) {
4080 printf(" pos %-2d bits %-2d",
4081 BIT_POS(f->type.t),
4082 BIT_SIZE(f->type.t)
4085 printf("\n");
4086 #endif
4088 f->c = offset;
4089 f->r = 0;
4092 if (pcc)
4093 c += (bit_pos + 7) >> 3;
/* finalize record alignment and pad the total size up to it */
4095 /* store size and alignment */
4096 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4097 if (a < maxalign)
4098 a = maxalign;
4099 type->ref->r = a;
4100 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4101 /* can happen if individual align for some member was given. In
4102 this case MSVC ignores maxalign when aligning the size */
4103 a = pragma_pack;
4104 if (a < bt)
4105 a = bt;
4107 c = (c + a - 1) & -a;
4108 type->ref->c = c;
4110 #ifdef BF_DEBUG
4111 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4112 #endif
/* second pass: for each bit-field pick an access type ('auxtype') that
   stays inside the record; fall back to byte-wise access (VT_STRUCT)
   if no integer type fits. */
4114 /* check whether we can access bitfields by their type */
4115 for (f = type->ref->next; f; f = f->next) {
4116 int s, px, cx, c0;
4117 CType t;
4119 if (0 == (f->type.t & VT_BITFIELD))
4120 continue;
4121 f->type.ref = f;
4122 f->auxtype = -1;
4123 bit_size = BIT_SIZE(f->type.t);
4124 if (bit_size == 0)
4125 continue;
4126 bit_pos = BIT_POS(f->type.t);
4127 size = type_size(&f->type, &align);
4129 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4130 #ifdef TCC_TARGET_ARM
4131 && !(f->c & (align - 1))
4132 #endif
4134 continue;
4136 /* try to access the field using a different type */
4137 c0 = -1, s = align = 1;
4138 t.t = VT_BYTE;
4139 for (;;) {
4140 px = f->c * 8 + bit_pos;
4141 cx = (px >> 3) & -align;
4142 px = px - (cx << 3);
4143 if (c0 == cx)
4144 break;
4145 s = (px + bit_size + 7) >> 3;
4146 if (s > 4) {
4147 t.t = VT_LLONG;
4148 } else if (s > 2) {
4149 t.t = VT_INT;
4150 } else if (s > 1) {
4151 t.t = VT_SHORT;
4152 } else {
4153 t.t = VT_BYTE;
4155 s = type_size(&t, &align);
4156 c0 = cx;
4159 if (px + bit_size <= s * 8 && cx + s <= c
4160 #ifdef TCC_TARGET_ARM
4161 && !(cx & (align - 1))
4162 #endif
4164 /* update offset and bit position */
4165 f->c = cx;
4166 bit_pos = px;
4167 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4168 | (bit_pos << VT_STRUCT_SHIFT);
4169 if (s != size)
4170 f->auxtype = t.t;
4171 #ifdef BF_DEBUG
4172 printf("FIX field %s offset %-2d size %-2d align %-2d "
4173 "pos %-2d bits %-2d\n",
4174 get_tok_str(f->v & ~SYM_FIELD, NULL),
4175 cx, s, align, px, bit_size);
4176 #endif
4177 } else {
4178 /* fall back to load/store single-byte wise */
4179 f->auxtype = VT_STRUCT;
4180 #ifdef BF_DEBUG
4181 printf("FIX field %s : load byte-wise\n",
4182 get_tok_str(f->v & ~SYM_FIELD, NULL));
4183 #endif
4188 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses a (possibly anonymous, possibly forward) tag declaration plus
   an optional '{...}' body.  The resulting type is stored in '*type';
   the tag symbol keeps c == -1 while the body is undefined and -2 while
   it is being parsed. */
4189 static void struct_decl(CType *type, int u)
4191 int v, c, size, align, flexible;
4192 int bit_size, bsize, bt;
4193 Sym *s, *ss, **ps;
4194 AttributeDef ad, ad1;
4195 CType type1, btype;
4197 memset(&ad, 0, sizeof ad);
4198 next();
4199 parse_attribute(&ad);
4200 if (tok != '{') {
4201 v = tok;
4202 next();
4203 /* struct already defined ? return it */
4204 if (v < TOK_IDENT)
4205 expect("struct/union/enum name");
4206 s = struct_find(v);
4207 if (s && (s->sym_scope == local_scope || tok != '{')) {
4208 if (u == s->type.t)
4209 goto do_decl;
4210 if (u == VT_ENUM && IS_ENUM(s->type.t))
4211 goto do_decl;
4212 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4214 } else {
4215 v = anon_sym++;
4217 /* Record the original enum/struct/union token. */
4218 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4219 type1.ref = NULL;
4220 /* we put an undefined size for struct/union */
4221 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4222 s->r = 0; /* default alignment is zero as gcc */
4223 do_decl:
4224 type->t = s->type.t;
4225 type->ref = s;
4227 if (tok == '{') {
4228 next();
4229 if (s->c != -1)
4230 tcc_error("struct/union/enum already defined");
4231 s->c = -2;
4232 /* cannot be empty */
4233 /* non empty enums are not allowed */
4234 ps = &s->next;
/* enum body: collect enumerators, track min/max to pick the smallest
   integral type afterwards */
4235 if (u == VT_ENUM) {
4236 long long ll = 0, pl = 0, nl = 0;
4237 CType t;
4238 t.ref = s;
4239 /* enum symbols have static storage */
4240 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4241 for(;;) {
4242 v = tok;
4243 if (v < TOK_UIDENT)
4244 expect("identifier");
4245 ss = sym_find(v);
4246 if (ss && !local_stack)
4247 tcc_error("redefinition of enumerator '%s'",
4248 get_tok_str(v, NULL));
4249 next();
4250 if (tok == '=') {
4251 next();
4252 ll = expr_const64();
4254 ss = sym_push(v, &t, VT_CONST, 0);
4255 ss->enum_val = ll;
4256 *ps = ss, ps = &ss->next;
4257 if (ll < nl)
4258 nl = ll;
4259 if (ll > pl)
4260 pl = ll;
4261 if (tok != ',')
4262 break;
4263 next();
4264 ll++;
4265 /* NOTE: we accept a trailing comma */
4266 if (tok == '}')
4267 break;
4269 skip('}');
4270 /* set integral type of the enum */
4271 t.t = VT_INT;
4272 if (nl >= 0) {
4273 if (pl != (unsigned)pl)
4274 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4275 t.t |= VT_UNSIGNED;
4276 } else if (pl != (int)pl || nl != (int)nl)
4277 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4278 s->type.t = type->t = t.t | VT_ENUM;
4279 s->c = 0;
4280 /* set type for enum members */
4281 for (ss = s->next; ss; ss = ss->next) {
4282 ll = ss->enum_val;
4283 if (ll == (int)ll) /* default is int if it fits */
4284 continue;
4285 if (t.t & VT_UNSIGNED) {
4286 ss->type.t |= VT_UNSIGNED;
4287 if (ll == (unsigned)ll)
4288 continue;
4290 ss->type.t = (ss->type.t & ~VT_BTYPE)
4291 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
/* struct/union body: parse declarators, bit-field widths and nested
   anonymous members; 'c' records that a real member was seen (for the
   flexible-array-member check), 'flexible' that one was parsed. */
4293 } else {
4294 c = 0;
4295 flexible = 0;
4296 while (tok != '}') {
4297 if (!parse_btype(&btype, &ad1, 0)) {
4298 skip(';');
4299 continue;
4301 while (1) {
4302 if (flexible)
4303 tcc_error("flexible array member '%s' not at the end of struct",
4304 get_tok_str(v, NULL));
4305 bit_size = -1;
4306 v = 0;
4307 type1 = btype;
4308 if (tok != ':') {
4309 if (tok != ';')
4310 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4311 if (v == 0) {
4312 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4313 expect("identifier");
4314 else {
4315 int v = btype.ref->v;
4316 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4317 if (tcc_state->ms_extensions == 0)
4318 expect("identifier");
4322 if (type_size(&type1, &align) < 0) {
4323 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4324 flexible = 1;
4325 else
4326 tcc_error("field '%s' has incomplete type",
4327 get_tok_str(v, NULL));
4329 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4330 (type1.t & VT_BTYPE) == VT_VOID ||
4331 (type1.t & VT_STORAGE))
4332 tcc_error("invalid type for '%s'",
4333 get_tok_str(v, NULL));
4335 if (tok == ':') {
4336 next();
4337 bit_size = expr_const();
4338 /* XXX: handle v = 0 case for messages */
4339 if (bit_size < 0)
4340 tcc_error("negative width in bit-field '%s'",
4341 get_tok_str(v, NULL));
4342 if (v && bit_size == 0)
4343 tcc_error("zero width for bit-field '%s'",
4344 get_tok_str(v, NULL));
4345 parse_attribute(&ad1);
4347 size = type_size(&type1, &align);
4348 if (bit_size >= 0) {
4349 bt = type1.t & VT_BTYPE;
4350 if (bt != VT_INT &&
4351 bt != VT_BYTE &&
4352 bt != VT_SHORT &&
4353 bt != VT_BOOL &&
4354 bt != VT_LLONG)
4355 tcc_error("bitfields must have scalar type");
4356 bsize = size * 8;
4357 if (bit_size > bsize) {
4358 tcc_error("width of '%s' exceeds its type",
4359 get_tok_str(v, NULL));
4360 } else if (bit_size == bsize
4361 && !ad.a.packed && !ad1.a.packed) {
4362 /* no need for bit fields */
4364 } else if (bit_size == 64) {
4365 tcc_error("field width 64 not implemented");
4366 } else {
4367 type1.t = (type1.t & ~VT_STRUCT_MASK)
4368 | VT_BITFIELD
4369 | (bit_size << (VT_STRUCT_SHIFT + 6));
4372 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4373 /* Remember we've seen a real field to check
4374 for placement of flexible array member. */
4375 c = 1;
4377 /* If member is a struct or bit-field, enforce
4378 placing into the struct (as anonymous). */
4379 if (v == 0 &&
4380 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4381 bit_size >= 0)) {
4382 v = anon_sym++;
4384 if (v) {
4385 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4386 ss->a = ad1.a;
4387 *ps = ss;
4388 ps = &ss->next;
4390 if (tok == ';' || tok == TOK_EOF)
4391 break;
4392 skip(',');
4394 skip(';');
4396 skip('}');
4397 parse_attribute(&ad);
4398 if (ad.cleanup_func) {
4399 tcc_warning("attribute '__cleanup__' ignored on type");
4401 check_fields(type, 1);
4402 check_fields(type, 0);
4403 struct_layout(type, &ad);
4404 if (debug_modes)
4405 tcc_debug_fix_anon(tcc_state, type);
/* Merge the symbol and function attributes of 's' into 'ad'. */
4410 static void sym_to_attr(AttributeDef *ad, Sym *s)
4412 merge_symattr(&ad->a, &s->a);
4413 merge_funcattr(&ad->f, &s->f);
4416 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4417 are added to the element type, copied because it could be a typedef. */
4418 static void parse_btype_qualify(CType *type, int qualifiers)
4420 while (type->t & VT_ARRAY) {
4421 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4422 type = &type->ref->type;
4424 type->t |= qualifiers;
4427 /* return 0 if no type declaration. otherwise, return the basic type
4428 and skip it.
/* Accumulates basic type keywords, qualifiers, storage classes,
   attributes and typedef names into type->t / 'ad'.  'bt' tracks the
   base type seen so far and 'st' the size modifier (short/long), both
   -1 while unset, so combinations like "long long" and duplicates like
   "int int" can be diagnosed. */
4430 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
4432 int t, u, bt, st, type_found, typespec_found, g, n;
4433 Sym *s;
4434 CType type1;
4436 memset(ad, 0, sizeof(AttributeDef));
4437 type_found = 0;
4438 typespec_found = 0;
4439 t = VT_INT;
4440 bt = st = -1;
4441 type->ref = NULL;
4443 while(1) {
4444 switch(tok) {
4445 case TOK_EXTENSION:
4446 /* currently, we really ignore extension */
4447 next();
4448 continue;
4450 /* basic types */
4451 case TOK_CHAR:
4452 u = VT_BYTE;
4453 basic_type:
4454 next();
4455 basic_type1:
4456 if (u == VT_SHORT || u == VT_LONG) {
4457 if (st != -1 || (bt != -1 && bt != VT_INT))
4458 tmbt: tcc_error("too many basic types");
4459 st = u;
4460 } else {
4461 if (bt != -1 || (st != -1 && u != VT_INT))
4462 goto tmbt;
4463 bt = u;
4465 if (u != VT_INT)
4466 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4467 typespec_found = 1;
4468 break;
4469 case TOK_VOID:
4470 u = VT_VOID;
4471 goto basic_type;
4472 case TOK_SHORT:
4473 u = VT_SHORT;
4474 goto basic_type;
4475 case TOK_INT:
4476 u = VT_INT;
4477 goto basic_type;
4478 case TOK_ALIGNAS:
4479 { int n;
4480 AttributeDef ad1;
4481 next();
4482 skip('(');
4483 memset(&ad1, 0, sizeof(AttributeDef));
4484 if (parse_btype(&type1, &ad1, 0)) {
4485 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4486 if (ad1.a.aligned)
4487 n = 1 << (ad1.a.aligned - 1);
4488 else
4489 type_size(&type1, &n);
4490 } else {
4491 n = expr_const();
4492 if (n < 0 || (n & (n - 1)) != 0)
4493 tcc_error("alignment must be a positive power of two");
4495 skip(')');
4496 ad->a.aligned = exact_log2p1(n);
4498 continue;
4499 case TOK_LONG:
4500 if ((t & VT_BTYPE) == VT_DOUBLE) {
4501 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4502 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4503 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4504 } else {
4505 u = VT_LONG;
4506 goto basic_type;
4508 next();
4509 break;
4510 #ifdef TCC_TARGET_ARM64
4511 case TOK_UINT128:
4512 /* GCC's __uint128_t appears in some Linux header files. Make it a
4513 synonym for long double to get the size and alignment right. */
4514 u = VT_LDOUBLE;
4515 goto basic_type;
4516 #endif
4517 case TOK_BOOL:
4518 u = VT_BOOL;
4519 goto basic_type;
4520 case TOK_FLOAT:
4521 u = VT_FLOAT;
4522 goto basic_type;
4523 case TOK_DOUBLE:
4524 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4525 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4526 } else {
4527 u = VT_DOUBLE;
4528 goto basic_type;
4530 next();
4531 break;
4532 case TOK_ENUM:
4533 struct_decl(&type1, VT_ENUM);
4534 basic_type2:
4535 u = type1.t;
4536 type->ref = type1.ref;
4537 goto basic_type1;
4538 case TOK_STRUCT:
4539 struct_decl(&type1, VT_STRUCT);
4540 goto basic_type2;
4541 case TOK_UNION:
4542 struct_decl(&type1, VT_UNION);
4543 goto basic_type2;
4545 /* type modifiers */
4546 case TOK__Atomic:
4547 next();
4548 type->t = t;
4549 parse_btype_qualify(type, VT_ATOMIC);
4550 t = type->t;
4551 if (tok == '(') {
4552 parse_expr_type(&type1);
4553 /* remove all storage modifiers except typedef */
4554 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4555 if (type1.ref)
4556 sym_to_attr(ad, type1.ref);
4557 goto basic_type2;
4559 break;
4560 case TOK_CONST1:
4561 case TOK_CONST2:
4562 case TOK_CONST3:
4563 type->t = t;
4564 parse_btype_qualify(type, VT_CONSTANT);
4565 t = type->t;
4566 next();
4567 break;
4568 case TOK_VOLATILE1:
4569 case TOK_VOLATILE2:
4570 case TOK_VOLATILE3:
4571 type->t = t;
4572 parse_btype_qualify(type, VT_VOLATILE);
4573 t = type->t;
4574 next();
4575 break;
4576 case TOK_SIGNED1:
4577 case TOK_SIGNED2:
4578 case TOK_SIGNED3:
4579 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4580 tcc_error("signed and unsigned modifier");
4581 t |= VT_DEFSIGN;
4582 next();
4583 typespec_found = 1;
4584 break;
4585 case TOK_REGISTER:
4586 case TOK_AUTO:
4587 case TOK_RESTRICT1:
4588 case TOK_RESTRICT2:
4589 case TOK_RESTRICT3:
4590 next();
4591 break;
4592 case TOK_UNSIGNED:
4593 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4594 tcc_error("signed and unsigned modifier");
4595 t |= VT_DEFSIGN | VT_UNSIGNED;
4596 next();
4597 typespec_found = 1;
4598 break;
4600 /* storage */
4601 case TOK_EXTERN:
4602 g = VT_EXTERN;
4603 goto storage;
4604 case TOK_STATIC:
4605 g = VT_STATIC;
4606 goto storage;
4607 case TOK_TYPEDEF:
4608 g = VT_TYPEDEF;
4609 goto storage;
4610 storage:
4611 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4612 tcc_error("multiple storage classes");
4613 t |= g;
4614 next();
4615 break;
4616 case TOK_INLINE1:
4617 case TOK_INLINE2:
4618 case TOK_INLINE3:
4619 t |= VT_INLINE;
4620 next();
4621 break;
4622 case TOK_NORETURN3:
4623 next();
4624 ad->f.func_noreturn = 1;
4625 break;
4626 /* GNUC attribute */
4627 case TOK_ATTRIBUTE1:
4628 case TOK_ATTRIBUTE2:
4629 parse_attribute(ad);
4630 if (ad->attr_mode) {
4631 u = ad->attr_mode -1;
4632 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4634 continue;
4635 /* GNUC typeof */
4636 case TOK_TYPEOF1:
4637 case TOK_TYPEOF2:
4638 case TOK_TYPEOF3:
4639 next();
4640 parse_expr_type(&type1);
4641 /* remove all storage modifiers except typedef */
4642 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4643 if (type1.ref)
4644 sym_to_attr(ad, type1.ref);
4645 goto basic_type2;
/* anything else: possibly a typedef name, unless a type specifier was
   already consumed or the identifier is followed by ':' (a label) */
4646 default:
4647 if (typespec_found)
4648 goto the_end;
4649 s = sym_find(tok);
4650 if (!s || !(s->type.t & VT_TYPEDEF))
4651 goto the_end;
4653 n = tok, next();
4654 if (tok == ':' && ignore_label) {
4655 /* ignore if it's a label */
4656 unget_tok(n);
4657 goto the_end;
4660 t &= ~(VT_BTYPE|VT_LONG);
4661 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4662 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4663 type->ref = s->type.ref;
4664 if (t)
4665 parse_btype_qualify(type, t);
4666 t = type->t;
4667 /* get attributes from typedef */
4668 sym_to_attr(ad, s);
4669 typespec_found = 1;
4670 st = bt = -2;
4671 break;
4673 type_found = 1;
/* post-processing: default char signedness and 'long' resolution */
4675 the_end:
4676 if (tcc_state->char_is_unsigned) {
4677 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4678 t |= VT_UNSIGNED;
4680 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4681 bt = t & (VT_BTYPE|VT_LONG);
4682 if (bt == VT_LONG)
4683 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4684 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4685 if (bt == VT_LDOUBLE)
4686 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4687 #endif
4688 type->t = t;
4689 return type_found;
4692 /* convert a function parameter type (array to pointer and function to
4693 function pointer) */
4694 static inline void convert_parameter_type(CType *pt)
4696 /* remove const and volatile qualifiers (XXX: const could be used
4697 to indicate a const function parameter */
4698 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4699 /* array must be transformed to pointer according to ANSI C */
4700 pt->t &= ~VT_ARRAY;
4701 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4702 mk_pointer(pt);
/* Parse the '(' plus string-constant part of an asm construct into
   *astr; the closing ')' is left for the caller to consume. */
4706 ST_FUNC void parse_asm_str(CString *astr)
4708 skip('(');
4709 parse_mult_str(astr, "string constant");
4712 /* Parse an asm label and return the token */
4713 static int asm_label_instr(void)
4715 int v;
4716 CString astr;
4718 next();
4719 parse_asm_str(&astr);
4720 skip(')');
4721 #ifdef ASM_DEBUG
4722 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4723 #endif
4724 v = tok_alloc(astr.data, astr.size - 1)->tok;
4725 cstr_free(&astr);
4726 return v;
/* Parse the part of a declarator that follows the identifier: a
   function parameter list '(...)' or array brackets '[...]' (possibly
   VLA).  'storage' carries the declaration's storage bits, 'td' the
   TYPE_* flags.  Returns 0 when '(' turned out to start a recursive
   declarator that the caller must handle, 1 otherwise. */
4729 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4731 int n, l, t1, arg_size, align, unused_align;
4732 Sym **plast, *s, *first;
4733 AttributeDef ad1;
4734 CType pt;
4735 TokenString *vla_array_tok = NULL;
4736 int *vla_array_str = NULL;
4738 if (tok == '(') {
4739 /* function type, or recursive declarator (return if so) */
4740 next();
4741 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4742 return 0;
4743 if (tok == ')')
4744 l = 0;
4745 else if (parse_btype(&pt, &ad1, 0))
4746 l = FUNC_NEW;
4747 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4748 merge_attr (ad, &ad1);
4749 return 0;
4750 } else
4751 l = FUNC_OLD;
/* parse the parameter list; 'l' distinguishes new-style prototypes,
   old-style (K&R) lists and '...' (FUNC_ELLIPSIS) */
4753 first = NULL;
4754 plast = &first;
4755 arg_size = 0;
4756 ++local_scope;
4757 if (l) {
4758 for(;;) {
4759 /* read param name and compute offset */
4760 if (l != FUNC_OLD) {
4761 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4762 break;
4763 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4764 if ((pt.t & VT_BTYPE) == VT_VOID)
4765 tcc_error("parameter declared as void");
4766 if (n == 0)
4767 n = SYM_FIELD;
4768 } else {
4769 n = tok;
4770 pt.t = VT_VOID; /* invalid type */
4771 pt.ref = NULL;
4772 next();
4774 if (n < TOK_UIDENT)
4775 expect("identifier");
4776 convert_parameter_type(&pt);
4777 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4778 s = sym_push(n, &pt, 0, 0);
4779 *plast = s;
4780 plast = &s->next;
4781 if (tok == ')')
4782 break;
4783 skip(',');
4784 if (l == FUNC_NEW && tok == TOK_DOTS) {
4785 l = FUNC_ELLIPSIS;
4786 next();
4787 break;
4789 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4790 tcc_error("invalid type");
4792 } else
4793 /* if no parameters, then old type prototype */
4794 l = FUNC_OLD;
4795 skip(')');
4796 /* remove parameter symbols from token table, keep on stack */
4797 if (first) {
4798 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4799 for (s = first; s; s = s->next)
4800 s->v |= SYM_FIELD;
4802 --local_scope;
4803 /* NOTE: const is ignored in returned type as it has a special
4804 meaning in gcc / C++ */
4805 type->t &= ~VT_CONSTANT;
4806 /* some ancient pre-K&R C allows a function to return an array
4807 and the array brackets to be put after the arguments, such
4808 that "int c()[]" means something like "int[] c()" */
4809 if (tok == '[') {
4810 next();
4811 skip(']'); /* only handle simple "[]" */
4812 mk_pointer(type);
4814 /* we push a anonymous symbol which will contain the function prototype */
4815 ad->f.func_args = arg_size;
4816 ad->f.func_type = l;
4817 s = sym_push(SYM_FIELD, type, 0, 0);
4818 s->a = ad->a;
4819 s->f = ad->f;
4820 s->next = first;
4821 type->t = VT_FUNC;
4822 type->ref = s;
/* array declarator: constant size, unspecified size (n == -1) or VLA */
4823 } else if (tok == '[') {
4824 int saved_nocode_wanted = nocode_wanted;
4825 /* array definition */
4826 next();
4827 n = -1;
4828 t1 = 0;
4829 if (td & TYPE_PARAM) while (1) {
4830 /* XXX The optional type-quals and static should only be accepted
4831 in parameter decls. The '*' as well, and then even only
4832 in prototypes (not function defs). */
4833 switch (tok) {
4834 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4835 case TOK_CONST1:
4836 case TOK_VOLATILE1:
4837 case TOK_STATIC:
4838 case '*':
4839 next();
4840 continue;
4841 default:
4842 break;
4844 if (tok != ']') {
4845 int nest = 1;
4847 /* Code generation is not done now but has to be done
4848 at start of function. Save code here for later use. */
4849 nocode_wanted = 1;
4850 vla_array_tok = tok_str_alloc();
4851 for (;;) {
4852 if (tok == ']') {
4853 nest--;
4854 if (nest == 0)
4855 break;
4857 if (tok == '[')
4858 nest++;
4859 tok_str_add_tok(vla_array_tok);
4860 next();
4862 unget_tok(0);
4863 tok_str_add(vla_array_tok, -1);
4864 tok_str_add(vla_array_tok, 0);
4865 vla_array_str = vla_array_tok->str;
4866 begin_macro(vla_array_tok, 2);
4867 next();
4868 gexpr();
4869 end_macro();
4870 next();
4871 goto check;
4873 break;
4875 } else if (tok != ']') {
4876 if (!local_stack || (storage & VT_STATIC))
4877 vpushi(expr_const());
4878 else {
4879 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4880 length must always be evaluated, even under nocode_wanted,
4881 so that its size slot is initialized (e.g. under sizeof
4882 or typeof). */
4883 nocode_wanted = 0;
4884 gexpr();
4886 check:
4887 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4888 n = vtop->c.i;
4889 if (n < 0)
4890 tcc_error("invalid array size");
4891 } else {
4892 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4893 tcc_error("size of variable length array should be an integer");
4894 n = 0;
4895 t1 = VT_VLA;
4898 skip(']');
4899 /* parse next post type */
4900 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
4902 if ((type->t & VT_BTYPE) == VT_FUNC)
4903 tcc_error("declaration of an array of functions");
4904 if ((type->t & VT_BTYPE) == VT_VOID
4905 || type_size(type, &unused_align) < 0)
4906 tcc_error("declaration of an array of incomplete type elements");
4908 t1 |= type->t & VT_VLA;
4910 if (t1 & VT_VLA) {
4911 if (n < 0) {
4912 if (td & TYPE_NEST)
4913 tcc_error("need explicit inner array size in VLAs");
4915 else {
4916 loc -= type_size(&int_type, &align);
4917 loc &= -align;
4918 n = loc;
4920 vpush_type_size(type, &align);
4921 gen_op('*');
4922 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4923 vswap();
4924 vstore();
4927 if (n != -1)
4928 vpop();
4929 nocode_wanted = saved_nocode_wanted;
4931 /* we push an anonymous symbol which will contain the array
4932 element type */
4933 s = sym_push(SYM_FIELD, type, 0, n);
4934 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4935 type->ref = s;
4936 if (vla_array_str) {
4937 if (t1 & VT_VLA)
4938 s->vla_array_str = vla_array_str;
4939 else
4940 tok_str_free_str(vla_array_str);
4943 return 1;
/* Parse a type declarator (except basic type), and return the type
   in 'type'.  'td' is a bitmask indicating which kind of type decl is
   expected.  'type' should contain the basic type.  'ad' is the
   attribute definition of the basic type.  It can be modified by
   type_decl().  If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls.  */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    CType *post, *ret;
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post = ret = type;

    /* consume the pointer derivations ('*' possibly followed by
       qualifiers/attributes); each iteration wraps 'type' one level */
    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK__Atomic:
            qualifiers |= VT_ATOMIC;
            goto redo;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* 'restrict' is accepted but carries no type bit here */
            goto redo;
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            break;
        }
        mk_pointer(type);
        type->t |= qualifiers;
        if (ret == type)
            /* innermost pointed to type is the one for the first derivation */
            ret = pointed_type(type);
    }

    if (tok == '(') {
        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this.  */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any).  */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
            skip(')');
        } else
            goto abstract;
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        *v = tok;
        next();
    } else {
    abstract:
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
        *v = 0;
    }
    /* array/function suffixes apply to the innermost nested declarator */
    post_type(post, ad, storage, td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
    parse_attribute(ad);
    type->t |= storage;
    return ret;
}
/* indirection with full error checking and bound check.
   Dereferences the pointer on top of the value stack (vtop) in place:
   the pointed-to type replaces the pointer type and VT_LVAL is set
   unless the result is an array, VLA or function (those are never
   lvalues).  A function value is passed through unchanged. */
ST_FUNC void indir(void)
{
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            return;
        expect("pointer");
    }
    /* the pointer value itself must live in a register before we can
       treat the target as an lvalue */
    if (vtop->r & VT_LVAL)
        gv(RC_INT);
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= VT_LVAL;
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
#endif
    }
}
/* pass a parameter to a function and do type checking and casting.
   'func' is the called function's symbol, 'arg' the formal parameter
   the current vtop value is bound to (NULL when past the declared
   parameters, i.e. for '...' arguments or old-style calls).  */
static void gfunc_param_typed(Sym *func, Sym *arg)
{
    int func_type;
    CType type;

    func_type = func->f.func_type;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
            gen_cast_s(VT_DOUBLE);
        } else if (vtop->type.t & VT_BITFIELD) {
            /* bit-fields are promoted to their underlying base type */
            type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
            type.ref = vtop->type.ref;
            gen_cast(&type);
        } else if (vtop->r & VT_MUSTCAST) {
            /* pending char/short promotion must be materialized */
            force_charshort_cast();
        }
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
    } else {
        type = arg->type;
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
    }
}
/* parse an expression and return its type without any side effect.
   Code generation is suppressed by bumping nocode_wanted around the
   call to 'expr_fn' (e.g. gexpr or expr_eq); the parsed value is
   popped from the value stack immediately. */
static void expr_type(CType *type, void (*expr_fn)(void))
{
    nocode_wanted++;
    expr_fn();
    *type = vtop->type;
    vpop();
    nocode_wanted--;
}
/* parse an expression of the form '(type)' or '(expr)' and return its
   type (used by typeof-style constructs).  Only the type is kept; no
   code is generated for the expression form. */
static void parse_expr_type(CType *type)
{
    int n;
    AttributeDef ad;

    skip('(');
    if (parse_btype(type, &ad, 0)) {
        /* it's a type name: parse the (abstract) declarator part */
        type_decl(type, &ad, &n, TYPE_ABSTRACT);
    } else {
        /* plain expression: evaluate side-effect free for its type */
        expr_type(type, gexpr);
    }
    skip(')');
}
/* parse a type name (basic type plus abstract declarator) into 'type';
   errors out if the tokens do not start a type. */
static void parse_type(CType *type)
{
    AttributeDef ad;
    int n;

    if (!parse_btype(type, &ad, 0)) {
        expect("type");
    }
    type_decl(type, &ad, &n, TYPE_ABSTRACT);
}
/* Parse the parenthesized argument list of a builtin.  'args' is a
   template string, one char per argument:
     'e' plain expression (no cast applied)
     't' type name (pushed on the value stack)
     'v'/'V' cast to (const) void*      -- 'V' falls through into 'v'
     's'/'S' cast to (const) char*      -- 'S' falls through into 's'
     'i' cast to int
     'l' cast to size_t
   If 'nc' is nonzero the whole list is parsed with code generation
   suppressed (nocode_wanted). */
static void parse_builtin_params(int nc, const char *args)
{
    char c, sep = '(';
    CType type;
    if (nc)
        nocode_wanted++;
    next();
    /* zero-argument builtin: still consume the '(' */
    if (*args == 0)
        skip(sep);
    while ((c = *args++)) {
        skip(sep);
        sep = ',';
        if (c == 't') {
            parse_type(&type);
            vpush(&type);
            continue;
        }
        expr_eq();
        type.ref = NULL;
        type.t = 0;
        switch (c) {
            case 'e':
                continue;
            case 'V':
                type.t = VT_CONSTANT;
                /* fall through */
            case 'v':
                type.t |= VT_VOID;
                mk_pointer (&type);
                break;
            case 'S':
                type.t = VT_CONSTANT;
                /* fall through */
            case 's':
                type.t |= char_type.t;
                mk_pointer (&type);
                break;
            case 'i':
                type.t = VT_INT;
                break;
            case 'l':
                type.t = VT_SIZE_T;
                break;
            default:
                break;
        }
        gen_assign_cast(&type);
    }
    skip(')');
    if (nc)
        nocode_wanted--;
}
/* Parse a call to one of the __atomic_* builtins ('atok' is its token)
   and lower it to a call of the matching runtime helper
   ("<name>_<size>").  Argument and return types are driven by the
   per-builtin template string below. */
static void parse_atomic(int atok)
{
    int size, align, arg;
    CType *atom, *atom_ptr, ct = {0};
    char buf[40];
    static const char *const templates[] = {
        /*
         * Each entry consists of callback and function template.
         * The template represents argument types and return type.
         *
         * ? void (return-only)
         * b bool
         * a atomic
         * A read-only atomic
         * p pointer to memory
         * v value
         * m memory model
         */

        /* keep in order of appearance in tcctok.h: */
        /* __atomic_store */ "avm.?",
        /* __atomic_load */ "Am.v",
        /* __atomic_exchange */ "avm.v",
        /* __atomic_compare_exchange */ "apvbmm.b",
        /* __atomic_fetch_add */ "avm.v",
        /* __atomic_fetch_sub */ "avm.v",
        /* __atomic_fetch_or */ "avm.v",
        /* __atomic_fetch_xor */ "avm.v",
        /* __atomic_fetch_and */ "avm.v"
    };
    const char *template = templates[(atok - TOK___atomic_store)];

    atom = atom_ptr = NULL;
    size = 0; /* pacify compiler */
    next();
    skip('(');
    for (arg = 0;;) {
        expr_eq();
        switch (template[arg]) {
        case 'a':
        case 'A':
            /* the atomic object pointer fixes 'atom'/'size' used by
               the remaining arguments */
            atom_ptr = &vtop->type;
            if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            atom = pointed_type(atom_ptr);
            size = type_size(atom, &align);
            /* size must be a power of two <= 8; the fetch-ops further
               require an integral (or integer-sized pointer) target */
            if (size > 8
                || (size & (size - 1))
                || (atok > TOK___atomic_compare_exchange
                    && (0 == btype_size(atom->t & VT_BTYPE)
                        || (atom->t & VT_BTYPE) == VT_PTR)))
                expect("integral or integer-sized pointer target type");
            /* GCC does not care either: */
            /* if (!(atom->t & VT_ATOMIC))
                tcc_warning("pointer target declaration is missing '_Atomic'"); */
            break;

        case 'p':
            if ((vtop->type.t & VT_BTYPE) != VT_PTR
                || type_size(pointed_type(&vtop->type), &align) != size)
                tcc_error("pointer target type mismatch in argument %d", arg + 1);
            gen_assign_cast(atom_ptr);
            break;
        case 'v':
            gen_assign_cast(atom);
            break;
        case 'm':
            gen_assign_cast(&int_type);
            break;
        case 'b':
            ct.t = VT_BOOL;
            gen_assign_cast(&ct);
            break;
        }
        /* '.' in the template separates arguments from the return type */
        if ('.' == template[++arg])
            break;
        skip(',');
    }
    skip(')');

    ct.t = VT_VOID;
    switch (template[arg + 1]) {
    case 'b':
        ct.t = VT_BOOL;
        break;
    case 'v':
        ct = *atom;
        break;
    }

    /* call the size-specialized helper, e.g. "__atomic_load_4" */
    sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
    vpush_helper_func(tok_alloc_const(buf));
    vrott(arg + 1);
    gfunc_call(arg);

    vpush(&ct);
    PUT_R_RET(vtop, ct.t);
    if (ct.t == VT_BOOL) {
#ifdef PROMOTE_RET
        vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
        vtop->type.t = VT_INT;
#endif
    }
}
/* Parse a unary expression and leave its value on the value stack
   (vtop): constants and string literals, parenthesized expressions,
   casts and compound literals, GNU statement expressions, prefix
   operators (* & ! ~ + - ++ --), sizeof/alignof, the various builtins,
   _Generic, identifiers, and then any postfix operators
   (++ -- . -> [] and function calls). */
ST_FUNC void unary(void)
{
    int n, t, align, size, r, sizeof_caller;
    CType type;
    Sym *s;
    AttributeDef ad;

    /* generate line number info */
    if (debug_modes)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);

    /* in_sizeof is consumed here so that nested unary() calls don't
       inherit it */
    sizeof_caller = in_sizeof;
    in_sizeof = 0;
    type.ref = NULL;
    /* XXX: GCC 2.95.3 does not generate a table although it should be
       better here */
 tok_next:
    switch(tok) {
    case TOK_EXTENSION:
        next();
        goto tok_next;
    case TOK_LCHAR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT|VT_UNSIGNED;
        goto push_tokc;
#endif
    case TOK_CINT:
    case TOK_CCHAR:
        t = VT_INT;
 push_tokc:
        /* push the literal in 'tokc' with basic type 't' */
        type.t = t;
        vsetc(&type, VT_CONST, &tokc);
        next();
        break;
    case TOK_CUINT:
        t = VT_INT | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CLLONG:
        t = VT_LLONG;
        goto push_tokc;
    case TOK_CULLONG:
        t = VT_LLONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CFLOAT:
        t = VT_FLOAT;
        goto push_tokc;
    case TOK_CDOUBLE:
        t = VT_DOUBLE;
        goto push_tokc;
    case TOK_CLDOUBLE:
        t = VT_LDOUBLE;
        goto push_tokc;
    case TOK_CLONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
        goto push_tokc;
    case TOK_CULONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK___FUNCTION__:
        if (!gnu_ext)
            goto tok_identifier;
        /* fall thru */
    case TOK___FUNC__:
        {
            Section *sec;
            int len;
            /* special function name identifier */
            len = strlen(funcname) + 1;
            /* generate char[len] type */
            type.t = char_type.t;
            if (tcc_state->warn_write_strings & WARN_ON)
                type.t |= VT_CONSTANT;
            mk_pointer(&type);
            type.t |= VT_ARRAY;
            type.ref->c = len;
            sec = rodata_section;
            vpush_ref(&type, sec, sec->data_offset, len);
            if (!NODATA_WANTED)
                memcpy(section_ptr_add(sec, len), funcname, len);
            next();
        }
        break;
    case TOK_LSTR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
#else
        t = VT_INT;
#endif
        goto str_init;
    case TOK_STR:
        /* string parsing */
        t = char_type.t;
    str_init:
        if (tcc_state->warn_write_strings & WARN_ON)
            t |= VT_CONSTANT;
        type.t = t;
        mk_pointer(&type);
        type.t |= VT_ARRAY;
        memset(&ad, 0, sizeof(AttributeDef));
        ad.section = rodata_section;
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
        break;
    case '(':
        next();
        /* cast ? */
        if (parse_btype(&type, &ad, 0)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            skip(')');
            /* check ISOC99 compound literal */
            if (tok == '{') {
                    /* data is allocated locally by default */
                if (global_expr)
                    r = VT_CONST;
                else
                    r = VT_LOCAL;
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= VT_LVAL;
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
            } else {
                /* sizeof(type): just deliver the type to the caller */
                if (sizeof_caller) {
                    vpush(&type);
                    return;
                }
                unary();
                gen_cast(&type);
            }
        } else if (tok == '{') {
            int saved_nocode_wanted = nocode_wanted;
            if (const_wanted && !(nocode_wanted & unevalmask))
                expect("constant");
            if (0 == local_scope)
                tcc_error("statement expression outside of function");
            /* save all registers */
            save_regs(0);
            /* statement expression : we do not accept break/continue
               inside as GCC does.  We do retain the nocode_wanted state,
               as statement expressions can't ever be entered from the
               outside, so any reactivation of code emission (from labels
               or loop heads) can be disabled again after the end of it. */
            block(1);
            /* If the statement expr can be entered, then we retain the current
               nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
               If it can't be entered then the state is that from before the
               statement expression. */
            if (saved_nocode_wanted)
                nocode_wanted = saved_nocode_wanted;
            skip(')');
        } else {
            gexpr();
            skip(')');
        }
        break;
    case '*':
        next();
        unary();
        indir();
        break;
    case '&':
        next();
        unary();
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & VT_ARRAY))
            test_lvalue();
        if (vtop->sym)
            vtop->sym->a.addrtaken = 1;
        mk_pointer(&vtop->type);
        gaddrof();
        break;
    case '!':
        next();
        unary();
        gen_test_zero(TOK_EQ);
        break;
    case '~':
        next();
        unary();
        vpushi(-1);
        gen_op('^');
        break;
    case '+':
        next();
        unary();
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need an noop (otherwise -0.0 will be transformed
           into +0.0).  */
        if (!is_float(vtop->type.t)) {
            vpushi(0);
            gen_op('+');
        }
        break;
    case TOK_SIZEOF:
    case TOK_ALIGNOF1:
    case TOK_ALIGNOF2:
    case TOK_ALIGNOF3:
        t = tok;
        next();
        in_sizeof++;
        expr_type(&type, unary); /* Perform a in_sizeof = 0; */
        if (t == TOK_SIZEOF) {
            vpush_type_size(&type, &align);
            gen_cast_s(VT_SIZE_T);
        } else {
            type_size(&type, &align);
            s = NULL;
            if (vtop[1].r & VT_SYM)
                s = vtop[1].sym; /* hack: accessing previous vtop */
            if (s && s->a.aligned)
                align = 1 << (s->a.aligned - 1);
            vpushs(align);
        }
        break;

    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
        parse_builtin_params(0, "ee");
        vpop();
        break;
    case TOK_builtin_types_compatible_p:
        parse_builtin_params(0, "tt");
        vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
        vtop -= 2;
        vpushi(n);
        break;
    case TOK_builtin_choose_expr:
        {
            int64_t c;
            next();
            skip('(');
            c = expr_const64();
            skip(',');
            /* the unselected operand is parsed with code generation
               disabled and its value dropped */
            if (!c) {
                nocode_wanted++;
            }
            expr_eq();
            if (!c) {
                vpop();
                nocode_wanted--;
            }
            skip(',');
            if (c) {
                nocode_wanted++;
            }
            expr_eq();
            if (c) {
                vpop();
                nocode_wanted--;
            }
            skip(')');
        }
        break;
    case TOK_builtin_constant_p:
        parse_builtin_params(1, "e");
        n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
            !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
        vtop--;
        vpushi(n);
        break;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
        {
            int tok1 = tok;
            int level;
            next();
            skip('(');
            if (tok != TOK_CINT) {
                tcc_error("%s only takes positive integers",
                          tok1 == TOK_builtin_return_address ?
                          "__builtin_return_address" :
                          "__builtin_frame_address");
            }
            level = (uint32_t)tokc.i;
            next();
            skip(')');
            type.t = VT_VOID;
            mk_pointer(&type);
            vset(&type, VT_LOCAL, 0);       /* local frame */
            /* walk up 'level' saved frame pointers */
            while (level--) {
#ifdef TCC_TARGET_RISCV64
                vpushi(2*PTR_SIZE);
                gen_op('-');
#endif
                mk_pointer(&vtop->type);
                indir();                    /* -> parent frame */
            }
            if (tok1 == TOK_builtin_return_address) {
                // assume return address is just above frame pointer on stack
#ifdef TCC_TARGET_ARM
                vpushi(2*PTR_SIZE);
                gen_op('+');
#elif defined TCC_TARGET_RISCV64
                vpushi(PTR_SIZE);
                gen_op('-');
#else
                vpushi(PTR_SIZE);
                gen_op('+');
#endif
                mk_pointer(&vtop->type);
                indir();
            }
        }
        break;
#ifdef TCC_TARGET_RISCV64
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        gen_va_start();
        vstore();
        break;
#endif
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        vtop->r = r;
        vtop->type = char_pointer_type;
        vtop->c.i += 8;
        vstore();
        break;
#else
    case TOK_builtin_va_arg_types:
        parse_builtin_params(0, "t");
        vpushi(classify_x86_64_va_arg(&vtop->type));
        vswap();
        vpop();
        break;
#endif
#endif

#ifdef TCC_TARGET_ARM64
    case TOK_builtin_va_start: {
        parse_builtin_params(0, "ee");
        //xx check types
        gen_va_start();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
    case TOK_builtin_va_arg: {
        parse_builtin_params(0, "et");
        type = vtop->type;
        vpop();
        //xx check types
        gen_va_arg(&type);
        vtop->type = type;
        break;
    }
    case TOK___arm64_clear_cache: {
        parse_builtin_params(0, "ee");
        gen_clear_cache();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
#endif

    /* atomic operations */
    case TOK___atomic_store:
    case TOK___atomic_load:
    case TOK___atomic_exchange:
    case TOK___atomic_compare_exchange:
    case TOK___atomic_fetch_add:
    case TOK___atomic_fetch_sub:
    case TOK___atomic_fetch_or:
    case TOK___atomic_fetch_xor:
    case TOK___atomic_fetch_and:
        parse_atomic(tok);
        break;

    /* pre operations */
    case TOK_INC:
    case TOK_DEC:
        t = tok;
        next();
        unary();
        inc(0, t);
        break;
    case '-':
        next();
        unary();
        if (is_float(vtop->type.t)) {
            gen_opif(TOK_NEG);
        } else {
            vpushi(0);
            vswap();
            gen_op('-');
        }
        break;
    case TOK_LAND:
        if (!gnu_ext)
            goto tok_identifier;
        next();
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
        if (!s) {
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
        } else {
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
        }
        /* label addresses are static void* values */
        if ((s->type.t & VT_BTYPE) != VT_PTR) {
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        }
        vpushsym(&s->type, s);
        next();
        break;

    case TOK_GENERIC:
    {
        CType controlling_type;
        int has_default = 0;
        int has_match = 0;
        int learn = 0;
        TokenString *str = NULL;
        int saved_const_wanted = const_wanted;

        next();
        skip('(');
        const_wanted = 0;
        expr_type(&controlling_type, expr_eq);
        controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
        if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&controlling_type);
        const_wanted = saved_const_wanted;
        /* scan the associations; the selected expression is saved as a
           token string in 'str' and replayed afterwards */
        for (;;) {
            learn = 0;
            skip(',');
            if (tok == TOK_DEFAULT) {
                if (has_default)
                    tcc_error("too many 'default'");
                has_default = 1;
                if (!has_match)
                    learn = 1;
                next();
            } else {
                AttributeDef ad_tmp;
                int itmp;
                CType cur_type;

                parse_btype(&cur_type, &ad_tmp, 0);
                type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
                if (compare_types(&controlling_type, &cur_type, 0)) {
                    if (has_match) {
                        tcc_error("type match twice");
                    }
                    has_match = 1;
                    learn = 1;
                }
            }
            skip(':');
            if (learn) {
                if (str)
                    tok_str_free(str);
                skip_or_save_block(&str);
            } else {
                skip_or_save_block(NULL);
            }
            if (tok == ')')
                break;
        }
        if (!str) {
            char buf[60];
            type_to_str(buf, sizeof buf, &controlling_type, NULL);
            tcc_error("type '%s' does not match any association", buf);
        }
        begin_macro(str, 1);
        next();
        expr_eq();
        if (tok != TOK_EOF)
            expect(",");
        end_macro();
        next();
        break;
    }
    // special qnan , snan and infinity values
    case TOK___NAN__:
        n = 0x7fc00000;
special_math_val:
        vpushi(n);
        vtop->type.t = VT_FLOAT;
        next();
        break;
    case TOK___SNAN__:
        n = 0x7f800001;
        goto special_math_val;
    case TOK___INF__:
        n = 0x7f800000;
        goto special_math_val;

    default:
    tok_identifier:
        t = tok;
        next();
        if (t < TOK_UIDENT)
            expect("identifier");
        s = sym_find(t);
        if (!s || IS_ASM_SYM(s)) {
            const char *name = get_tok_str(t, NULL);
            if (tok != '(')
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            tcc_warning_c(warn_implicit_function_declaration)(
                "implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type);
        }

        r = s->r;
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value. */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;

        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser for
           regvars. */
        vtop->sym = s;

        if (r & VT_SYM) {
            vtop->c.i = 0;
        } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
            vtop->c.i = s->enum_val;
        }
        break;
    }

    /* post operations */
    while (1) {
        if (tok == TOK_INC || tok == TOK_DEC) {
            inc(1, tok);
            next();
        } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
            int qualifiers, cumofs = 0;
            /* field */
            if (tok == TOK_ARROW)
                indir();
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            test_lvalue();
            gaddrof();
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            if (tok == TOK_CDOUBLE)
                expect("field name");
            next();
            if (tok == TOK_CINT || tok == TOK_CUINT)
                expect("field name");
            s = find_field(&vtop->type, tok, &cumofs);
            if (!s)
                tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            vpushi(cumofs + s->c);
            gen_op('+');
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check)
                    vtop->r |= VT_MUSTBOUND;
#endif
            }
            next();
        } else if (tok == '[') {
            next();
            gexpr();
            gen_op('+');
            indir();
            skip(']');
        } else if (tok == '(') {
            SValue ret;
            Sym *sa;
            int nb_args, ret_nregs, ret_align, regsize, variadic;

            /* function call  */
            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                        goto error_func;
                } else {
                error_func:
                    expect("function pointer");
                }
            } else {
                vtop->r &= ~VT_LVAL; /* no lvalue */
            }
            /* get return type */
            s = vtop->type.ref;
            next();
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;
            ret.r2 = VT_CONST;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                if (ret_nregs <= 0) {
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                /* On arm64, a small struct is return in registers.
                   It is much easier to write it to memory if we know
                   that we are allowed to write some extra bytes, so
                   round the allocated space up to a power of 2:  */
                if (size < 16)
                    while (size & (size - 1))
                        size = (size | (size - 1)) + 1;
#endif
                    loc = (loc - size) & -align;
                    ret.type = s->type;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing
                       problems */
                    vseti(VT_LOCAL, loc);
#ifdef CONFIG_TCC_BCHECK
                    if (tcc_state->do_bounds_check)
                        --loc;
#endif
                    ret.c = vtop->c;
                    if (ret_nregs < 0)
                        vtop--;
                    else
                        nb_args++;
                }
            } else {
                ret_nregs = 1;
                ret.type = s->type;
            }

            if (ret_nregs > 0) {
                /* return in register */
                ret.c.i = 0;
                PUT_R_RET(&ret, ret.type.t);
            }
            if (tok != ')') {
                for(;;) {
                    expr_eq();
                    gfunc_param_typed(s, sa);
                    nb_args++;
                    if (sa)
                        sa = sa->next;
                    if (tok == ')')
                        break;
                    skip(',');
                }
            }
            if (sa)
                tcc_error("too few arguments to function");
            skip(')');
            gfunc_call(nb_args);

            if (ret_nregs < 0) {
                vsetc(&ret.type, ret.r, &ret.c);
#ifdef TCC_TARGET_RISCV64
                arch_transfer_ret_regs(1);
#endif
            } else {
                /* return value */
                for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                    vsetc(&ret.type, r, &ret.c);
                    vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
                }

                /* handle packed struct return */
                if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                    int addr, offset;

                    size = type_size(&s->type, &align);
                    /* We're writing whole regs often, make sure there's enough
                       space.  Assume register size is power of 2.  */
                    if (regsize > align)
                        align = regsize;
                    loc = (loc - size) & -align;
                    addr = loc;
                    offset = 0;
                    for (;;) {
                        vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                        vswap();
                        vstore();
                        vtop--;
                        if (--ret_nregs == 0)
                            break;
                        offset += regsize;
                    }
                    vset(&s->type, VT_LOCAL | VT_LVAL, addr);
                }

                /* Promote char/short return values. This is matters only
                   for calling function that were not compiled by TCC and
                   only on some architectures.  For those where it doesn't
                   matter we expect things to be already promoted to int,
                   but not larger.  */
                t = s->type.t & VT_BTYPE;
                if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
#ifdef PROMOTE_RET
                    vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
                    vtop->type.t = VT_INT;
#endif
                }
            }
            if (s->f.func_noreturn) {
                if (debug_modes)
                    tcc_tcov_block_end(tcc_state, -1);
                CODE_OFF();
            }
        } else {
            break;
        }
    }
}
6015 #ifndef precedence_parser /* original top-down parser */
6017 static void expr_prod(void)
6019 int t;
6021 unary();
6022 while ((t = tok) == '*' || t == '/' || t == '%') {
6023 next();
6024 unary();
6025 gen_op(t);
6029 static void expr_sum(void)
6031 int t;
6033 expr_prod();
6034 while ((t = tok) == '+' || t == '-') {
6035 next();
6036 expr_prod();
6037 gen_op(t);
6041 static void expr_shift(void)
6043 int t;
6045 expr_sum();
6046 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6047 next();
6048 expr_sum();
6049 gen_op(t);
6053 static void expr_cmp(void)
6055 int t;
6057 expr_shift();
6058 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6059 t == TOK_ULT || t == TOK_UGE) {
6060 next();
6061 expr_shift();
6062 gen_op(t);
6066 static void expr_cmpeq(void)
6068 int t;
6070 expr_cmp();
6071 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6072 next();
6073 expr_cmp();
6074 gen_op(t);
6078 static void expr_and(void)
6080 expr_cmpeq();
6081 while (tok == '&') {
6082 next();
6083 expr_cmpeq();
6084 gen_op('&');
6088 static void expr_xor(void)
6090 expr_and();
6091 while (tok == '^') {
6092 next();
6093 expr_and();
6094 gen_op('^');
6098 static void expr_or(void)
6100 expr_xor();
6101 while (tok == '|') {
6102 next();
6103 expr_xor();
6104 gen_op('|');
6108 static void expr_landor(int op);
6110 static void expr_land(void)
6112 expr_or();
6113 if (tok == TOK_LAND)
6114 expr_landor(tok);
6117 static void expr_lor(void)
6119 expr_land();
6120 if (tok == TOK_LOR)
6121 expr_landor(tok);
6124 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6125 #else /* defined precedence_parser */
6126 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6127 # define expr_lor() unary(), expr_infix(1)
6129 static int precedence(int tok)
6131 switch (tok) {
6132 case TOK_LOR: return 1;
6133 case TOK_LAND: return 2;
6134 case '|': return 3;
6135 case '^': return 4;
6136 case '&': return 5;
6137 case TOK_EQ: case TOK_NE: return 6;
6138 relat: case TOK_ULT: case TOK_UGE: return 7;
6139 case TOK_SHL: case TOK_SAR: return 8;
6140 case '+': case '-': return 9;
6141 case '*': case '/': case '%': return 10;
6142 default:
6143 if (tok >= TOK_ULE && tok <= TOK_GT)
6144 goto relat;
6145 return 0;
/* cached precedence for single-byte tokens (filled once at startup) */
static unsigned char prec[256];
static void init_prec(void)
{
    int t;
    for (t = 0; t < 256; t++)
        prec[t] = precedence(t);
}
6155 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6157 static void expr_landor(int op);
/* precedence-climbing parser for binary operators: consumes operators
   whose precedence is >= 'p', recursing for tighter-binding right-hand
   sides.  The left operand is already on the value stack. */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            /* short-circuit operators need their own jump handling */
            expr_landor(t);
        } else {
            next();
            unary();
            /* bind tighter operators to the right-hand side first */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6175 #endif
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
{
    int c = -1;
    /* only plain constants can be decided; a weak symbol's address may
       be null at run time, so it stays undetermined */
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        /* cast a copy to _Bool to normalize the truth value */
        vdup();
        gen_cast_s(VT_BOOL);
        c = vtop->c.i;
        vpop();
    }
    return c;
}
/* Generate code for a chain of '&&' or '||' ('op') operators.  The
   first operand is already on the value stack.  Statically decidable
   operands are folded; once the result is known ('f' set) the remaining
   operands are parsed with code generation disabled.
   Locals: t = chained jump target, cc = all operands were constant,
   f = result decided early, i = value that keeps evaluation going
   (1 for '&&', 0 for '||'), c = 3-way truth of current operand. */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0;
        else if (c != i)
            nocode_wanted++, f = 1;
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t);
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* result known statically (or last operand decides): push 0/1 */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f;
    } else {
        /* runtime result: leave it as comparison flags + jump lists */
        gvtst_set(i, t);
    }
}
6221 static int is_cond_bool(SValue *sv)
6223 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6224 && (sv->type.t & VT_BTYPE) == VT_INT)
6225 return (unsigned)sv->c.i < 2;
6226 if (sv->r == VT_CMP)
6227 return 1;
6228 return 0;
/* Parse and generate code for a conditional expression 'a ? b : c',
   including the GNU 'a ?: c' extension.  Handles static folding when
   the condition is a compile-time constant (c == 0/1), register
   allocation across both branches, struct lvalue preservation, and a
   jump-combining optimization when both branches are boolean. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;
    int ncw_prev;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);  /* GNU 'x ?: y' form */
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0);  /* jump taken when condition is false */
            } else {
                vpop();            /* condition is a known constant */
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup();              /* the condition itself is the 1st operand */
            tt = gvtst(0, 0);
        }

        ncw_prev = nocode_wanted;
        if (c == 0)
            nocode_wanted++;       /* then-branch statically dead */
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0);           /* skip over the else-branch */
            gsym(tt);
        } else
            u = 0;

        nocode_wanted = ncw_prev;
        if (c == 1)
            nocode_wanted++;       /* else-branch statically dead */
        skip(':');
        expr_cond();

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            nocode_wanted = ncw_prev;
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");
        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        nocode_wanted = ncw_prev;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();   /* undo the &a/&b transformation */
    }
}
6359 static void expr_eq(void)
6361 int t;
6363 expr_cond();
6364 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6365 test_lvalue();
6366 next();
6367 if (t == '=') {
6368 expr_eq();
6369 } else {
6370 vdup();
6371 expr_eq();
6372 gen_op(TOK_ASSIGN_OP(t));
6374 vstore();
6378 ST_FUNC void gexpr(void)
6380 while (1) {
6381 expr_eq();
6382 if (tok != ',')
6383 break;
6384 vpop();
6385 next();
/* parse a constant expression and return value in vtop.  */
static void expr_const1(void)
{
    const_wanted++;
    /* disable code generation; the unevalmask offset marks this as an
       unevaluated subexpression rather than plain dead code */
    nocode_wanted += unevalmask + 1;
    expr_cond();
    nocode_wanted -= unevalmask + 1;
    const_wanted--;
}
6399 /* parse an integer constant and return its value. */
6400 static inline int64_t expr_const64(void)
6402 int64_t c;
6403 expr_const1();
6404 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6405 expect("constant expression");
6406 c = vtop->c.i;
6407 vpop();
6408 return c;
6411 /* parse an integer constant and return its value.
6412 Complain if it doesn't fit 32bit (signed or unsigned). */
6413 ST_FUNC int expr_const(void)
6415 int c;
6416 int64_t wc = expr_const64();
6417 c = wc;
6418 if (c != wc && (unsigned)c != wc)
6419 tcc_error("constant exceeds 32 bit");
6420 return c;
6423 /* ------------------------------------------------------------------------- */
6424 /* return from function */
6426 #ifndef TCC_TARGET_ARM64
/* Generate code to return the value on top of the value stack from the
   current function, following the target ABI for struct returns:
   via hidden pointer (ret_nregs == 0), packed into one or more
   registers (ret_nregs > 0), or target-specific (ret_nregs < 0). */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned on the stack, copy it
               to a properly aligned temporary first */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
6488 #endif
6490 static void check_func_return(void)
6492 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6493 return;
6494 if (!strcmp (funcname, "main")
6495 && (func_vt.t & VT_BTYPE) == VT_INT) {
6496 /* main returns 0 by default */
6497 vpushi(0);
6498 gen_assign_cast(&func_vt);
6499 gfunc_return(&func_vt);
6500 } else {
6501 tcc_warning("function might return no value: '%s'", funcname);
6505 /* ------------------------------------------------------------------------- */
6506 /* switch/case */
6508 static int case_cmpi(const void *pa, const void *pb)
6510 int64_t a = (*(struct case_t**) pa)->v1;
6511 int64_t b = (*(struct case_t**) pb)->v1;
6512 return a < b ? -1 : a > b;
6515 static int case_cmpu(const void *pa, const void *pb)
6517 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6518 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6519 return a < b ? -1 : a > b;
/* Test vtop and branch to known address 'a' when true; 't' is an
   existing forward-jump chain to merge into the test. */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
/* Generate the dispatch code for a sorted array of switch cases.
   The switch value is on the value stack.  Uses binary search while
   more than 8 cases remain, then a linear scan; falls through to
   *bsym (the default/break chain) when nothing matches. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test suffices */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* case range: x <= v2 && x >= v1 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6583 /* ------------------------------------------------------------------------- */
6584 /* __attribute__((cleanup(fn))) */
/* Emit calls to the __attribute__((cleanup)) handlers of the current
   scope's cleanup chain, innermost first, stopping at 'stop' (the
   chain entry of an enclosing scope, or NULL for all). */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;      /* the cleanup function */
        Sym *vs = cls->prev_tok;  /* the variable it is attached to */

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        mk_pointer(&vtop->type);
        gaddrof();                /* cleanup fn receives &variable */
        gfunc_call(1);
    }
}
/* For a backward goto: run the cleanups of every scope being left,
   i.e. everything between the current cleanup chain and the nearest
   common ancestor (NCA) of it and the label's recorded chain. */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* walk the deeper chain up until both depths match */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
        ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
        ;
    /* then walk both in lock-step until they meet */
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
        ;

    try_call_scope_cleanup(cc);
}
6623 /* call 'func' for each __attribute__((cleanup(func))) */
/* call 'func' for each __attribute__((cleanup(func))) */
/* Called when leaving a scope: emit cleanups for normal fall-through
   and patch any pending forward gotos that jumped out of this scope
   so their cleanups run too. */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    /* pending_gotos is sorted by depth; handle those deeper than o */
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            if (!jmp)
                jmp = gjmp(0);   /* skip the cleanup stub in normal flow */
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;      /* goto now "starts" at this depth */
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    try_call_scope_cleanup(o->cl.s);
}
6650 /* ------------------------------------------------------------------------- */
6651 /* VLA */
/* Restore the stack pointer saved before a VLA allocation;
   a zero location means nothing was saved. */
static void vla_restore(int sp_loc)
{
    if (sp_loc != 0)
        gen_vla_sp_restore(sp_loc);
}
6659 static void vla_leave(struct scope *o)
6661 struct scope *c = cur_scope, *v = NULL;
6662 for (; c != o && c; c = c->prev)
6663 if (c->vla.num)
6664 v = c;
6665 if (v)
6666 vla_restore(v->vla.locorig);
6669 /* ------------------------------------------------------------------------- */
6670 /* local scopes */
/* Enter a new local scope: 'o' inherits the current scope's state and
   becomes the new cur_scope; records the local symbol/label stack
   positions so prev_scope() can roll them back. */
static void new_scope(struct scope *o)
{
    /* copy and link previous scope */
    *o = *cur_scope;
    o->prev = cur_scope;
    cur_scope = o;
    cur_scope->vla.num = 0;   /* no VLAs declared in this scope yet */

    /* record local declaration stack position */
    o->lstk = local_stack;
    o->llstk = local_label_stack;
    ++local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
}
/* Leave scope 'o': restore VLA stack level, run cleanups, and pop the
   labels and symbols declared inside it. */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
}
6716 /* leave a scope via break/continue(/goto) */
6717 static void leave_scope(struct scope *o)
6719 if (!o)
6720 return;
6721 try_call_scope_cleanup(o->cl.s);
6722 vla_leave(o);
6725 /* ------------------------------------------------------------------------- */
6726 /* call block from 'for do while' loops */
/* Parse a loop body: temporarily install 'bsym'/'csym' as the current
   break/continue jump chains (csym == NULL for switch, which has no
   continue target), then restore the previous ones. */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* Parse and generate code for one statement (or a compound block).
   'is_expr' is set when parsing a GNU statement expression, in which
   case the value of the last expression statement is kept on the
   value stack. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);   /* jump taken when condition is false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }

    } else if (t == TOK_WHILE) {
        d = gind(0);       /* loop head: condition re-test address */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);   /* exit jump, doubles as break chain */
        b = 0;             /* continue chain */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);   /* continue -> re-test condition */
        gsym(a);

    } else if (t == '{') {
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (is_expr)
                    vpop();   /* only the last statement's value is kept */
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return();   /* end of function body */

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);   /* a C99 for-init declaration lives in its own scope */

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl0(VT_LOCAL, 1, NULL)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;         /* break / continue chains */
        c = d = gind(0);   /* c: condition address, d: continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            e = gjmp(0);   /* body runs before the increment expression */
            d = gind(0);
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        a = b = 0;
        d = gind(0);
        lblock(&a, &b);
        gsym(b);           /* continue -> condition test */
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0);   /* jump back to body while condition true */
        gsym_addr(c, d);
        gsym(a);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);

        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);

        /* after sorting, adjacent overlapping ranges are duplicates */
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");

        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
        /* break label */
        gsym(a);

        dynarray_reset(&sw->p, &sw->n);
        cur_switch = sw->prev;
        tcc_free(sw);

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: 'case A ... B:' */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        if (debug_modes)
            tcc_tcov_reset_ind(tcc_state);
        cr->sym = gind(cur_switch->nocode_wanted);
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        if (debug_modes)
            tcc_tcov_reset_ind(tcc_state);
        cur_switch->def_sym = gind(cur_switch->nocode_wanted);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        if (cur_scope->vla.num)
            vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: cleanups of the scopes being left run now */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind(0);
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
            {
                /* Accept attributes after labels (e.g. 'unused') */
                AttributeDef ad_tmp;
                parse_attribute(&ad_tmp);
            }
            vla_restore(cur_scope->vla.loc);
            if (tok != '}')
                goto again;
            /* we accept this, but it is a mistake */
            tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (is_expr) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
7097 /* This skips over a stream of tokens containing balanced {} and ()
7098 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7099 with a '{'). If STR then allocates and stores the skipped tokens
7100 in *STR. This doesn't check if () and {} are nested correctly,
7101 i.e. "({)}" is accepted. */
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted.  */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        int t;
        if (tok == TOK_EOF) {
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;   /* plain skipping may legally hit EOF */
        }
        if (str)
            tok_str_add_tok(*str);
        t = tok;
        next();
        if (t == '{' || t == '(') {
            level++;
        } else if (t == '}' || t == ')') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the saved stream (-1 line marker, 0 token) */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
7135 #define EXPR_CONST 1
7136 #define EXPR_ANY 2
/* Parse one initializer element.  EXPR_CONST requires a load-time
   constant (for static storage); EXPR_ANY accepts any assignment
   expression (for automatic storage). */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals).  */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
7165 #if 1
/* Sanity check: an initializer store must stay inside the space
   reserved for the object (section data for static storage, the
   recorded local frame offset otherwise). */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
7172 #else
7173 #define init_assert(sec, offset)
7174 #endif
7176 /* put zeros for variable based init */
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local storage: emit memset(&local, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
#ifdef TCC_TARGET_ARM
        /* ARM helper expects the arguments in the opposite order */
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
7196 #define DIF_FIRST 1
7197 #define DIF_SIZE_ONLY 2
7198 #define DIF_HAVE_ELEM 4
7199 #define DIF_CLEAR 8
7201 /* delete relocations for specified range c ... c + size. Unfortunatly
7202 in very special cases, relocations may occur unordered */
/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the relocation array in place, dropping entries whose
       r_offset falls inside [c, c + size) */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
7222 static void decl_design_flex(init_params *p, Sym *ref, int index)
7224 if (ref == p->flex_array_ref) {
7225 if (index >= ref->c)
7226 ref->c = index + 1;
7227 } else if (ref->c < 0)
7228 tcc_error("flexible array has zero size in this context");
7231 /* t is the array or struct type. c is the array or struct
7232 address. cur_field is the pointer to the current
7233 field, for arrays the 'c' member contains the current start
7234 index. 'flags' is as in decl_initializer.
7235 'al' contains the already initialized length of the
7236 current container (starting at c). This returns the new length of that. */
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c).  This returns the new length of that.  */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    if (gnu_ext && tok >= TOK_UIDENT) {
        /* GNU 'field:' designator syntax */
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                /* GNU range designator '[a ... b]' */
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (!f)
                expect("field");
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs + f->c;
        }
        cur_field = NULL;   /* a designator overrides positional tracking */
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* positional initialization: advance through the container */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* skip unnamed bitfield padding members */
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        /* range designator: replicate the single parsed value to every
           element of the range */
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7366 /* store a value or an expression directly in global data or in local array */
/* store a value or an expression directly in global data or in local array */
static void init_putv(init_params *p, CType *type, unsigned long c)
{
    int bt;
    void *ptr;
    CType dtype;
    int size, align;
    Section *sec = p->sec;
    uint64_t val;

    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    size = type_size(type, &align);
    if (type->t & VT_BITFIELD)
        size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
    init_assert(p, c + size);

    if (sec) {
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;

        /* only pointer-sized symbol references can become relocations */
        if ((vtop->r & VT_SYM)
            && bt != VT_PTR
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            )
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            vtop--;
            return;
        }

        ptr = sec->data + c;
        val = vtop->c.i;

        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'.  The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one.  That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'.  Ignore
               pointer typed entities here.  Hopefully no real code
               will ever use compound literals with scalar type.  */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over.  */
            Section *ssec;
            ElfSym *esym;
            ElfW_Rel *rel;
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations.  Use the fact that relocs are
                   created it order, so look from the end of relocs
                   until we hit one before the copied region.  */
                unsigned long relofs = ssec->reloc->data_offset;
                while (relofs >= sizeof(*rel)) {
                    relofs -= sizeof(*rel);
                    rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
#if PTR_SIZE == 8
                                   rel->r_addend
#else
                                   0
#endif
                                   );
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                /* write the value byte by byte, merging with the bits
                   already present around the bitfield */
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    n = 8 - bit_pos;
                    if (n > bit_size)
                        n = bit_size;
                    v = val >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                }
            } else
            switch(bt) {
            case VT_BOOL:
                *(char *)ptr = val != 0;
                break;
            case VT_BYTE:
                *(char *)ptr = val;
                break;
            case VT_SHORT:
                write16le(ptr, val);
                break;
            case VT_FLOAT:
                write32le(ptr, val);
                break;
            case VT_DOUBLE:
                write64le(ptr, val);
                break;
            case VT_LDOUBLE:
#if defined TCC_IS_NATIVE_387
                /* Host and target platform may be different but both have x87.
                   On windows, tcc does not use VT_LDOUBLE, except when it is a
                   cross compiler.  In this case a mingw gcc as host compiler
                   comes here with 10-byte long doubles, while msvc or tcc won't.
                   tcc itself can still translate by asm.
                   In any case we avoid possibly random bytes 11 and 12.
                */
                if (sizeof (long double) >= 10)
                    memcpy(ptr, &vtop->c.ld, 10);
#ifdef __TINYC__
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
#endif
                else if (vtop->c.ld == 0.0)
                    ;
                else
#endif
                /* For other platforms it should work natively, but may not work
                   for cross compilers */
                if (sizeof(long double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
                else if (sizeof(double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
#ifndef TCC_CROSS_TEST
                else
                    tcc_error("can't cross compile long double constants");
#endif
                break;

#if PTR_SIZE == 8
            /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
            case VT_LLONG:
            case VT_PTR:
                if (vtop->r & VT_SYM)
                    greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                else
                    write64le(ptr, val);
                break;
            case VT_INT:
                write32le(ptr, val);
                break;
#else
            case VT_LLONG:
                write64le(ptr, val);
                break;
            case VT_PTR:
            case VT_INT:
                if (vtop->r & VT_SYM)
                    greloc(sec, vtop->sym, c, R_DATA_PTR);
                write32le(ptr, val);
                break;
#endif
            default:
                //tcc_internal_error("unexpected type");
                break;
            }
        }
        vtop--;
    } else {
        /* automatic storage: generate an ordinary store */
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        vswap();
        vstore();
        vpop();
    }
}
7554 /* 't' contains the type and storage info. 'c' is the offset of the
7555 object in section 'sec'. If 'sec' is NULL, it means stack based
7556 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7557 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7558 size only evaluation is wanted (only for arrays). */
/* Parse one initializer for an object of type 'type' located at offset 'c'
   (within section p->sec, or stack-relative when p->sec is NULL) and emit
   or store the initialization.  'flags' carries the DIF_* bits documented
   in the comment above (DIF_FIRST, DIF_SIZE_ONLY, DIF_HAVE_ELEM, DIF_CLEAR). */
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
    int len, n, no_oblock, i;
    int size1, align1;
    Sym *s, *f;
    Sym indexsym;    /* fake symbol acting as the running array index */
    CType *t1;
    /* generate line number info */
    if (debug_modes && !p->sec)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
    if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        (!(flags & DIF_SIZE_ONLY)
         /* a struct may be initialized from a struct of same type, as in
            struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
            In that case we need to parse the element in order to check
            it for compatibility below */
         || (type->t & VT_BTYPE) == VT_STRUCT)
        /* evaluate the element expression now; suppress code generation
           during a stack-based size-only pass */
        int ncw_prev = nocode_wanted;
        if ((flags & DIF_SIZE_ONLY) && !p->sec)
            ++nocode_wanted;
        parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        nocode_wanted = ncw_prev;
        flags |= DIF_HAVE_ELEM;
    if (type->t & VT_ARRAY) {
        no_oblock = 1;
        if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            skip('{');
            no_oblock = 0;
        s = type->ref;
        n = s->c;    /* declared element count (< 0 for flexible arrays) */
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);
        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            len = 0;
            cstr_reset(&initstr);
            if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
                tcc_error("unhandled string literal merging");
            /* concatenate adjacent string literals, dropping the
               terminating zero of all but the last one */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                if (initstr.size)
                    initstr.size -= size1;
                if (tok == TOK_STR)
                    len += tokc.str.size;
                else
                    len += tokc.str.size / sizeof(nwchar_t);
                len--;    /* don't count the implicit trailing zero yet */
                cstr_cat(&initstr, tokc.str.data, tokc.str.size);
                next();
            if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
                && tok != TOK_EOF) {
                /* Not a lone literal but part of a bigger expression. */
                unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
                tokc.str.size = initstr.size;
                tokc.str.data = initstr.data;
                goto do_init_array;
            decl_design_flex(p, s, len);
            if (!(flags & DIF_SIZE_ONLY)) {
                int nb = n;
                if (len < nb)
                    nb = len;
                if (len > nb)
                    tcc_warning("initializer-string for array is too long");
                /* in order to go faster for common case (char
                   string in global variable, we handle it
                   specifically */
                if (p->sec && size1 == 1) {
                    init_assert(p, c + nb);
                    if (!NODATA_WANTED)
                        memcpy(p->sec->data + c, initstr.data, nb);
                } else {
                    for(i=0;i<n;i++) {
                        if (i >= nb) {
                            /* only add trailing zero if enough storage (no
                               warning in this case since it is standard) */
                            if (flags & DIF_CLEAR)
                                break;
                            if (n - i >= 4) {
                                init_putz(p, c + i * size1, (n - i) * size1);
                                break;
                            ch = 0;
                        } else if (size1 == 1)
                            ch = ((unsigned char *)initstr.data)[i];
                        else
                            ch = ((nwchar_t *)initstr.data)[i];
                        vpushi(ch);
                        init_putv(p, t1, c + i * size1);
        } else {
          do_init_array:
            indexsym.c = 0;
            f = &indexsym;
          do_init_list:
            /* zero memory once in advance */
            if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
                init_putz(p, c, n*size1);
                flags |= DIF_CLEAR;
            len = 0;
            /* GNU extension: if the initializer is empty for a flex array,
               its size is zero. We won't enter the loop, so set the size
               now. */
            decl_design_flex(p, s, len);
            while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
                len = decl_designator(p, type, c, &f, flags, len);
                flags &= ~DIF_HAVE_ELEM;
                if (type->t & VT_ARRAY) {
                    ++indexsym.c;
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL;    /* a union initializes only one member */
                    else
                        f = f->next;
                    if (no_oblock && f == NULL)
                        break;
                if (tok == '}')
                    break;
                skip(',');
        if (!no_oblock)
            skip('}');
    } else if ((flags & DIF_HAVE_ELEM)
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
            && is_compatible_unqualified_types(type, &vtop->type)) {
        goto one_elem;
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        no_oblock = 1;
        if ((flags & DIF_FIRST) || tok == '{') {
            skip('{');
            no_oblock = 0;
        s = type->ref;
        f = s->next;
        n = s->c;
        size1 = 1;    /* struct members use byte offsets, not element counts */
        goto do_init_list;
    } else if (tok == '{') {
        if (flags & DIF_HAVE_ELEM)
            skip(';');
        next();
        decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
        skip('}');
    } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything than an array if DIF_SIZE_ONLY (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when DIF_SIZE_ONLY is set. */
        /* just skip expression */
        if (flags & DIF_HAVE_ELEM)
            vpop();
        else
            skip_or_save_block(NULL);
    } else {
        if (!(flags & DIF_HAVE_ELEM)) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early. */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
            && vtop->c.i == 0
            && btype_size(type->t & VT_BTYPE) /* not for fp constants */
            /* storing a zero into already-zeroed memory is a no-op */
            vpop();
        else
            init_putv(p, type, c);
7776 /* parse an initializer for type 't' if 'has_init' is non zero, and
7777 allocate space in local or global data space ('r' is either
7778 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7779 variable 'v' of scope 'scope' is declared before initializers
7780 are parsed. If 'v' is zero, then a reference to the new object
7781 is put in the value stack. If 'has_init' is 2, a special parsing
7782 is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
    int has_init, int v, int scope)
    int size, align, addr;
    TokenString *init_str = NULL;
    Section *sec;
    Sym *flexible_array;
    Sym *sym;
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
#endif
    init_params p = {0};
    /* Always allocate static or global variables */
    if (v && (r & VT_VALMASK) == VT_CONST)
        nocode_wanted |= 0x80000000;
    flexible_array = NULL;
    size = type_size(type, &align);
    /* exactly one flexible array may be initialized, either the
       toplevel array or the last member of the toplevel struct */
    if (size < 0) {
        /* If the base type itself was an array type of unspecified size
           (like in 'typedef int arr[]; arr x = {1};') then we will
           overwrite the unknown size by the real one for this decl.
           We need to unshare the ref symbol holding that size. */
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        p.flex_array_ref = type->ref;
    } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
        if (field) {
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
                flexible_array = field;
                p.flex_array_ref = field->type.ref;
                size = -1;    /* force the dry-run sizing pass below */
    if (size < 0) {
        /* If unknown size, do a dry-run 1st pass */
        if (!has_init)
            tcc_error("unknown type size");
        if (has_init == 2) {
            /* only get strings */
            init_str = tok_str_alloc();
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                next();
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else
            skip_or_save_block(&init_str);
        unget_tok(0);
        /* compute size */
        begin_macro(init_str, 1);
        next();
        decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;
        next();
        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");
        /* If there's a flex member and it was used in the initializer
           adjust size. */
        if (flexible_array && flexible_array->type.ref->c > 0)
            size += flexible_array->type.ref->c
                    * pointed_size(&flexible_array->type);
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        if (speca > align)
            align = speca;
    } else if (ad->a.packed) {
        align = 1;
    if (!v && NODATA_WANTED)
        size = 0, align = 1;
    if ((r & VT_VALMASK) == VT_LOCAL) {
        sec = NULL;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
#endif
        /* the stack grows downwards */
        loc = (loc - size) & -align;
        addr = loc;
        p.local_offset = addr + size;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
#endif
            sym = sym_push(v, type, r, addr);
            if (ad->cleanup_func) {
                /* record a pending __attribute__((cleanup)) call for
                   this variable in the current scope */
                Sym *cls = sym_push2(&all_cleanups,
                    SYM_FIELD | ++cur_scope->cl.n, 0, 0);
                cls->prev_tok = sym;
                cls->next = ad->cleanup_func;
                cls->ncl = cur_scope->cl.s;
                cur_scope->cl.s = cls;
            sym->a = ad->a;
        } else {
            /* push local reference */
            vset(type, r, addr);
    } else {
        sym = NULL;
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
                    && sym->type.ref->c > type->ref->c) {
                    /* flex array was already declared with explicit size
                            extern int arr[10];
                            int arr[] = { 1,2,3 }; */
                    type->ref->c = sym->type.ref->c;
                    size = type_size(type, &align);
                patch_storage(sym, ad, type);
                /* we accept several definitions of the same global variable. */
                if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
                    goto no_alloc;
        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            CType *tp = type;
            while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
                tp = &tp->ref->type;
            if (tp->t & VT_CONSTANT) {
                sec = rodata_section;
            } else if (has_init) {
                sec = data_section;
                /*if (tcc_state->g_debug & 4)
                    tcc_warning("rw data: %s", get_tok_str(v, 0));*/
            } else if (tcc_state->nocommon)
                sec = bss_section;
        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (bcheck)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        if (v) {
            if (!sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                patch_storage(sym, ad, NULL);
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            vpush_ref(type, sec, addr, size);
            sym = vtop->sym;
            vtop->r |= r;
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (bcheck) {
            addr_t *bounds_ptr;
            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
#endif
    if (type->t & VT_VLA) {
        int a;
        if (NODATA_WANTED)
            goto no_alloc;
        /* save before-VLA stack pointer if needed */
        if (cur_scope->vla.num == 0) {
            if (cur_scope->prev && cur_scope->prev->vla.num) {
                cur_scope->vla.locorig = cur_scope->prev->vla.loc;
            } else {
                gen_vla_sp_save(loc -= PTR_SIZE);
                cur_scope->vla.locorig = loc;
        vpush_type_size(type, &a);
        gen_vla_alloc(type, a);
#if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
        /* on _WIN64, because of the function args scratch area, the
           result of alloca differs from RSP and is returned in RAX. */
        gen_vla_result(addr), addr = (loc -= PTR_SIZE);
#endif
        gen_vla_sp_save(addr);
        cur_scope->vla.loc = addr;
        cur_scope->vla.num++;
    } else if (has_init) {
        p.sec = sec;
        decl_initializer(&p, type, addr, DIF_FIRST);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        next();
    nocode_wanted = saved_nocode_wanted;
/* generate vla code saved in post_type() */
/* Recursively emit, at function entry, the code that evaluates the saved
   size expression of a VLA-typed parameter and stores the computed byte
   size into a dedicated stack slot. */
static void func_vla_arg_code(Sym *arg)
    int align;
    TokenString *vla_array_tok = NULL;
    /* handle inner (nested) array dimensions first */
    if (arg->type.ref)
        func_vla_arg_code(arg->type.ref);
    if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
        /* reserve an int-sized stack slot to hold the dimension */
        loc -= type_size(&int_type, &align);
        loc &= -align;
        arg->type.ref->c = loc;
        unget_tok(0);
        /* replay the tokens of the saved size expression */
        vla_array_tok = tok_str_alloc();
        vla_array_tok->str = arg->type.ref->vla_array_str;
        begin_macro(vla_array_tok, 1);
        next();
        gexpr();
        end_macro();
        next();
        /* total size = element count * element size */
        vpush_type_size(&arg->type.ref->type, &align);
        gen_op('*');
        /* store the result into the reserved stack slot */
        vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
        vswap();
        vstore();
        vpop();
8073 static void func_vla_arg(Sym *sym)
8075 Sym *arg;
8077 for (arg = sym->type.ref->next; arg; arg = arg->next)
8078 if (arg->type.t & VT_VLA)
8079 func_vla_arg_code(arg);
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
    struct scope f = { 0 };
    cur_scope = root_scope = &f;
    nocode_wanted = 0;    /* actually generate code now */
    ind = cur_text_section->data_offset;
    if (sym->a.aligned) {
        /* pad with nops up to the requested function alignment */
        size_t newoff = section_add(cur_text_section, 0,
                                    1 << (sym->a.aligned - 1));
        gen_fill_nops(newoff - ind);
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    if (sym->type.ref->f.func_ctor)
        add_array (tcc_state, ".init_array", sym->c);
    if (sym->type.ref->f.func_dtor)
        add_array (tcc_state, ".fini_array", sym->c);
    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    func_vt = sym->type.ref->type;
    func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(sym);
    local_scope = 0;
    rsym = 0;    /* label that 'return' statements jump to */
    clear_temp_local_var_list();
    func_vla_arg(sym);
    block(0);    /* compile the function body */
    gsym(rsym);
    nocode_wanted = 0;
    /* reset local stack */
    pop_local_syms(NULL, 0);
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    local_scope = 0;
    label_pop(&global_label_stack, NULL, 0);
    sym_pop(&all_cleanups, NULL, 0);
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    /* end of function */
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    func_ind = -1;
    nocode_wanted = 0x80000000;
    check_vstack();
    /* do this after funcend debug info */
    next();
/* Compile the saved bodies of inline functions that ended up being
   referenced (or forced).  Repeat until a fixed point is reached, since
   generating one inline function may reference another. */
static void gen_inline_functions(TCCState *s)
    Sym *sym;
    int inline_generated, i;
    struct InlineFunc *fn;
    tcc_open_bf(s, ":inline:", 0);    /* pseudo file for diagnostics */
    /* iterate while inline function are referenced */
    do {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
                /* the function was used or forced (and then not internal):
                   generate its code and convert it to a normal function */
                fn->sym = NULL;    /* mark as emitted */
                tcc_debug_putfile(s, fn->filename);
                begin_macro(fn->func_str, 1);    /* replay the saved tokens */
                next();
                cur_text_section = text_section;
                gen_function(sym);
                end_macro();
                inline_generated = 1;
    } while (inline_generated);
    tcc_close();
8175 static void free_inline_functions(TCCState *s)
8177 int i;
8178 /* free tokens of unused inline functions */
8179 for (i = 0; i < s->nb_inline_fns; ++i) {
8180 struct InlineFunc *fn = s->inline_fns[i];
8181 if (fn->sym)
8182 tok_str_free(fn->func_str);
8184 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Parse a sequence of declarations (and function definitions at file
   scope).  Returns 1 only in for-loop-init mode when a declaration was
   parsed, 0 otherwise. */
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
    int v, has_init, r, oldint;
    CType type, btype;
    Sym *sym;
    AttributeDef ad, adbase;
    while (1) {
        if (tok == TOK_STATIC_ASSERT) {
            CString error_str;
            int c;
            next();
            skip('(');
            c = expr_const();
            if (tok == ')') {
                /* C23-style _Static_assert without a message string */
                if (!c)
                    tcc_error("_Static_assert fail");
                next();
                goto static_assert_out;
            skip(',');
            parse_mult_str(&error_str, "string constant");
            if (c == 0)
                tcc_error("%s", (char *)error_str.data);
            cstr_free(&error_str);
            skip(')');
        static_assert_out:
            skip(';');
            continue;
        oldint = 0;
        if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                next();
                continue;
            if (l != VT_CONST)
                break;
            if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
                /* global asm block */
                asm_global_instr();
                continue;
            if (tok >= TOK_UIDENT) {
                /* special test for old K&R protos without explicit int
                   type. Only accepted when defining global data */
                btype.t = VT_INT;
                oldint = 1;
            } else {
                if (tok != TOK_EOF)
                    expect("declaration");
                break;
        if (tok == ';') {
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
                next();
                continue;
            if (IS_ENUM(btype.t)) {
                next();
                continue;
        while (1) { /* iterate thru each declaration */
            type = btype;
            ad = adbase;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL))
                    tcc_error("function without file scope cannot be static");
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
#if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
                if (sym->f.func_alwinl
                    && ((type.t & (VT_EXTERN | VT_INLINE))
                        == (VT_EXTERN | VT_INLINE))) {
                    /* always_inline functions must be handled as if they
                       don't generate multiple global defs, even if extern
                       inline, i.e. GNU inline semantics for those. Rewrite
                       them into static inline. */
                    type.t &= ~VT_EXTERN;
                    type.t |= VT_STATIC;
#endif
                /* always compile 'extern inline' */
                if (type.t & VT_EXTERN)
                    type.t &= ~VT_INLINE;
            } else if (oldint) {
                tcc_warning("type defaults to int");
            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
#if 0
                /* gcc does not allow __asm__("label") with function definition,
                   but why not ... */
                if (tok == '{')
                    expect(";");
#endif
#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & VT_STATIC)
                    tcc_error("cannot have dll linkage with static");
                if (type.t & VT_TYPEDEF) {
                    tcc_warning("'%s' attribute ignored for typedef",
                        ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                        (ad.a.dllexport = 0, "dllexport"));
                } else if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
#endif
            if (tok == '{') {
                /* function definition body */
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");
                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                /* apply post-declaraton attributes */
                merge_funcattr(&type.ref->f, &ad.f);
                /* put function symbol */
                type.t &= ~VT_EXTERN;
                sym = external_sym(v, &type, 0, &ad);
                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if (sym->type.t & VT_INLINE) {
                    struct InlineFunc *fn;
                    fn = tcc_malloc(sizeof *fn + strlen(file->filename));
                    strcpy(fn->filename, file->filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                break;
            } else {
                if (l == VT_CMP) {
                    /* find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    sym->a = ad.a;
                    sym->f = ad.f;
                    if (debug_modes)
                        tcc_debug_typedef (tcc_state, sym);
                } else if ((type.t & VT_BTYPE) == VT_VOID
                           && !(type.t & VT_EXTERN)) {
                    tcc_error("declaration of void object");
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= VT_LVAL;
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                        || (type.t & VT_BTYPE) == VT_FUNC
                        /* as with GCC, uninitialized global arrays with no size
                           are considered extern: */
                        || ((type.t & VT_ARRAY) && !has_init
                            && l == VT_CONST && type.ref->c < 0)
                        /* external variable or function */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* Aliases need to be emitted when their target
                               symbol is emitted, even if perhaps unreferenced.
                               We only support the case where the base is
                               already defined, otherwise we would need
                               deferring to emit the aliases until the end of
                               the compile unit. */
                            Sym *alias_target = sym_find(ad.alias_target);
                            ElfSym *esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            put_extern_sym2(sym, esym->st_shndx,
                                            esym->st_value, esym->st_size, 1);
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                next();
    return 0;
/* Parse a declaration list with default storage class 'l' (VT_CONST at
   file scope, VT_LOCAL inside a block). */
static void decl(int l)
    decl0(l, 0, NULL);
8479 /* ------------------------------------------------------------------------- */
8480 #undef gjmp_addr
8481 #undef gjmp
8482 /* ------------------------------------------------------------------------- */