/* tinycc (tinycc.git) — tccgen.c
   snapshot from commit "small scopes cleanup etc."
   blob 2b4d4ca2b887a946929ea3e0e4fbf704493bc291 */
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int constant_p;
48 ST_DATA char debug_modes;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
59 #define CODE_OFF_BIT 0x20000000
60 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
61 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
63 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
64 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
65 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
66 ST_DATA int func_vc;
67 ST_DATA int func_ind;
68 ST_DATA const char *funcname;
69 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
70 static CString initstr;
72 #if PTR_SIZE == 4
73 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
74 #define VT_PTRDIFF_T VT_INT
75 #elif LONG_SIZE == 4
76 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
77 #define VT_PTRDIFF_T VT_LLONG
78 #else
79 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
80 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
81 #endif
83 static struct switch_t {
84 struct case_t {
85 int64_t v1, v2;
86 int sym;
87 } **p; int n; /* list of case ranges */
88 int def_sym; /* default symbol */
89 int nocode_wanted;
90 int *bsym;
91 struct scope *scope;
92 struct switch_t *prev;
93 SValue sv;
94 } *cur_switch; /* current switch */
96 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
97 /*list of temporary local variables on the stack in current function. */
98 static struct temp_local_variable {
99 int location; //offset on stack. Svalue.c.i
100 short size;
101 short align;
102 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
103 static int nb_temp_local_vars;
105 static struct scope {
106 struct scope *prev;
107 struct { int loc, locorig, num; } vla;
108 struct { Sym *s; int n; } cl;
109 int *bsym, *csym;
110 Sym *lstk, *llstk;
111 } *cur_scope, *loop_scope, *root_scope;
113 typedef struct {
114 Section *sec;
115 int local_offset;
116 Sym *flex_array_ref;
117 } init_params;
119 #if 1
120 #define precedence_parser
121 static void init_prec(void);
122 #endif
124 static void gen_cast(CType *type);
125 static void gen_cast_s(int t);
126 static inline CType *pointed_type(CType *type);
127 static int is_compatible_types(CType *type1, CType *type2);
128 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
129 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
130 static void parse_expr_type(CType *type);
131 static void init_putv(init_params *p, CType *type, unsigned long c);
132 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
133 static void block(int is_expr);
134 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
135 static int decl(int l);
136 static void expr_eq(void);
137 static void vpush_type_size(CType *type, int *a);
138 static int is_compatible_unqualified_types(CType *type1, CType *type2);
139 static inline int64_t expr_const64(void);
140 static void vpush64(int ty, unsigned long long v);
141 static void vpush(CType *type);
142 static int gvtst(int inv, int t);
143 static void gen_inline_functions(TCCState *s);
144 static void free_inline_functions(TCCState *s);
145 static void skip_or_save_block(TokenString **str);
146 static void gv_dup(void);
147 static int get_temp_local_var(int size,int align);
148 static void clear_temp_local_var_list();
149 static void cast_error(CType *st, CType *dt);
151 /* ------------------------------------------------------------------------- */
152 /* Automagical code suppression */
154 /* Clear 'nocode_wanted' at forward label if it was used */
155 ST_FUNC void gsym(int t)
157 if (t) {
158 gsym_addr(t, ind);
159 CODE_ON();
163 /* Clear 'nocode_wanted' if current pc is a label */
164 static int gind()
166 int t = ind;
167 CODE_ON();
168 if (debug_modes)
169 tcc_tcov_block_begin(tcc_state);
170 return t;
/* Set 'nocode_wanted' after unconditional (backwards) jump */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF(); /* code after an unconditional jump is unreachable */
}
/* Set 'nocode_wanted' after unconditional (forwards) jump */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF(); /* following code unreachable until a label is emitted */
    return t;
}

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
/* ------------------------------------------------------------------------- */
193 ST_INLN int is_float(int t)
195 int bt = t & VT_BTYPE;
196 return bt == VT_LDOUBLE
197 || bt == VT_DOUBLE
198 || bt == VT_FLOAT
199 || bt == VT_QFLOAT;
202 static inline int is_integer_btype(int bt)
204 return bt == VT_BYTE
205 || bt == VT_BOOL
206 || bt == VT_SHORT
207 || bt == VT_INT
208 || bt == VT_LLONG;
211 static int btype_size(int bt)
213 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
214 bt == VT_SHORT ? 2 :
215 bt == VT_INT ? 4 :
216 bt == VT_LLONG ? 8 :
217 bt == VT_PTR ? PTR_SIZE : 0;
220 /* returns function return register from type */
221 static int R_RET(int t)
223 if (!is_float(t))
224 return REG_IRET;
225 #ifdef TCC_TARGET_X86_64
226 if ((t & VT_BTYPE) == VT_LDOUBLE)
227 return TREG_ST0;
228 #elif defined TCC_TARGET_RISCV64
229 if ((t & VT_BTYPE) == VT_LDOUBLE)
230 return REG_IRET;
231 #endif
232 return REG_FRET;
235 /* returns 2nd function return register, if any */
236 static int R2_RET(int t)
238 t &= VT_BTYPE;
239 #if PTR_SIZE == 4
240 if (t == VT_LLONG)
241 return REG_IRE2;
242 #elif defined TCC_TARGET_X86_64
243 if (t == VT_QLONG)
244 return REG_IRE2;
245 if (t == VT_QFLOAT)
246 return REG_FRE2;
247 #elif defined TCC_TARGET_RISCV64
248 if (t == VT_LDOUBLE)
249 return REG_IRE2;
250 #endif
251 return VT_CONST;
254 /* returns true for two-word types */
255 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
257 /* put function return registers to stack value */
258 static void PUT_R_RET(SValue *sv, int t)
260 sv->r = R_RET(t), sv->r2 = R2_RET(t);
263 /* returns function return register class for type t */
264 static int RC_RET(int t)
266 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
269 /* returns generic register class for type t */
270 static int RC_TYPE(int t)
272 if (!is_float(t))
273 return RC_INT;
274 #ifdef TCC_TARGET_X86_64
275 if ((t & VT_BTYPE) == VT_LDOUBLE)
276 return RC_ST0;
277 if ((t & VT_BTYPE) == VT_QFLOAT)
278 return RC_FRET;
279 #elif defined TCC_TARGET_RISCV64
280 if ((t & VT_BTYPE) == VT_LDOUBLE)
281 return RC_INT;
282 #endif
283 return RC_FLOAT;
286 /* returns 2nd register class corresponding to t and rc */
287 static int RC2_TYPE(int t, int rc)
289 if (!USING_TWO_WORDS(t))
290 return 0;
291 #ifdef RC_IRE2
292 if (rc == RC_IRET)
293 return RC_IRE2;
294 #endif
295 #ifdef RC_FRE2
296 if (rc == RC_FRET)
297 return RC_FRE2;
298 #endif
299 if (rc & RC_FLOAT)
300 return RC_FLOAT;
301 return RC_INT;
304 /* we use our own 'finite' function to avoid potential problems with
305 non standard math libs */
306 /* XXX: endianness dependent */
307 ST_FUNC int ieee_finite(double d)
309 int p[4];
310 memcpy(p, &d, sizeof(double));
311 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
314 /* compiling intel long double natively */
315 #if (defined __i386__ || defined __x86_64__) \
316 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
317 # define TCC_IS_NATIVE_387
318 #endif
320 ST_FUNC void test_lvalue(void)
322 if (!(vtop->r & VT_LVAL))
323 expect("lvalue");
326 ST_FUNC void check_vstack(void)
328 if (vtop != vstack - 1)
329 tcc_error("internal compiler error: vstack leak (%d)",
330 (int)(vtop - vstack + 1));
/* vstack debugging aid */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x  r:%04x  r2:%04x  c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
346 /* ------------------------------------------------------------------------- */
347 /* initialize vstack and types. This must be done also for tcc -E */
348 ST_FUNC void tccgen_init(TCCState *s1)
350 vtop = vstack - 1;
351 memset(vtop, 0, sizeof *vtop);
353 /* define some often used types */
354 int_type.t = VT_INT;
356 char_type.t = VT_BYTE;
357 if (s1->char_is_unsigned)
358 char_type.t |= VT_UNSIGNED;
359 char_pointer_type = char_type;
360 mk_pointer(&char_pointer_type);
362 func_old_type.t = VT_FUNC;
363 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
364 func_old_type.ref->f.func_call = FUNC_CDECL;
365 func_old_type.ref->f.func_type = FUNC_OLD;
366 #ifdef precedence_parser
367 init_prec();
368 #endif
369 cstr_new(&initstr);
372 ST_FUNC int tccgen_compile(TCCState *s1)
374 cur_text_section = NULL;
375 funcname = "";
376 func_ind = -1;
377 anon_sym = SYM_FIRST_ANOM;
378 const_wanted = 0;
379 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
380 local_scope = 0;
381 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
383 tcc_debug_start(s1);
384 tcc_tcov_start (s1);
385 #ifdef TCC_TARGET_ARM
386 arm_init(s1);
387 #endif
388 #ifdef INC_DEBUG
389 printf("%s: **** new file\n", file->filename);
390 #endif
391 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
392 next();
393 decl(VT_CONST);
394 gen_inline_functions(s1);
395 check_vstack();
396 /* end of translation unit info */
397 tcc_debug_end(s1);
398 tcc_tcov_end(s1);
399 return 0;
402 ST_FUNC void tccgen_finish(TCCState *s1)
404 cstr_free(&initstr);
405 free_inline_functions(s1);
406 sym_pop(&global_stack, NULL, 0);
407 sym_pop(&local_stack, NULL, 0);
408 /* free preprocessor macros */
409 free_defines(NULL);
410 /* free sym_pools */
411 dynarray_reset(&sym_pools, &nb_sym_pools);
412 sym_free_first = NULL;
415 /* ------------------------------------------------------------------------- */
416 ST_FUNC ElfSym *elfsym(Sym *s)
418 if (!s || !s->c)
419 return NULL;
420 return &((ElfSym *)symtab_section->data)[s->c];
423 /* apply storage attributes to Elf symbol */
424 ST_FUNC void update_storage(Sym *sym)
426 ElfSym *esym;
427 int sym_bind, old_sym_bind;
429 esym = elfsym(sym);
430 if (!esym)
431 return;
433 if (sym->a.visibility)
434 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
435 | sym->a.visibility;
437 if (sym->type.t & (VT_STATIC | VT_INLINE))
438 sym_bind = STB_LOCAL;
439 else if (sym->a.weak)
440 sym_bind = STB_WEAK;
441 else
442 sym_bind = STB_GLOBAL;
443 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
444 if (sym_bind != old_sym_bind) {
445 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
448 #ifdef TCC_TARGET_PE
449 if (sym->a.dllimport)
450 esym->st_other |= ST_PE_IMPORT;
451 if (sym->a.dllexport)
452 esym->st_other |= ST_PE_EXPORT;
453 #endif
455 #if 0
456 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
457 get_tok_str(sym->v, NULL),
458 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
459 sym->a.visibility,
460 sym->a.dllexport,
461 sym->a.dllimport
463 #endif
466 /* ------------------------------------------------------------------------- */
467 /* update sym->c so that it points to an external symbol in section
468 'section' with value 'value' */
470 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
471 addr_t value, unsigned long size,
472 int can_add_underscore)
474 int sym_type, sym_bind, info, other, t;
475 ElfSym *esym;
476 const char *name;
477 char buf1[256];
479 if (!sym->c) {
480 name = get_tok_str(sym->v, NULL);
481 t = sym->type.t;
482 if ((t & VT_BTYPE) == VT_FUNC) {
483 sym_type = STT_FUNC;
484 } else if ((t & VT_BTYPE) == VT_VOID) {
485 sym_type = STT_NOTYPE;
486 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
487 sym_type = STT_FUNC;
488 } else {
489 sym_type = STT_OBJECT;
491 if (t & (VT_STATIC | VT_INLINE))
492 sym_bind = STB_LOCAL;
493 else
494 sym_bind = STB_GLOBAL;
495 other = 0;
497 #ifdef TCC_TARGET_PE
498 if (sym_type == STT_FUNC && sym->type.ref) {
499 Sym *ref = sym->type.ref;
500 if (ref->a.nodecorate) {
501 can_add_underscore = 0;
503 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
504 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
505 name = buf1;
506 other |= ST_PE_STDCALL;
507 can_add_underscore = 0;
510 #endif
512 if (sym->asm_label) {
513 name = get_tok_str(sym->asm_label, NULL);
514 can_add_underscore = 0;
517 if (tcc_state->leading_underscore && can_add_underscore) {
518 buf1[0] = '_';
519 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
520 name = buf1;
523 info = ELFW(ST_INFO)(sym_bind, sym_type);
524 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
526 if (debug_modes)
527 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
529 } else {
530 esym = elfsym(sym);
531 esym->st_value = value;
532 esym->st_size = size;
533 esym->st_shndx = sh_num;
535 update_storage(sym);
538 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
540 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
541 return;
542 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
545 /* add a new relocation entry to symbol 'sym' in section 's' */
546 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
547 addr_t addend)
549 int c = 0;
551 if (nocode_wanted && s == cur_text_section)
552 return;
554 if (sym) {
555 if (0 == sym->c)
556 put_extern_sym(sym, NULL, 0, 0);
557 c = sym->c;
560 /* now we can add ELF relocation info */
561 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit targets use relocations without explicit addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
571 /* ------------------------------------------------------------------------- */
572 /* symbol allocator */
573 static Sym *__sym_malloc(void)
575 Sym *sym_pool, *sym, *last_sym;
576 int i;
578 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
579 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
581 last_sym = sym_free_first;
582 sym = sym_pool;
583 for(i = 0; i < SYM_POOL_NB; i++) {
584 sym->next = last_sym;
585 last_sym = sym;
586 sym++;
588 sym_free_first = last_sym;
589 return last_sym;
592 static inline Sym *sym_malloc(void)
594 Sym *sym;
595 #ifndef SYM_DEBUG
596 sym = sym_free_first;
597 if (!sym)
598 sym = __sym_malloc();
599 sym_free_first = sym->next;
600 return sym;
601 #else
602 sym = tcc_malloc(sizeof(Sym));
603 return sym;
604 #endif
607 ST_INLN void sym_free(Sym *sym)
609 #ifndef SYM_DEBUG
610 sym->next = sym_free_first;
611 sym_free_first = sym;
612 #else
613 tcc_free(sym);
614 #endif
617 /* push, without hashing */
618 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
620 Sym *s;
622 s = sym_malloc();
623 memset(s, 0, sizeof *s);
624 s->v = v;
625 s->type.t = t;
626 s->c = c;
627 /* add in stack */
628 s->prev = *ps;
629 *ps = s;
630 return s;
633 /* find a symbol and return its associated structure. 's' is the top
634 of the symbol stack */
635 ST_FUNC Sym *sym_find2(Sym *s, int v)
637 while (s) {
638 if (s->v == v)
639 return s;
640 else if (s->v == -1)
641 return NULL;
642 s = s->prev;
644 return NULL;
647 /* structure lookup */
648 ST_INLN Sym *struct_find(int v)
650 v -= TOK_IDENT;
651 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
652 return NULL;
653 return table_ident[v]->sym_struct;
656 /* find an identifier */
657 ST_INLN Sym *sym_find(int v)
659 v -= TOK_IDENT;
660 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
661 return NULL;
662 return table_ident[v]->sym_identifier;
665 static int sym_scope(Sym *s)
667 if (IS_ENUM_VAL (s->type.t))
668 return s->type.ref->sym_scope;
669 else
670 return s->sym_scope;
673 /* push a given symbol on the symbol stack */
674 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
676 Sym *s, **ps;
677 TokenSym *ts;
679 if (local_stack)
680 ps = &local_stack;
681 else
682 ps = &global_stack;
683 s = sym_push2(ps, v, type->t, c);
684 s->type.ref = type->ref;
685 s->r = r;
686 /* don't record fields or anonymous symbols */
687 /* XXX: simplify */
688 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
689 /* record symbol in token array */
690 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
691 if (v & SYM_STRUCT)
692 ps = &ts->sym_struct;
693 else
694 ps = &ts->sym_identifier;
695 s->prev_tok = *ps;
696 *ps = s;
697 s->sym_scope = local_scope;
698 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
699 tcc_error("redeclaration of '%s'",
700 get_tok_str(v & ~SYM_STRUCT, NULL));
702 return s;
705 /* push a global identifier */
706 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
708 Sym *s, **ps;
709 s = sym_push2(&global_stack, v, t, c);
710 s->r = VT_CONST | VT_SYM;
711 /* don't record anonymous symbol */
712 if (v < SYM_FIRST_ANOM) {
713 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
714 /* modify the top most local identifier, so that sym_identifier will
715 point to 's' when popped; happens when called from inline asm */
716 while (*ps != NULL && (*ps)->sym_scope)
717 ps = &(*ps)->prev_tok;
718 s->prev_tok = *ps;
719 *ps = s;
721 return s;
724 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
725 pop them yet from the list, but do remove them from the token array. */
726 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
728 Sym *s, *ss, **ps;
729 TokenSym *ts;
730 int v;
732 s = *ptop;
733 while(s != b) {
734 ss = s->prev;
735 v = s->v;
736 /* remove symbol in token array */
737 /* XXX: simplify */
738 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
739 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
740 if (v & SYM_STRUCT)
741 ps = &ts->sym_struct;
742 else
743 ps = &ts->sym_identifier;
744 *ps = s->prev_tok;
746 if (!keep)
747 sym_free(s);
748 s = ss;
750 if (!keep)
751 *ptop = b;
754 /* label lookup */
755 ST_FUNC Sym *label_find(int v)
757 v -= TOK_IDENT;
758 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
759 return NULL;
760 return table_ident[v]->sym_label;
763 ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
765 Sym *s, **ps;
766 s = sym_push2(ptop, v, VT_STATIC, 0);
767 s->r = flags;
768 ps = &table_ident[v - TOK_IDENT]->sym_label;
769 if (ptop == &global_label_stack) {
770 /* modify the top most local identifier, so that
771 sym_identifier will point to 's' when popped */
772 while (*ps != NULL)
773 ps = &(*ps)->prev_tok;
775 s->prev_tok = *ps;
776 *ps = s;
777 return s;
780 /* pop labels until element last is reached. Look if any labels are
781 undefined. Define symbols if '&&label' was used. */
782 ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
784 Sym *s, *s1;
785 for(s = *ptop; s != slast; s = s1) {
786 s1 = s->prev;
787 if (s->r == LABEL_DECLARED) {
788 tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
789 } else if (s->r == LABEL_FORWARD) {
790 tcc_error("label '%s' used but not defined",
791 get_tok_str(s->v, NULL));
792 } else {
793 if (s->c) {
794 /* define corresponding symbol. A size of
795 1 is put. */
796 put_extern_sym(s, cur_text_section, s->jnext, 1);
799 /* remove label */
800 if (s->r != LABEL_GONE)
801 table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
802 if (!keep)
803 sym_free(s);
804 else
805 s->r = LABEL_GONE;
807 if (!keep)
808 *ptop = slast;
811 /* ------------------------------------------------------------------------- */
812 static void vcheck_cmp(void)
814 /* cannot let cpu flags if other instruction are generated. Also
815 avoid leaving VT_JMP anywhere except on the top of the stack
816 because it would complicate the code generator.
818 Don't do this when nocode_wanted. vtop might come from
819 !nocode_wanted regions (see 88_codeopt.c) and transforming
820 it to a register without actually generating code is wrong
821 as their value might still be used for real. All values
822 we push under nocode_wanted will eventually be popped
823 again, so that the VT_CMP/VT_JMP value will be in vtop
824 when code is unsuppressed again. */
826 /* However if it's just automatic suppression via CODE_OFF/ON()
827 then it seems that we better let things work undisturbed.
828 How can it work at all under nocode_wanted? Well, gv() will
829 actually clear it at the gsym() in load()/VT_JMP in the
830 generator backends */
832 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
833 gv(RC_INT);
836 static void vsetc(CType *type, int r, CValue *vc)
838 if (vtop >= vstack + (VSTACK_SIZE - 1))
839 tcc_error("memory full (vstack)");
840 vcheck_cmp();
841 vtop++;
842 vtop->type = *type;
843 vtop->r = r;
844 vtop->r2 = VT_CONST;
845 vtop->c = *vc;
846 vtop->sym = NULL;
849 ST_FUNC void vswap(void)
851 SValue tmp;
853 vcheck_cmp();
854 tmp = vtop[0];
855 vtop[0] = vtop[-1];
856 vtop[-1] = tmp;
859 /* pop stack value */
860 ST_FUNC void vpop(void)
862 int v;
863 v = vtop->r & VT_VALMASK;
864 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
865 /* for x86, we need to pop the FP stack */
866 if (v == TREG_ST0) {
867 o(0xd8dd); /* fstp %st(0) */
868 } else
869 #endif
870 if (v == VT_CMP) {
871 /* need to put correct jump if && or || without test */
872 gsym(vtop->jtrue);
873 gsym(vtop->jfalse);
875 vtop--;
878 /* push constant of type "type" with useless value */
879 static void vpush(CType *type)
881 vset(type, VT_CONST, 0);
884 /* push arbitrary 64bit constant */
885 static void vpush64(int ty, unsigned long long v)
887 CValue cval;
888 CType ctype;
889 ctype.t = ty;
890 ctype.ref = NULL;
891 cval.i = v;
892 vsetc(&ctype, VT_CONST, &cval);
895 /* push integer constant */
896 ST_FUNC void vpushi(int v)
898 vpush64(VT_INT, v);
901 /* push a pointer sized constant */
902 static void vpushs(addr_t v)
904 vpush64(VT_SIZE_T, v);
907 /* push long long constant */
908 static inline void vpushll(long long v)
910 vpush64(VT_LLONG, v);
913 ST_FUNC void vset(CType *type, int r, int v)
915 CValue cval;
916 cval.i = v;
917 vsetc(type, r, &cval);
920 static void vseti(int r, int v)
922 CType type;
923 type.t = VT_INT;
924 type.ref = NULL;
925 vset(&type, r, v);
928 ST_FUNC void vpushv(SValue *v)
930 if (vtop >= vstack + (VSTACK_SIZE - 1))
931 tcc_error("memory full (vstack)");
932 vtop++;
933 *vtop = *v;
936 static void vdup(void)
938 vpushv(vtop);
941 /* rotate n first stack elements to the bottom
942 I1 ... In -> I2 ... In I1 [top is right]
944 ST_FUNC void vrotb(int n)
946 int i;
947 SValue tmp;
949 vcheck_cmp();
950 tmp = vtop[-n + 1];
951 for(i=-n+1;i!=0;i++)
952 vtop[i] = vtop[i+1];
953 vtop[0] = tmp;
956 /* rotate the n elements before entry e towards the top
957 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
959 ST_FUNC void vrote(SValue *e, int n)
961 int i;
962 SValue tmp;
964 vcheck_cmp();
965 tmp = *e;
966 for(i = 0;i < n - 1; i++)
967 e[-i] = e[-i - 1];
968 e[-n + 1] = tmp;
971 /* rotate n first stack elements to the top
972 I1 ... In -> In I1 ... I(n-1) [top is right]
974 ST_FUNC void vrott(int n)
976 vrote(vtop, n);
979 /* ------------------------------------------------------------------------- */
980 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
982 /* called from generators to set the result from relational ops */
983 ST_FUNC void vset_VT_CMP(int op)
985 vtop->r = VT_CMP;
986 vtop->cmp_op = op;
987 vtop->jfalse = 0;
988 vtop->jtrue = 0;
991 /* called once before asking generators to load VT_CMP to a register */
992 static void vset_VT_JMP(void)
994 int op = vtop->cmp_op;
996 if (vtop->jtrue || vtop->jfalse) {
997 int origt = vtop->type.t;
998 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
999 int inv = op & (op < 2); /* small optimization */
1000 vseti(VT_JMP+inv, gvtst(inv, 0));
1001 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
1002 } else {
1003 /* otherwise convert flags (rsp. 0/1) to register */
1004 vtop->c.i = op;
1005 if (op < 2) /* doesn't seem to happen */
1006 vtop->r = VT_CONST;
1010 /* Set CPU Flags, doesn't yet jump */
1011 static void gvtst_set(int inv, int t)
1013 int *p;
1015 if (vtop->r != VT_CMP) {
1016 vpushi(0);
1017 gen_op(TOK_NE);
1018 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1019 vset_VT_CMP(vtop->c.i != 0);
1022 p = inv ? &vtop->jfalse : &vtop->jtrue;
1023 *p = gjmp_append(*p, t);
1026 /* Generate value test
1028 * Generate a test for any value (jump, comparison and integers) */
1029 static int gvtst(int inv, int t)
1031 int op, x, u;
1033 gvtst_set(inv, t);
1034 t = vtop->jtrue, u = vtop->jfalse;
1035 if (inv)
1036 x = u, u = t, t = x;
1037 op = vtop->cmp_op;
1039 /* jump to the wanted target */
1040 if (op > 1)
1041 t = gjmp_cond(op ^ inv, t);
1042 else if (op != inv)
1043 t = gjmp(t);
1044 /* resolve complementary jumps to here */
1045 gsym(u);
1047 vtop--;
1048 return t;
1051 /* generate a zero or nozero test */
1052 static void gen_test_zero(int op)
1054 if (vtop->r == VT_CMP) {
1055 int j;
1056 if (op == TOK_EQ) {
1057 j = vtop->jfalse;
1058 vtop->jfalse = vtop->jtrue;
1059 vtop->jtrue = j;
1060 vtop->cmp_op ^= 1;
1062 } else {
1063 vpushi(0);
1064 gen_op(op);
1068 /* ------------------------------------------------------------------------- */
1069 /* push a symbol value of TYPE */
1070 ST_FUNC void vpushsym(CType *type, Sym *sym)
1072 CValue cval;
1073 cval.i = 0;
1074 vsetc(type, VT_CONST | VT_SYM, &cval);
1075 vtop->sym = sym;
1078 /* Return a static symbol pointing to a section */
1079 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1081 int v;
1082 Sym *sym;
1084 v = anon_sym++;
1085 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1086 sym->type.t |= VT_STATIC;
1087 put_extern_sym(sym, sec, offset, size);
1088 return sym;
1091 /* push a reference to a section offset by adding a dummy symbol */
1092 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1094 vpushsym(type, get_sym_ref(type, sec, offset, size));
1097 /* define a new external reference to a symbol 'v' of type 'u' */
1098 ST_FUNC Sym *external_global_sym(int v, CType *type)
1100 Sym *s;
1102 s = sym_find(v);
1103 if (!s) {
1104 /* push forward reference */
1105 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1106 s->type.ref = type->ref;
1107 } else if (IS_ASM_SYM(s)) {
1108 s->type.t = type->t | (s->type.t & VT_EXTERN);
1109 s->type.ref = type->ref;
1110 update_storage(s);
1112 return s;
1115 /* create an external reference with no specific type similar to asm labels.
1116 This avoids type conflicts if the symbol is used from C too */
1117 ST_FUNC Sym *external_helper_sym(int v)
1119 CType ct = { VT_ASM_FUNC, NULL };
1120 return external_global_sym(v, &ct);
1123 /* push a reference to an helper function (such as memmove) */
1124 ST_FUNC void vpush_helper_func(int v)
1126 vpushsym(&func_old_type, external_helper_sym(v));
1129 /* Merge symbol attributes. */
1130 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1132 if (sa1->aligned && !sa->aligned)
1133 sa->aligned = sa1->aligned;
1134 sa->packed |= sa1->packed;
1135 sa->weak |= sa1->weak;
1136 sa->nodebug |= sa1->nodebug;
1137 if (sa1->visibility != STV_DEFAULT) {
1138 int vis = sa->visibility;
1139 if (vis == STV_DEFAULT
1140 || vis > sa1->visibility)
1141 vis = sa1->visibility;
1142 sa->visibility = vis;
1144 sa->dllexport |= sa1->dllexport;
1145 sa->nodecorate |= sa1->nodecorate;
1146 sa->dllimport |= sa1->dllimport;
1149 /* Merge function attributes. */
1150 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1152 if (fa1->func_call && !fa->func_call)
1153 fa->func_call = fa1->func_call;
1154 if (fa1->func_type && !fa->func_type)
1155 fa->func_type = fa1->func_type;
1156 if (fa1->func_args && !fa->func_args)
1157 fa->func_args = fa1->func_args;
1158 if (fa1->func_noreturn)
1159 fa->func_noreturn = 1;
1160 if (fa1->func_ctor)
1161 fa->func_ctor = 1;
1162 if (fa1->func_dtor)
1163 fa->func_dtor = 1;
1166 /* Merge attributes. */
1167 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1169 merge_symattr(&ad->a, &ad1->a);
1170 merge_funcattr(&ad->f, &ad1->f);
1172 if (ad1->section)
1173 ad->section = ad1->section;
1174 if (ad1->alias_target)
1175 ad->alias_target = ad1->alias_target;
1176 if (ad1->asm_label)
1177 ad->asm_label = ad1->asm_label;
1178 if (ad1->attr_mode)
1179 ad->attr_mode = ad1->attr_mode;
/* Merge some type attributes.  Patch the previously recorded symbol
   'sym' with a new declaration 'type' of the same identifier:
   resolves extern/static/inline interactions and diagnoses
   redefinitions and incompatible declarations. */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        /* the new declaration provides the definition */
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here. Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
             || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            /* NOTE: '~VT_INLINE | static_proto' is a single mask:
               clear VT_INLINE unless static_proto carries it */
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            /* prefer a prototyped declaration over a K&R one */
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes of symbol 'sym' with those of a new
   declaration described by 'ad'/'type', then let the backend update
   the symbol's storage (update_storage). */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    /* dllimport must be consistent across declarations on PE targets */
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
/* copy sym to other stack: duplicate 's0' and push the copy on the
   stack '*ps'; for named (non-anonymous) symbols also re-link the copy
   into the identifier's symbol chain so lookups find it. */
static Sym *sym_copy(Sym *s0, Sym **ps)
{
    Sym *s;
    s = sym_malloc(), *s = *s0;
    s->prev = *ps, *ps = s;
    if (s->v < SYM_FIRST_ANOM) {
        /* named symbol: also hook into table_ident's per-token chain */
        ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
        s->prev_tok = *ps, *ps = s;
    }
    return s;
}
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR (and locally
   scoped VT_STRUCT).  Recursively deep-copies the whole chain of
   referenced symbols (function parameters, pointed-to types). */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* detach the original chain, rebuild it with copies */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
/* define a new external reference to a symbol 'v'.  If a global
   declaration already exists, its storage/type are merged with the
   new one; otherwise a forward reference is pushed on the global
   stack.  Inside a function the symbol is also mirrored onto the
   local stack so it is popped at scope exit. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1318 /* save registers up to (vtop - n) stack entry */
1319 ST_FUNC void save_regs(int n)
1321 SValue *p, *p1;
1322 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1323 save_reg(p->r);
/* save r to the memory stack, and mark it as being free.
   Convenience wrapper: spills every occurrence of 'r' on the whole
   value stack (n == 0). */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry.  All value-stack entries
   currently held (fully or partially, via r2) in register 'r' are
   spilled to one shared temporary stack slot and retargeted to it. */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* lvalues and function designators hold an address */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count references to r on the value stack (main or r2) */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    /* no lightly-used register in rc2: fall back to a free one in rc */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register to
   memory (spilling from the bottom of the value stack) and return it. */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* no code is emitted, any register will do */
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1452 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1453 static int get_temp_local_var(int size,int align){
1454 int i;
1455 struct temp_local_variable *temp_var;
1456 int found_var;
1457 SValue *p;
1458 int r;
1459 char free;
1460 char found;
1461 found=0;
1462 for(i=0;i<nb_temp_local_vars;i++){
1463 temp_var=&arr_temp_local_vars[i];
1464 if(temp_var->size<size||align!=temp_var->align){
1465 continue;
1467 /*check if temp_var is free*/
1468 free=1;
1469 for(p=vstack;p<=vtop;p++) {
1470 r=p->r&VT_VALMASK;
1471 if(r==VT_LOCAL||r==VT_LLOCAL){
1472 if(p->c.i==temp_var->location){
1473 free=0;
1474 break;
1478 if(free){
1479 found_var=temp_var->location;
1480 found=1;
1481 break;
1484 if(!found){
1485 loc = (loc - size) & -align;
1486 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1487 temp_var=&arr_temp_local_vars[i];
1488 temp_var->location=loc;
1489 temp_var->size=size;
1490 temp_var->align=align;
1491 nb_temp_local_vars++;
1493 found_var=loc;
1495 return found_var;
/* forget all recorded temporary local variables (called when the
   current set of temporaries becomes invalid, e.g. at scope changes) */
static void clear_temp_local_var_list(){
	nb_temp_local_vars=0;
}
1502 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1503 if needed */
1504 static void move_reg(int r, int s, int t)
1506 SValue sv;
1508 if (r != s) {
1509 save_reg(r);
1510 sv.type.t = t;
1511 sv.type.ref = NULL;
1512 sv.r = s;
1513 sv.c.i = 0;
1514 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition: replaces ptr + offset (the two
   top stack values) by a call to __bound_ptr_add, leaving the checked
   pointer in REG_IRET marked VT_BOUNDED. */
static void gen_bounded_ptr_add(void)
{
    /* if the base pointer lives in the frame, keep a copy so callers
       can still address it after the helper call */
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested: rewrites the relocation recorded by
   gen_bounded_ptr_add() to point at the size-specific
   __bound_ptr_indirN helper. */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    size = type_size(&vtop->type, &align);
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code: ensure the address in vtop is bounds
   checked before it gets dereferenced. */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers.  Also instruments calls to setjmp-family
   functions (and alloca/longjmp on some targets) for the bounds
   checker. */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    sv = vtop - nb_args;
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            /* let the runtime track the jmp_buf for bounds restoration */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev): emit
   (offset, size) pairs into lbounds_section for every stack object
   whose address may escape. */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;
            bounds_ptr[1] = size;
        }
    }
}
#endif
/* Wrapper around sym_pop, that potentially also registers local bounds
   (when bounds checking is on) and emits scope debug info. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* advance the byte address in vtop by 'o' and leave an unsigned-byte
   lvalue (used to walk packed bitfields one byte at a time). */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushs(o);
    gen_op('+');
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    vtop->r |= VT_LVAL;
}
/* single-byte load mode for packed or otherwise unaligned bitfields:
   assembles the field value byte by byte, then sign-extends when the
   field type is signed. */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign extend: shift field to the top, then arithmetic shift back */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields:
   writes the value byte by byte, merging with the surrounding bits
   where a byte is only partially covered by the field. */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        /* constants can be duplicated cheaply; values need a register dup */
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* partial byte: keep the bits outside the field */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1757 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1759 int t;
1760 if (0 == sv->type.ref)
1761 return 0;
1762 t = sv->type.ref->auxtype;
1763 if (t != -1 && t != VT_STRUCT) {
1764 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1765 sv->r |= VT_LVAL;
1767 return t;
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures).  Returns the register used. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* packed/unaligned: assemble byte by byte */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {
            if (!r_ok)
                r = get_reg(rc);
            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = VT_PTRDIFF_T;
                    gaddrof();
                    vpushs(PTR_SIZE);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints: after the call, vtop[-1] holds
   the low word and vtop[0] the high word, both typed VT_INT (with the
   original signedness preserved). */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: high word is just the upper 32 bits */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* addressable lvalue: high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* in registers: split the register pair */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints: vtop[-1] (low) and vtop[0] (high)
   become a single value of type 't' held in a register pair. */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate the two halves separately, then rebuild both */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations on 32-bit
   targets: division/modulo via libgcc-style helper calls, the other
   arithmetic by combining 32-bit word operations, shifts either by
   constant decomposition or helper calls, and comparisons as a
   high-word/low-word compare pair. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod returns the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: L1*L2 (full 64-bit) + (H1*L2 + H2*L1) << 32 */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift fills with the sign word */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* signed 64-bit division on unsigned operands: divide the magnitudes,
   then restore the quotient's sign (negative iff signs differ). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a >> 63 ? 0 - a : a;
    uint64_t ub = b >> 63 ? 0 - b : b;
    uint64_t q = ua / ub;

    if ((a ^ b) >> 63)
        q = 0 - q;
    return q;
}
/* signed 64-bit "less than" on unsigned operands: flipping the sign
   bit maps signed order onto unsigned order. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;

    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt: folds constant operands, canonicalizes commutative
   ops, simplifies algebraic identities (x*1, x|0, ...), converts
   power-of-two mul/div into shifts, folds symbol+constant, and
   otherwise falls through to the target's gen_opi/gen_opl. */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int nonconst = (v1->r | v2->r) & VT_NONCONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    /* normalize 32-bit operands to sign/zero-extended 64-bit values */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted && !(nocode_wanted & unevalmask))
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
        /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
        /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded result to the operand width */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        nonconst = VT_NONCONST;
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                          op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31. Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
        if (vtop->r == VT_CONST)
            vtop->r |= nonconst;
    }
}
/* floating-point negation.  x86 handles it in gen_opf, arm maps it to
   0-x (detected and turned into vneg by the backend); the generic
   fallback flips the sign bit in memory. */
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation. We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    /* spill the value to its stack slot so we can poke bytes */
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    /* address the most significant byte, which holds the sign bit
       on little-endian layouts */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
/* generate a floating point operation with constant propagation;
   falls back to the target's gen_opf/gen_negf when the operands are
   not both constants (or when folding must not happen). */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    if (op == TOK_NEG)
        v1 = v2;

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        /* NOTE(review): this condition is asymmetric — it equals
           (!finite(f1) && finite(f2)); a symmetric
           (!finite(f1) || !finite(f2)) may have been intended.
           Kept as-is; confirm against upstream before changing. */
        if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                union { float f; unsigned u; } x1, x2, y;
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold. */
                if (!const_wanted)
                    goto general_case;
                /* the run-time result of 0.0/0.0 on x87, also of other compilers
                   when used to compile the f1 /= f2 below, would be -nan */
                x1.f = f1, x2.f = f2;
                if (f1 == 0.0)
                    y.u = 0x7fc00000; /* nan */
                else
                    y.u = 0x7f800000; /* infinity */
                y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
                f1 = y.f;
                break;
            }
            f1 /= f2;
            break;
        case TOK_NEG:
            f1 = -f1;
            goto unary_result;
        /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        vtop--;
    unary_result:
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
    } else {
    general_case:
        if (op == TOK_NEG) {
            gen_negf(op);
        } else {
            gen_opf(op);
        }
    }
}
2537 /* print a type. If 'varstr' is not NULL, then the variable is also
2538 printed in the type */
2539 /* XXX: union */
2540 /* XXX: add array and function pointers */
2541 static void type_to_str(char *buf, int buf_size,
2542 CType *type, const char *varstr)
2544 int bt, v, t;
2545 Sym *s, *sa;
2546 char buf1[256];
2547 const char *tstr;
2549 t = type->t;
2550 bt = t & VT_BTYPE;
2551 buf[0] = '\0';
2553 if (t & VT_EXTERN)
2554 pstrcat(buf, buf_size, "extern ");
2555 if (t & VT_STATIC)
2556 pstrcat(buf, buf_size, "static ");
2557 if (t & VT_TYPEDEF)
2558 pstrcat(buf, buf_size, "typedef ");
2559 if (t & VT_INLINE)
2560 pstrcat(buf, buf_size, "inline ");
2561 if (bt != VT_PTR) {
2562 if (t & VT_VOLATILE)
2563 pstrcat(buf, buf_size, "volatile ");
2564 if (t & VT_CONSTANT)
2565 pstrcat(buf, buf_size, "const ");
2567 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2568 || ((t & VT_UNSIGNED)
2569 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2570 && !IS_ENUM(t)
2572 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2574 buf_size -= strlen(buf);
2575 buf += strlen(buf);
2577 switch(bt) {
2578 case VT_VOID:
2579 tstr = "void";
2580 goto add_tstr;
2581 case VT_BOOL:
2582 tstr = "_Bool";
2583 goto add_tstr;
2584 case VT_BYTE:
2585 tstr = "char";
2586 goto add_tstr;
2587 case VT_SHORT:
2588 tstr = "short";
2589 goto add_tstr;
2590 case VT_INT:
2591 tstr = "int";
2592 goto maybe_long;
2593 case VT_LLONG:
2594 tstr = "long long";
2595 maybe_long:
2596 if (t & VT_LONG)
2597 tstr = "long";
2598 if (!IS_ENUM(t))
2599 goto add_tstr;
2600 tstr = "enum ";
2601 goto tstruct;
2602 case VT_FLOAT:
2603 tstr = "float";
2604 goto add_tstr;
2605 case VT_DOUBLE:
2606 tstr = "double";
2607 if (!(t & VT_LONG))
2608 goto add_tstr;
2609 case VT_LDOUBLE:
2610 tstr = "long double";
2611 add_tstr:
2612 pstrcat(buf, buf_size, tstr);
2613 break;
2614 case VT_STRUCT:
2615 tstr = "struct ";
2616 if (IS_UNION(t))
2617 tstr = "union ";
2618 tstruct:
2619 pstrcat(buf, buf_size, tstr);
2620 v = type->ref->v & ~SYM_STRUCT;
2621 if (v >= SYM_FIRST_ANOM)
2622 pstrcat(buf, buf_size, "<anonymous>");
2623 else
2624 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2625 break;
2626 case VT_FUNC:
2627 s = type->ref;
2628 buf1[0]=0;
2629 if (varstr && '*' == *varstr) {
2630 pstrcat(buf1, sizeof(buf1), "(");
2631 pstrcat(buf1, sizeof(buf1), varstr);
2632 pstrcat(buf1, sizeof(buf1), ")");
2634 pstrcat(buf1, buf_size, "(");
2635 sa = s->next;
2636 while (sa != NULL) {
2637 char buf2[256];
2638 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2639 pstrcat(buf1, sizeof(buf1), buf2);
2640 sa = sa->next;
2641 if (sa)
2642 pstrcat(buf1, sizeof(buf1), ", ");
2644 if (s->f.func_type == FUNC_ELLIPSIS)
2645 pstrcat(buf1, sizeof(buf1), ", ...");
2646 pstrcat(buf1, sizeof(buf1), ")");
2647 type_to_str(buf, buf_size, &s->type, buf1);
2648 goto no_var;
2649 case VT_PTR:
2650 s = type->ref;
2651 if (t & (VT_ARRAY|VT_VLA)) {
2652 if (varstr && '*' == *varstr)
2653 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2654 else
2655 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2656 type_to_str(buf, buf_size, &s->type, buf1);
2657 goto no_var;
2659 pstrcpy(buf1, sizeof(buf1), "*");
2660 if (t & VT_CONSTANT)
2661 pstrcat(buf1, buf_size, "const ");
2662 if (t & VT_VOLATILE)
2663 pstrcat(buf1, buf_size, "volatile ");
2664 if (varstr)
2665 pstrcat(buf1, sizeof(buf1), varstr);
2666 type_to_str(buf, buf_size, &s->type, buf1);
2667 goto no_var;
2669 if (varstr) {
2670 pstrcat(buf, buf_size, " ");
2671 pstrcat(buf, buf_size, varstr);
2673 no_var: ;
2676 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2678 char buf1[256], buf2[256];
2679 type_to_str(buf1, sizeof(buf1), st, NULL);
2680 type_to_str(buf2, sizeof(buf2), dt, NULL);
2681 tcc_error(fmt, buf1, buf2);
2684 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2686 char buf1[256], buf2[256];
2687 type_to_str(buf1, sizeof(buf1), st, NULL);
2688 type_to_str(buf2, sizeof(buf2), dt, NULL);
2689 tcc_warning(fmt, buf1, buf2);
2692 static int pointed_size(CType *type)
2694 int align;
2695 return type_size(pointed_type(type), &align);
2698 static inline int is_null_pointer(SValue *p)
2700 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2701 return 0;
2702 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2703 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2704 ((p->type.t & VT_BTYPE) == VT_PTR &&
2705 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2706 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2707 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2711 /* compare function types. OLD functions match any new functions */
2712 static int is_compatible_func(CType *type1, CType *type2)
2714 Sym *s1, *s2;
2716 s1 = type1->ref;
2717 s2 = type2->ref;
2718 if (s1->f.func_call != s2->f.func_call)
2719 return 0;
2720 if (s1->f.func_type != s2->f.func_type
2721 && s1->f.func_type != FUNC_OLD
2722 && s2->f.func_type != FUNC_OLD)
2723 return 0;
2724 for (;;) {
2725 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2726 return 0;
2727 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2728 return 1;
2729 s1 = s1->next;
2730 s2 = s2->next;
2731 if (!s1)
2732 return !s2;
2733 if (!s2)
2734 return 0;
2738 /* return true if type1 and type2 are the same. If unqualified is
2739 true, qualifiers on the types are ignored.
2741 static int compare_types(CType *type1, CType *type2, int unqualified)
2743 int bt1, t1, t2;
2745 t1 = type1->t & VT_TYPE;
2746 t2 = type2->t & VT_TYPE;
2747 if (unqualified) {
2748 /* strip qualifiers before comparing */
2749 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2750 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2753 /* Default Vs explicit signedness only matters for char */
2754 if ((t1 & VT_BTYPE) != VT_BYTE) {
2755 t1 &= ~VT_DEFSIGN;
2756 t2 &= ~VT_DEFSIGN;
2758 /* XXX: bitfields ? */
2759 if (t1 != t2)
2760 return 0;
2762 if ((t1 & VT_ARRAY)
2763 && !(type1->ref->c < 0
2764 || type2->ref->c < 0
2765 || type1->ref->c == type2->ref->c))
2766 return 0;
2768 /* test more complicated cases */
2769 bt1 = t1 & VT_BTYPE;
2770 if (bt1 == VT_PTR) {
2771 type1 = pointed_type(type1);
2772 type2 = pointed_type(type2);
2773 return is_compatible_types(type1, type2);
2774 } else if (bt1 == VT_STRUCT) {
2775 return (type1->ref == type2->ref);
2776 } else if (bt1 == VT_FUNC) {
2777 return is_compatible_func(type1, type2);
2778 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2779 /* If both are enums then they must be the same, if only one is then
2780 t1 and t2 must be equal, which was checked above already. */
2781 return type1->ref == type2->ref;
2782 } else {
2783 return 1;
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1; /* 1 = combination is legal */

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* pointer on both sides: check pointed-to types */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* a pointer comparison yields a size_t-wide integer result */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversions: the widest float type wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function operands decay to function pointers, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* ptr - ptr: subtract, then divide by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts: the result type follows the left operand only */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        /* cast both operands to the combined type */
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
/* gen_cvt_itof1: integer -> float conversion; some backends handle the
   unsigned long long case natively, otherwise call a libtcc1 helper. */
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        /* select the helper for the destination float type */
        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* the result comes back in the return register for type t */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
3068 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3069 #define gen_cvt_ftoi1 gen_cvt_ftoi
3070 #else
3071 /* generic ftoi for unsigned long long case */
3072 static void gen_cvt_ftoi1(int t)
3074 int st;
3075 if (t == (VT_LLONG | VT_UNSIGNED)) {
3076 /* not handled natively */
3077 st = vtop->type.t & VT_BTYPE;
3078 if (st == VT_FLOAT)
3079 vpush_helper_func(TOK___fixunssfdi);
3080 #if LDOUBLE_SIZE != 8
3081 else if (st == VT_LDOUBLE)
3082 vpush_helper_func(TOK___fixunsxfdi);
3083 #endif
3084 else
3085 vpush_helper_func(TOK___fixunsdfdi);
3086 vrott(2);
3087 gfunc_call(1);
3088 vpushi(0);
3089 PUT_R_RET(vtop, t);
3090 } else {
3091 gen_cvt_ftoi(t);
3094 #endif
3096 /* special delayed cast for char/short */
3097 static void force_charshort_cast(void)
3099 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3100 int dbt = vtop->type.t;
3101 vtop->r &= ~VT_MUSTCAST;
3102 vtop->type.t = sbt;
3103 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3104 vtop->type.t = dbt;
3107 static void gen_cast_s(int t)
3109 CType type;
3110 type.t = t;
3111 type.ref = NULL;
3112 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
    error:
            cast_error(&vtop->type, type);
        }

        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't try to convert to ldouble when cross-compiling
           (except when it doesn't matter, i.e. nocode_wanted) */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                /* integer/float -> float constant */
                if (sbt_bt == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* float/integer -> integer constant: first normalize
                   the source to a 64 bit value ... */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ; /* already full width, nothing to do */
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                /* ... then truncate/extend to the destination width */
                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ; /* 64 bit destination, keep as is */
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (nocode_wanted & DATA_ONLY_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension. */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        /* these targets have hardware 8/16 bit extension insns */
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* narrow via shift left then arithmetic/logical shift right */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union: size (c) and alignment (r) are stored in the
           type symbol */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* if both element size and element count are unknown (<0),
               flip one so the product still comes out negative, i.e.
               the array stays marked incomplete */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: required alignment is ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        /* 128 bit two-register values */
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
3413 /* push type size as known at runtime time on top of value stack. Put
3414 alignment at 'a' */
3415 static void vpush_type_size(CType *type, int *a)
3417 if (type->t & VT_VLA) {
3418 type_size(&type->ref->type, a);
3419 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3420 } else {
3421 int size = type_size(type, a);
3422 if (size < 0)
3423 tcc_error("unknown type size");
3424 #if PTR_SIZE == 8
3425 vpushll(size);
3426 #else
3427 vpushi(size);
3428 #endif
3432 /* return the pointed type of t */
3433 static inline CType *pointed_type(CType *type)
3435 return &type->ref->type;
3438 /* modify type so that its it is a pointer to type. */
3439 ST_FUNC void mk_pointer(CType *type)
3441 Sym *s;
3442 s = sym_push(SYM_FIELD, type, 0, -1);
3443 type->t = VT_PTR | (type->t & VT_STORAGE);
3444 type->ref = s;
3447 /* return true if type1 and type2 are exactly the same (including
3448 qualifiers).
3450 static int is_compatible_types(CType *type1, CType *type2)
3452 return compare_types(type1,type2,0);
3455 /* return true if type1 and type2 are the same (ignoring qualifiers).
3457 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3459 return compare_types(type1,type2,1);
3462 static void cast_error(CType *st, CType *dt)
3464 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
/* verify type compatibility to store vtop in 'dt' type */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down matching pointer levels, collecting whether any
           const/volatile qualifier would be discarded on the way */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness. Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets. */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
    error:
            cast_error(st, dt);
        }
        break;
    }
}
3553 static void gen_assign_cast(CType *dt)
3555 verify_assign_cast(dt);
3556 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            /* backend emits the copy inline */
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
                vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* _Bool bitfield: normalize the source to 0/1 first */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* bitfield straddles storage units: byte-wise store */
            store_packed_bf(bit_pos, bit_size);
        } else {
            /* read-modify-write of the containing word */
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            /* record that the stored value still needs narrowing when
               it is next read as a full register */
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
3737 /* post defines POST/PRE add. c is the token ++ or -- */
3738 ST_FUNC void inc(int post, int c)
3740 test_lvalue();
3741 vdup(); /* save lvalue */
3742 if (post) {
3743 gv_dup(); /* duplicate value */
3744 vrotb(3);
3745 vrotb(3);
3747 /* add constant */
3748 vpushi(c - TOK_MID);
3749 gen_op('+');
3750 vstore(); /* store value */
3751 if (post)
3752 vpop(); /* if post op, return saved value */
3755 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3757 /* read the string */
3758 if (tok != TOK_STR)
3759 expect(msg);
3760 cstr_new(astr);
3761 while (tok == TOK_STR) {
3762 /* XXX: add \0 handling too ? */
3763 cstr_cat(astr, tokc.str.data, -1);
3764 next();
3766 cstr_ccat(astr, '\0');
3769 /* If I is >= 1 and a power of two, returns log2(i)+1.
3770 If I is 0 returns 0. */
3771 ST_FUNC int exact_log2p1(int i)
3773 int ret;
3774 if (!i)
3775 return 0;
3776 for (ret = 1; i >= 1 << 8; ret += 8)
3777 i >>= 8;
3778 if (i >= 1 << 4)
3779 ret += 4, i >>= 4;
3780 if (i >= 1 << 2)
3781 ret += 2, i >>= 2;
3782 if (i >= 1 << 1)
3783 ret++;
3784 return ret;
3787 /* Parse __attribute__((...)) GNUC extension. */
3788 static void parse_attribute(AttributeDef *ad)
3790 int t, n;
3791 CString astr;
3793 redo:
3794 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3795 return;
3796 next();
3797 skip('(');
3798 skip('(');
3799 while (tok != ')') {
3800 if (tok < TOK_IDENT)
3801 expect("attribute name");
3802 t = tok;
3803 next();
3804 switch(t) {
3805 case TOK_CLEANUP1:
3806 case TOK_CLEANUP2:
3808 Sym *s;
3810 skip('(');
3811 s = sym_find(tok);
3812 if (!s) {
3813 tcc_warning_c(warn_implicit_function_declaration)(
3814 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
3815 s = external_global_sym(tok, &func_old_type);
3816 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
3817 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
3818 ad->cleanup_func = s;
3819 next();
3820 skip(')');
3821 break;
3823 case TOK_CONSTRUCTOR1:
3824 case TOK_CONSTRUCTOR2:
3825 ad->f.func_ctor = 1;
3826 break;
3827 case TOK_DESTRUCTOR1:
3828 case TOK_DESTRUCTOR2:
3829 ad->f.func_dtor = 1;
3830 break;
3831 case TOK_ALWAYS_INLINE1:
3832 case TOK_ALWAYS_INLINE2:
3833 ad->f.func_alwinl = 1;
3834 break;
3835 case TOK_SECTION1:
3836 case TOK_SECTION2:
3837 skip('(');
3838 parse_mult_str(&astr, "section name");
3839 ad->section = find_section(tcc_state, (char *)astr.data);
3840 skip(')');
3841 cstr_free(&astr);
3842 break;
3843 case TOK_ALIAS1:
3844 case TOK_ALIAS2:
3845 skip('(');
3846 parse_mult_str(&astr, "alias(\"target\")");
3847 ad->alias_target = /* save string as token, for later */
3848 tok_alloc((char*)astr.data, astr.size-1)->tok;
3849 skip(')');
3850 cstr_free(&astr);
3851 break;
3852 case TOK_VISIBILITY1:
3853 case TOK_VISIBILITY2:
3854 skip('(');
3855 parse_mult_str(&astr,
3856 "visibility(\"default|hidden|internal|protected\")");
3857 if (!strcmp (astr.data, "default"))
3858 ad->a.visibility = STV_DEFAULT;
3859 else if (!strcmp (astr.data, "hidden"))
3860 ad->a.visibility = STV_HIDDEN;
3861 else if (!strcmp (astr.data, "internal"))
3862 ad->a.visibility = STV_INTERNAL;
3863 else if (!strcmp (astr.data, "protected"))
3864 ad->a.visibility = STV_PROTECTED;
3865 else
3866 expect("visibility(\"default|hidden|internal|protected\")");
3867 skip(')');
3868 cstr_free(&astr);
3869 break;
3870 case TOK_ALIGNED1:
3871 case TOK_ALIGNED2:
3872 if (tok == '(') {
3873 next();
3874 n = expr_const();
3875 if (n <= 0 || (n & (n - 1)) != 0)
3876 tcc_error("alignment must be a positive power of two");
3877 skip(')');
3878 } else {
3879 n = MAX_ALIGN;
3881 ad->a.aligned = exact_log2p1(n);
3882 if (n != 1 << (ad->a.aligned - 1))
3883 tcc_error("alignment of %d is larger than implemented", n);
3884 break;
3885 case TOK_PACKED1:
3886 case TOK_PACKED2:
3887 ad->a.packed = 1;
3888 break;
3889 case TOK_WEAK1:
3890 case TOK_WEAK2:
3891 ad->a.weak = 1;
3892 break;
3893 case TOK_NODEBUG1:
3894 case TOK_NODEBUG2:
3895 ad->a.nodebug = 1;
3896 break;
3897 case TOK_UNUSED1:
3898 case TOK_UNUSED2:
3899 /* currently, no need to handle it because tcc does not
3900 track unused objects */
3901 break;
3902 case TOK_NORETURN1:
3903 case TOK_NORETURN2:
3904 ad->f.func_noreturn = 1;
3905 break;
3906 case TOK_CDECL1:
3907 case TOK_CDECL2:
3908 case TOK_CDECL3:
3909 ad->f.func_call = FUNC_CDECL;
3910 break;
3911 case TOK_STDCALL1:
3912 case TOK_STDCALL2:
3913 case TOK_STDCALL3:
3914 ad->f.func_call = FUNC_STDCALL;
3915 break;
3916 #ifdef TCC_TARGET_I386
3917 case TOK_REGPARM1:
3918 case TOK_REGPARM2:
3919 skip('(');
3920 n = expr_const();
3921 if (n > 3)
3922 n = 3;
3923 else if (n < 0)
3924 n = 0;
3925 if (n > 0)
3926 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3927 skip(')');
3928 break;
3929 case TOK_FASTCALL1:
3930 case TOK_FASTCALL2:
3931 case TOK_FASTCALL3:
3932 ad->f.func_call = FUNC_FASTCALLW;
3933 break;
3934 #endif
3935 case TOK_MODE:
3936 skip('(');
3937 switch(tok) {
3938 case TOK_MODE_DI:
3939 ad->attr_mode = VT_LLONG + 1;
3940 break;
3941 case TOK_MODE_QI:
3942 ad->attr_mode = VT_BYTE + 1;
3943 break;
3944 case TOK_MODE_HI:
3945 ad->attr_mode = VT_SHORT + 1;
3946 break;
3947 case TOK_MODE_SI:
3948 case TOK_MODE_word:
3949 ad->attr_mode = VT_INT + 1;
3950 break;
3951 default:
3952 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3953 break;
3955 next();
3956 skip(')');
3957 break;
3958 case TOK_DLLEXPORT:
3959 ad->a.dllexport = 1;
3960 break;
3961 case TOK_NODECORATE:
3962 ad->a.nodecorate = 1;
3963 break;
3964 case TOK_DLLIMPORT:
3965 ad->a.dllimport = 1;
3966 break;
3967 default:
3968 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
3969 /* skip parameters */
3970 if (tok == '(') {
3971 int parenthesis = 0;
3972 do {
3973 if (tok == '(')
3974 parenthesis++;
3975 else if (tok == ')')
3976 parenthesis--;
3977 next();
3978 } while (parenthesis && tok != -1);
3980 break;
3982 if (tok != ',')
3983 break;
3984 next();
3986 skip(')');
3987 skip(')');
3988 goto redo;
3991 static Sym * find_field (CType *type, int v, int *cumofs)
3993 Sym *s = type->ref;
3994 int v1 = v | SYM_FIELD;
3996 while ((s = s->next) != NULL) {
3997 if (s->v == v1) {
3998 *cumofs += s->c;
3999 return s;
4001 if ((s->type.t & VT_BTYPE) == VT_STRUCT
4002 && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
4003 /* try to find field in anonymous sub-struct/union */
4004 Sym *ret = find_field (&s->type, v1, cumofs);
4005 if (ret) {
4006 *cumofs += s->c;
4007 return ret;
4012 if (!(v & SYM_FIELD)) { /* top-level call */
4013 s = type->ref;
4014 if (s->c < 0)
4015 tcc_error("dereferencing incomplete type '%s'",
4016 get_tok_str(s->v & ~SYM_STRUCT, 0));
4017 else
4018 tcc_error("field not found: %s",
4019 get_tok_str(v, &tokc));
4021 return NULL;
4024 static void check_fields (CType *type, int check)
4026 Sym *s = type->ref;
4028 while ((s = s->next) != NULL) {
4029 int v = s->v & ~SYM_FIELD;
4030 if (v < SYM_FIRST_ANOM) {
4031 TokenSym *ts = table_ident[v - TOK_IDENT];
4032 if (check && (ts->tok & SYM_FIELD))
4033 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4034 ts->tok ^= SYM_FIELD;
4035 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4036 check_fields (&s->type, check);
4040 static void struct_layout(CType *type, AttributeDef *ad)
4042 int size, align, maxalign, offset, c, bit_pos, bit_size;
4043 int packed, a, bt, prevbt, prev_bit_size;
4044 int pcc = !tcc_state->ms_bitfields;
4045 int pragma_pack = *tcc_state->pack_stack_ptr;
4046 Sym *f;
4048 maxalign = 1;
4049 offset = 0;
4050 c = 0;
4051 bit_pos = 0;
4052 prevbt = VT_STRUCT; /* make it never match */
4053 prev_bit_size = 0;
4055 //#define BF_DEBUG
4057 for (f = type->ref->next; f; f = f->next) {
4058 if (f->type.t & VT_BITFIELD)
4059 bit_size = BIT_SIZE(f->type.t);
4060 else
4061 bit_size = -1;
4062 size = type_size(&f->type, &align);
4063 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4064 packed = 0;
4066 if (pcc && bit_size == 0) {
4067 /* in pcc mode, packing does not affect zero-width bitfields */
4069 } else {
4070 /* in pcc mode, attribute packed overrides if set. */
4071 if (pcc && (f->a.packed || ad->a.packed))
4072 align = packed = 1;
4074 /* pragma pack overrides align if lesser and packs bitfields always */
4075 if (pragma_pack) {
4076 packed = 1;
4077 if (pragma_pack < align)
4078 align = pragma_pack;
4079 /* in pcc mode pragma pack also overrides individual align */
4080 if (pcc && pragma_pack < a)
4081 a = 0;
4084 /* some individual align was specified */
4085 if (a)
4086 align = a;
4088 if (type->ref->type.t == VT_UNION) {
4089 if (pcc && bit_size >= 0)
4090 size = (bit_size + 7) >> 3;
4091 offset = 0;
4092 if (size > c)
4093 c = size;
4095 } else if (bit_size < 0) {
4096 if (pcc)
4097 c += (bit_pos + 7) >> 3;
4098 c = (c + align - 1) & -align;
4099 offset = c;
4100 if (size > 0)
4101 c += size;
4102 bit_pos = 0;
4103 prevbt = VT_STRUCT;
4104 prev_bit_size = 0;
4106 } else {
4107 /* A bit-field. Layout is more complicated. There are two
4108 options: PCC (GCC) compatible and MS compatible */
4109 if (pcc) {
4110 /* In PCC layout a bit-field is placed adjacent to the
4111 preceding bit-fields, except if:
4112 - it has zero-width
4113 - an individual alignment was given
4114 - it would overflow its base type container and
4115 there is no packing */
4116 if (bit_size == 0) {
4117 new_field:
4118 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4119 bit_pos = 0;
4120 } else if (f->a.aligned) {
4121 goto new_field;
4122 } else if (!packed) {
4123 int a8 = align * 8;
4124 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4125 if (ofs > size / align)
4126 goto new_field;
4129 /* in pcc mode, long long bitfields have type int if they fit */
4130 if (size == 8 && bit_size <= 32)
4131 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4133 while (bit_pos >= align * 8)
4134 c += align, bit_pos -= align * 8;
4135 offset = c;
4137 /* In PCC layout named bit-fields influence the alignment
4138 of the containing struct using the base types alignment,
4139 except for packed fields (which here have correct align). */
4140 if (f->v & SYM_FIRST_ANOM
4141 // && bit_size // ??? gcc on ARM/rpi does that
4143 align = 1;
4145 } else {
4146 bt = f->type.t & VT_BTYPE;
4147 if ((bit_pos + bit_size > size * 8)
4148 || (bit_size > 0) == (bt != prevbt)
4150 c = (c + align - 1) & -align;
4151 offset = c;
4152 bit_pos = 0;
4153 /* In MS bitfield mode a bit-field run always uses
4154 at least as many bits as the underlying type.
4155 To start a new run it's also required that this
4156 or the last bit-field had non-zero width. */
4157 if (bit_size || prev_bit_size)
4158 c += size;
4160 /* In MS layout the records alignment is normally
4161 influenced by the field, except for a zero-width
4162 field at the start of a run (but by further zero-width
4163 fields it is again). */
4164 if (bit_size == 0 && prevbt != bt)
4165 align = 1;
4166 prevbt = bt;
4167 prev_bit_size = bit_size;
4170 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4171 | (bit_pos << VT_STRUCT_SHIFT);
4172 bit_pos += bit_size;
4174 if (align > maxalign)
4175 maxalign = align;
4177 #ifdef BF_DEBUG
4178 printf("set field %s offset %-2d size %-2d align %-2d",
4179 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4180 if (f->type.t & VT_BITFIELD) {
4181 printf(" pos %-2d bits %-2d",
4182 BIT_POS(f->type.t),
4183 BIT_SIZE(f->type.t)
4186 printf("\n");
4187 #endif
4189 f->c = offset;
4190 f->r = 0;
4193 if (pcc)
4194 c += (bit_pos + 7) >> 3;
4196 /* store size and alignment */
4197 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4198 if (a < maxalign)
4199 a = maxalign;
4200 type->ref->r = a;
4201 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4202 /* can happen if individual align for some member was given. In
4203 this case MSVC ignores maxalign when aligning the size */
4204 a = pragma_pack;
4205 if (a < bt)
4206 a = bt;
4208 c = (c + a - 1) & -a;
4209 type->ref->c = c;
4211 #ifdef BF_DEBUG
4212 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4213 #endif
4215 /* check whether we can access bitfields by their type */
4216 for (f = type->ref->next; f; f = f->next) {
4217 int s, px, cx, c0;
4218 CType t;
4220 if (0 == (f->type.t & VT_BITFIELD))
4221 continue;
4222 f->type.ref = f;
4223 f->auxtype = -1;
4224 bit_size = BIT_SIZE(f->type.t);
4225 if (bit_size == 0)
4226 continue;
4227 bit_pos = BIT_POS(f->type.t);
4228 size = type_size(&f->type, &align);
4230 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4231 #ifdef TCC_TARGET_ARM
4232 && !(f->c & (align - 1))
4233 #endif
4235 continue;
4237 /* try to access the field using a different type */
4238 c0 = -1, s = align = 1;
4239 t.t = VT_BYTE;
4240 for (;;) {
4241 px = f->c * 8 + bit_pos;
4242 cx = (px >> 3) & -align;
4243 px = px - (cx << 3);
4244 if (c0 == cx)
4245 break;
4246 s = (px + bit_size + 7) >> 3;
4247 if (s > 4) {
4248 t.t = VT_LLONG;
4249 } else if (s > 2) {
4250 t.t = VT_INT;
4251 } else if (s > 1) {
4252 t.t = VT_SHORT;
4253 } else {
4254 t.t = VT_BYTE;
4256 s = type_size(&t, &align);
4257 c0 = cx;
4260 if (px + bit_size <= s * 8 && cx + s <= c
4261 #ifdef TCC_TARGET_ARM
4262 && !(cx & (align - 1))
4263 #endif
4265 /* update offset and bit position */
4266 f->c = cx;
4267 bit_pos = px;
4268 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4269 | (bit_pos << VT_STRUCT_SHIFT);
4270 if (s != size)
4271 f->auxtype = t.t;
4272 #ifdef BF_DEBUG
4273 printf("FIX field %s offset %-2d size %-2d align %-2d "
4274 "pos %-2d bits %-2d\n",
4275 get_tok_str(f->v & ~SYM_FIELD, NULL),
4276 cx, s, align, px, bit_size);
4277 #endif
4278 } else {
4279 /* fall back to load/store single-byte wise */
4280 f->auxtype = VT_STRUCT;
4281 #ifdef BF_DEBUG
4282 printf("FIX field %s : load byte-wise\n",
4283 get_tok_str(f->v & ~SYM_FIELD, NULL));
4284 #endif
4289 static void do_Static_assert(void);
4291 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4292 static void struct_decl(CType *type, int u)
4294 int v, c, size, align, flexible;
4295 int bit_size, bsize, bt;
4296 Sym *s, *ss, **ps;
4297 AttributeDef ad, ad1;
4298 CType type1, btype;
4300 memset(&ad, 0, sizeof ad);
4301 next();
4302 parse_attribute(&ad);
4303 if (tok != '{') {
4304 v = tok;
4305 next();
4306 /* struct already defined ? return it */
4307 if (v < TOK_IDENT)
4308 expect("struct/union/enum name");
4309 s = struct_find(v);
4310 if (s && (s->sym_scope == local_scope || tok != '{')) {
4311 if (u == s->type.t)
4312 goto do_decl;
4313 if (u == VT_ENUM && IS_ENUM(s->type.t))
4314 goto do_decl;
4315 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4317 } else {
4318 v = anon_sym++;
4320 /* Record the original enum/struct/union token. */
4321 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4322 type1.ref = NULL;
4323 /* we put an undefined size for struct/union */
4324 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4325 s->r = 0; /* default alignment is zero as gcc */
4326 do_decl:
4327 type->t = s->type.t;
4328 type->ref = s;
4330 if (tok == '{') {
4331 next();
4332 if (s->c != -1)
4333 tcc_error("struct/union/enum already defined");
4334 s->c = -2;
4335 /* cannot be empty */
4336 /* non empty enums are not allowed */
4337 ps = &s->next;
4338 if (u == VT_ENUM) {
4339 long long ll = 0, pl = 0, nl = 0;
4340 CType t;
4341 t.ref = s;
4342 /* enum symbols have static storage */
4343 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4344 for(;;) {
4345 v = tok;
4346 if (v < TOK_UIDENT)
4347 expect("identifier");
4348 ss = sym_find(v);
4349 if (ss && !local_stack)
4350 tcc_error("redefinition of enumerator '%s'",
4351 get_tok_str(v, NULL));
4352 next();
4353 if (tok == '=') {
4354 next();
4355 ll = expr_const64();
4357 ss = sym_push(v, &t, VT_CONST, 0);
4358 ss->enum_val = ll;
4359 *ps = ss, ps = &ss->next;
4360 if (ll < nl)
4361 nl = ll;
4362 if (ll > pl)
4363 pl = ll;
4364 if (tok != ',')
4365 break;
4366 next();
4367 ll++;
4368 /* NOTE: we accept a trailing comma */
4369 if (tok == '}')
4370 break;
4372 skip('}');
4373 /* set integral type of the enum */
4374 t.t = VT_INT;
4375 if (nl >= 0) {
4376 if (pl != (unsigned)pl)
4377 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4378 t.t |= VT_UNSIGNED;
4379 } else if (pl != (int)pl || nl != (int)nl)
4380 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4381 s->type.t = type->t = t.t | VT_ENUM;
4382 s->c = 0;
4383 /* set type for enum members */
4384 for (ss = s->next; ss; ss = ss->next) {
4385 ll = ss->enum_val;
4386 if (ll == (int)ll) /* default is int if it fits */
4387 continue;
4388 if (t.t & VT_UNSIGNED) {
4389 ss->type.t |= VT_UNSIGNED;
4390 if (ll == (unsigned)ll)
4391 continue;
4393 ss->type.t = (ss->type.t & ~VT_BTYPE)
4394 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4396 } else {
4397 c = 0;
4398 flexible = 0;
4399 while (tok != '}') {
4400 if (tok == TOK_STATIC_ASSERT) {
4401 do_Static_assert();
4402 continue;
4404 if (!parse_btype(&btype, &ad1, 0)) {
4405 skip(';');
4406 continue;
4408 while (1) {
4409 if (flexible)
4410 tcc_error("flexible array member '%s' not at the end of struct",
4411 get_tok_str(v, NULL));
4412 bit_size = -1;
4413 v = 0;
4414 type1 = btype;
4415 if (tok != ':') {
4416 if (tok != ';')
4417 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4418 if (v == 0) {
4419 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4420 expect("identifier");
4421 else {
4422 int v = btype.ref->v;
4423 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4424 if (tcc_state->ms_extensions == 0)
4425 expect("identifier");
4429 if (type_size(&type1, &align) < 0) {
4430 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4431 flexible = 1;
4432 else
4433 tcc_error("field '%s' has incomplete type",
4434 get_tok_str(v, NULL));
4436 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4437 (type1.t & VT_BTYPE) == VT_VOID ||
4438 (type1.t & VT_STORAGE))
4439 tcc_error("invalid type for '%s'",
4440 get_tok_str(v, NULL));
4442 if (tok == ':') {
4443 next();
4444 bit_size = expr_const();
4445 /* XXX: handle v = 0 case for messages */
4446 if (bit_size < 0)
4447 tcc_error("negative width in bit-field '%s'",
4448 get_tok_str(v, NULL));
4449 if (v && bit_size == 0)
4450 tcc_error("zero width for bit-field '%s'",
4451 get_tok_str(v, NULL));
4452 parse_attribute(&ad1);
4454 size = type_size(&type1, &align);
4455 if (bit_size >= 0) {
4456 bt = type1.t & VT_BTYPE;
4457 if (bt != VT_INT &&
4458 bt != VT_BYTE &&
4459 bt != VT_SHORT &&
4460 bt != VT_BOOL &&
4461 bt != VT_LLONG)
4462 tcc_error("bitfields must have scalar type");
4463 bsize = size * 8;
4464 if (bit_size > bsize) {
4465 tcc_error("width of '%s' exceeds its type",
4466 get_tok_str(v, NULL));
4467 } else if (bit_size == bsize
4468 && !ad.a.packed && !ad1.a.packed) {
4469 /* no need for bit fields */
4471 } else if (bit_size == 64) {
4472 tcc_error("field width 64 not implemented");
4473 } else {
4474 type1.t = (type1.t & ~VT_STRUCT_MASK)
4475 | VT_BITFIELD
4476 | (bit_size << (VT_STRUCT_SHIFT + 6));
4479 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4480 /* Remember we've seen a real field to check
4481 for placement of flexible array member. */
4482 c = 1;
4484 /* If member is a struct or bit-field, enforce
4485 placing into the struct (as anonymous). */
4486 if (v == 0 &&
4487 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4488 bit_size >= 0)) {
4489 v = anon_sym++;
4491 if (v) {
4492 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4493 ss->a = ad1.a;
4494 *ps = ss;
4495 ps = &ss->next;
4497 if (tok == ';' || tok == TOK_EOF)
4498 break;
4499 skip(',');
4501 skip(';');
4503 skip('}');
4504 parse_attribute(&ad);
4505 if (ad.cleanup_func) {
4506 tcc_warning("attribute '__cleanup__' ignored on type");
4508 check_fields(type, 1);
4509 check_fields(type, 0);
4510 struct_layout(type, &ad);
4511 if (debug_modes)
4512 tcc_debug_fix_anon(tcc_state, type);
4517 static void sym_to_attr(AttributeDef *ad, Sym *s)
4519 merge_symattr(&ad->a, &s->a);
4520 merge_funcattr(&ad->f, &s->f);
4523 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4524 are added to the element type, copied because it could be a typedef. */
4525 static void parse_btype_qualify(CType *type, int qualifiers)
4527 while (type->t & VT_ARRAY) {
4528 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4529 type = &type->ref->type;
4531 type->t |= qualifiers;
4534 /* return 0 if no type declaration. otherwise, return the basic type
4535 and skip it.
4537 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
4539 int t, u, bt, st, type_found, typespec_found, g, n;
4540 Sym *s;
4541 CType type1;
4543 memset(ad, 0, sizeof(AttributeDef));
4544 type_found = 0;
4545 typespec_found = 0;
4546 t = VT_INT;
4547 bt = st = -1;
4548 type->ref = NULL;
4550 while(1) {
4551 switch(tok) {
4552 case TOK_EXTENSION:
4553 /* currently, we really ignore extension */
4554 next();
4555 continue;
4557 /* basic types */
4558 case TOK_CHAR:
4559 u = VT_BYTE;
4560 basic_type:
4561 next();
4562 basic_type1:
4563 if (u == VT_SHORT || u == VT_LONG) {
4564 if (st != -1 || (bt != -1 && bt != VT_INT))
4565 tmbt: tcc_error("too many basic types");
4566 st = u;
4567 } else {
4568 if (bt != -1 || (st != -1 && u != VT_INT))
4569 goto tmbt;
4570 bt = u;
4572 if (u != VT_INT)
4573 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4574 typespec_found = 1;
4575 break;
4576 case TOK_VOID:
4577 u = VT_VOID;
4578 goto basic_type;
4579 case TOK_SHORT:
4580 u = VT_SHORT;
4581 goto basic_type;
4582 case TOK_INT:
4583 u = VT_INT;
4584 goto basic_type;
4585 case TOK_ALIGNAS:
4586 { int n;
4587 AttributeDef ad1;
4588 next();
4589 skip('(');
4590 memset(&ad1, 0, sizeof(AttributeDef));
4591 if (parse_btype(&type1, &ad1, 0)) {
4592 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4593 if (ad1.a.aligned)
4594 n = 1 << (ad1.a.aligned - 1);
4595 else
4596 type_size(&type1, &n);
4597 } else {
4598 n = expr_const();
4599 if (n < 0 || (n & (n - 1)) != 0)
4600 tcc_error("alignment must be a positive power of two");
4602 skip(')');
4603 ad->a.aligned = exact_log2p1(n);
4605 continue;
4606 case TOK_LONG:
4607 if ((t & VT_BTYPE) == VT_DOUBLE) {
4608 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4609 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4610 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4611 } else {
4612 u = VT_LONG;
4613 goto basic_type;
4615 next();
4616 break;
4617 #ifdef TCC_TARGET_ARM64
4618 case TOK_UINT128:
4619 /* GCC's __uint128_t appears in some Linux header files. Make it a
4620 synonym for long double to get the size and alignment right. */
4621 u = VT_LDOUBLE;
4622 goto basic_type;
4623 #endif
4624 case TOK_BOOL:
4625 u = VT_BOOL;
4626 goto basic_type;
4627 case TOK_COMPLEX:
4628 tcc_error("_Complex is not yet supported");
4629 case TOK_FLOAT:
4630 u = VT_FLOAT;
4631 goto basic_type;
4632 case TOK_DOUBLE:
4633 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4634 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4635 } else {
4636 u = VT_DOUBLE;
4637 goto basic_type;
4639 next();
4640 break;
4641 case TOK_ENUM:
4642 struct_decl(&type1, VT_ENUM);
4643 basic_type2:
4644 u = type1.t;
4645 type->ref = type1.ref;
4646 goto basic_type1;
4647 case TOK_STRUCT:
4648 struct_decl(&type1, VT_STRUCT);
4649 goto basic_type2;
4650 case TOK_UNION:
4651 struct_decl(&type1, VT_UNION);
4652 goto basic_type2;
4654 /* type modifiers */
4655 case TOK__Atomic:
4656 next();
4657 type->t = t;
4658 parse_btype_qualify(type, VT_ATOMIC);
4659 t = type->t;
4660 if (tok == '(') {
4661 parse_expr_type(&type1);
4662 /* remove all storage modifiers except typedef */
4663 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4664 if (type1.ref)
4665 sym_to_attr(ad, type1.ref);
4666 goto basic_type2;
4668 break;
4669 case TOK_CONST1:
4670 case TOK_CONST2:
4671 case TOK_CONST3:
4672 type->t = t;
4673 parse_btype_qualify(type, VT_CONSTANT);
4674 t = type->t;
4675 next();
4676 break;
4677 case TOK_VOLATILE1:
4678 case TOK_VOLATILE2:
4679 case TOK_VOLATILE3:
4680 type->t = t;
4681 parse_btype_qualify(type, VT_VOLATILE);
4682 t = type->t;
4683 next();
4684 break;
4685 case TOK_SIGNED1:
4686 case TOK_SIGNED2:
4687 case TOK_SIGNED3:
4688 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4689 tcc_error("signed and unsigned modifier");
4690 t |= VT_DEFSIGN;
4691 next();
4692 typespec_found = 1;
4693 break;
4694 case TOK_REGISTER:
4695 case TOK_AUTO:
4696 case TOK_RESTRICT1:
4697 case TOK_RESTRICT2:
4698 case TOK_RESTRICT3:
4699 next();
4700 break;
4701 case TOK_UNSIGNED:
4702 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4703 tcc_error("signed and unsigned modifier");
4704 t |= VT_DEFSIGN | VT_UNSIGNED;
4705 next();
4706 typespec_found = 1;
4707 break;
4709 /* storage */
4710 case TOK_EXTERN:
4711 g = VT_EXTERN;
4712 goto storage;
4713 case TOK_STATIC:
4714 g = VT_STATIC;
4715 goto storage;
4716 case TOK_TYPEDEF:
4717 g = VT_TYPEDEF;
4718 goto storage;
4719 storage:
4720 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4721 tcc_error("multiple storage classes");
4722 t |= g;
4723 next();
4724 break;
4725 case TOK_INLINE1:
4726 case TOK_INLINE2:
4727 case TOK_INLINE3:
4728 t |= VT_INLINE;
4729 next();
4730 break;
4731 case TOK_NORETURN3:
4732 next();
4733 ad->f.func_noreturn = 1;
4734 break;
4735 /* GNUC attribute */
4736 case TOK_ATTRIBUTE1:
4737 case TOK_ATTRIBUTE2:
4738 parse_attribute(ad);
4739 if (ad->attr_mode) {
4740 u = ad->attr_mode -1;
4741 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4743 continue;
4744 /* GNUC typeof */
4745 case TOK_TYPEOF1:
4746 case TOK_TYPEOF2:
4747 case TOK_TYPEOF3:
4748 next();
4749 parse_expr_type(&type1);
4750 /* remove all storage modifiers except typedef */
4751 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4752 if (type1.ref)
4753 sym_to_attr(ad, type1.ref);
4754 goto basic_type2;
4755 case TOK_THREAD_LOCAL:
4756 tcc_error("_Thread_local is not implemented");
4757 default:
4758 if (typespec_found)
4759 goto the_end;
4760 s = sym_find(tok);
4761 if (!s || !(s->type.t & VT_TYPEDEF))
4762 goto the_end;
4764 n = tok, next();
4765 if (tok == ':' && ignore_label) {
4766 /* ignore if it's a label */
4767 unget_tok(n);
4768 goto the_end;
4771 t &= ~(VT_BTYPE|VT_LONG);
4772 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4773 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4774 type->ref = s->type.ref;
4775 if (t)
4776 parse_btype_qualify(type, t);
4777 t = type->t;
4778 /* get attributes from typedef */
4779 sym_to_attr(ad, s);
4780 typespec_found = 1;
4781 st = bt = -2;
4782 break;
4784 type_found = 1;
4786 the_end:
4787 if (tcc_state->char_is_unsigned) {
4788 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4789 t |= VT_UNSIGNED;
4791 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4792 bt = t & (VT_BTYPE|VT_LONG);
4793 if (bt == VT_LONG)
4794 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4795 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4796 if (bt == VT_LDOUBLE)
4797 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4798 #endif
4799 type->t = t;
4800 return type_found;
4803 /* convert a function parameter type (array to pointer and function to
4804 function pointer) */
4805 static inline void convert_parameter_type(CType *pt)
4807 /* remove const and volatile qualifiers (XXX: const could be used
4808 to indicate a const function parameter */
4809 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4810 /* array must be transformed to pointer according to ANSI C */
4811 pt->t &= ~VT_ARRAY;
4812 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4813 mk_pointer(pt);
4817 ST_FUNC void parse_asm_str(CString *astr)
4819 skip('(');
4820 parse_mult_str(astr, "string constant");
4823 /* Parse an asm label and return the token */
4824 static int asm_label_instr(void)
4826 int v;
4827 CString astr;
4829 next();
4830 parse_asm_str(&astr);
4831 skip(')');
4832 #ifdef ASM_DEBUG
4833 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4834 #endif
4835 v = tok_alloc(astr.data, astr.size - 1)->tok;
4836 cstr_free(&astr);
4837 return v;
4840 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4842 int n, l, t1, arg_size, align;
4843 Sym **plast, *s, *first;
4844 AttributeDef ad1;
4845 CType pt;
4846 TokenString *vla_array_tok = NULL;
4847 int *vla_array_str = NULL;
4849 if (tok == '(') {
4850 /* function type, or recursive declarator (return if so) */
4851 next();
4852 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4853 return 0;
4854 if (tok == ')')
4855 l = 0;
4856 else if (parse_btype(&pt, &ad1, 0))
4857 l = FUNC_NEW;
4858 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4859 merge_attr (ad, &ad1);
4860 return 0;
4861 } else
4862 l = FUNC_OLD;
4864 first = NULL;
4865 plast = &first;
4866 arg_size = 0;
4867 ++local_scope;
4868 if (l) {
4869 for(;;) {
4870 /* read param name and compute offset */
4871 if (l != FUNC_OLD) {
4872 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4873 break;
4874 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4875 if ((pt.t & VT_BTYPE) == VT_VOID)
4876 tcc_error("parameter declared as void");
4877 if (n == 0)
4878 n = SYM_FIELD;
4879 } else {
4880 n = tok;
4881 pt.t = VT_VOID; /* invalid type */
4882 pt.ref = NULL;
4883 next();
4885 if (n < TOK_UIDENT)
4886 expect("identifier");
4887 convert_parameter_type(&pt);
4888 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4889 /* these symbols may be evaluated for VLArrays (see below, under
4890 nocode_wanted) which is why we push them here as normal symbols
4891 temporarily. Example: int func(int a, int b[++a]); */
4892 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4893 *plast = s;
4894 plast = &s->next;
4895 if (tok == ')')
4896 break;
4897 skip(',');
4898 if (l == FUNC_NEW && tok == TOK_DOTS) {
4899 l = FUNC_ELLIPSIS;
4900 next();
4901 break;
4903 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4904 tcc_error("invalid type");
4906 } else
4907 /* if no parameters, then old type prototype */
4908 l = FUNC_OLD;
4909 skip(')');
4910 /* remove parameter symbols from token table, keep on stack */
4911 if (first) {
4912 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4913 for (s = first; s; s = s->next)
4914 s->v |= SYM_FIELD;
4916 --local_scope;
4917 /* NOTE: const is ignored in returned type as it has a special
4918 meaning in gcc / C++ */
4919 type->t &= ~VT_CONSTANT;
4920 /* some ancient pre-K&R C allows a function to return an array
4921 and the array brackets to be put after the arguments, such
4922 that "int c()[]" means something like "int[] c()" */
4923 if (tok == '[') {
4924 next();
4925 skip(']'); /* only handle simple "[]" */
4926 mk_pointer(type);
4928 /* we push a anonymous symbol which will contain the function prototype */
4929 ad->f.func_args = arg_size;
4930 ad->f.func_type = l;
4931 s = sym_push(SYM_FIELD, type, 0, 0);
4932 s->a = ad->a;
4933 s->f = ad->f;
4934 s->next = first;
4935 type->t = VT_FUNC;
4936 type->ref = s;
4937 } else if (tok == '[') {
4938 int saved_nocode_wanted = nocode_wanted;
4939 /* array definition */
4940 next();
4941 n = -1;
4942 t1 = 0;
4943 if (td & TYPE_PARAM) while (1) {
4944 /* XXX The optional type-quals and static should only be accepted
4945 in parameter decls. The '*' as well, and then even only
4946 in prototypes (not function defs). */
4947 switch (tok) {
4948 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4949 case TOK_CONST1:
4950 case TOK_VOLATILE1:
4951 case TOK_STATIC:
4952 case '*':
4953 next();
4954 continue;
4955 default:
4956 break;
4958 if (tok != ']') {
4959 /* Code generation is not done now but has to be done
4960 at start of function. Save code here for later use. */
4961 nocode_wanted = 1;
4962 skip_or_save_block(&vla_array_tok);
4963 unget_tok(0);
4964 vla_array_str = vla_array_tok->str;
4965 begin_macro(vla_array_tok, 2);
4966 next();
4967 gexpr();
4968 end_macro();
4969 next();
4970 goto check;
4972 break;
4974 } else if (tok != ']') {
4975 if (!local_stack || (storage & VT_STATIC))
4976 vpushi(expr_const());
4977 else {
4978 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4979 length must always be evaluated, even under nocode_wanted,
4980 so that its size slot is initialized (e.g. under sizeof
4981 or typeof). */
4982 nocode_wanted = 0;
4983 gexpr();
4985 check:
4986 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4987 n = vtop->c.i;
4988 if (n < 0)
4989 tcc_error("invalid array size");
4990 } else {
4991 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4992 tcc_error("size of variable length array should be an integer");
4993 n = 0;
4994 t1 = VT_VLA;
4997 skip(']');
4998 /* parse next post type */
4999 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
5001 if ((type->t & VT_BTYPE) == VT_FUNC)
5002 tcc_error("declaration of an array of functions");
5003 if ((type->t & VT_BTYPE) == VT_VOID
5004 || type_size(type, &align) < 0)
5005 tcc_error("declaration of an array of incomplete type elements");
5007 t1 |= type->t & VT_VLA;
5009 if (t1 & VT_VLA) {
5010 if (n < 0) {
5011 if (td & TYPE_NEST)
5012 tcc_error("need explicit inner array size in VLAs");
5014 else {
5015 loc -= type_size(&int_type, &align);
5016 loc &= -align;
5017 n = loc;
5019 vpush_type_size(type, &align);
5020 gen_op('*');
5021 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5022 vswap();
5023 vstore();
5026 if (n != -1)
5027 vpop();
5028 nocode_wanted = saved_nocode_wanted;
5030 /* we push an anonymous symbol which will contain the array
5031 element type */
5032 s = sym_push(SYM_FIELD, type, 0, n);
5033 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5034 type->ref = s;
5036 if (vla_array_str) {
5037 if (t1 & VT_VLA)
5038 s->vla_array_str = vla_array_str;
5039 else
5040 tok_str_free_str(vla_array_str);
5043 return 1;
5046 /* Parse a type declarator (except basic type), and return the type
5047 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5048 expected. 'type' should contain the basic type. 'ad' is the
5049 attribute definition of the basic type. It can be modified by
5050 type_decl(). If this (possibly abstract) declarator is a pointer chain
5051 it returns the innermost pointed to type (equals *type, but is a different
5052 pointer), otherwise returns type itself, that's used for recursive calls. */
5053 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5055 CType *post, *ret;
5056 int qualifiers, storage;
5058 /* recursive type, remove storage bits first, apply them later again */
5059 storage = type->t & VT_STORAGE;
5060 type->t &= ~VT_STORAGE;
5061 post = ret = type;
/* consume the pointer-declarator chain; each '*' may carry qualifiers */
5063 while (tok == '*') {
5064 qualifiers = 0;
5065 redo:
5066 next();
5067 switch(tok) {
5068 case TOK__Atomic:
5069 qualifiers |= VT_ATOMIC;
5070 goto redo;
5071 case TOK_CONST1:
5072 case TOK_CONST2:
5073 case TOK_CONST3:
5074 qualifiers |= VT_CONSTANT;
5075 goto redo;
5076 case TOK_VOLATILE1:
5077 case TOK_VOLATILE2:
5078 case TOK_VOLATILE3:
5079 qualifiers |= VT_VOLATILE;
5080 goto redo;
5081 case TOK_RESTRICT1:
5082 case TOK_RESTRICT2:
5083 case TOK_RESTRICT3:
/* 'restrict' is accepted but sets no qualifier bit */
5084 goto redo;
5085 /* XXX: clarify attribute handling */
5086 case TOK_ATTRIBUTE1:
5087 case TOK_ATTRIBUTE2:
5088 parse_attribute(ad);
5089 break;
5091 mk_pointer(type);
5092 type->t |= qualifiers;
5093 if (ret == type)
5094 /* innermost pointed to type is the one for the first derivation */
5095 ret = pointed_type(type);
/* '(' is either a parameter list or a parenthesized inner declarator;
   post_type() is used to tell the two apart */
5098 if (tok == '(') {
5099 /* This is possibly a parameter type list for abstract declarators
5100 ('int ()'), use post_type for testing this. */
5101 if (!post_type(type, ad, 0, td)) {
5102 /* It's not, so it's a nested declarator, and the post operations
5103 apply to the innermost pointed to type (if any). */
5104 /* XXX: this is not correct to modify 'ad' at this point, but
5105 the syntax is not clear */
5106 parse_attribute(ad);
5107 post = type_decl(type, ad, v, td);
5108 skip(')');
5109 } else
5110 goto abstract;
5111 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5112 /* type identifier */
5113 *v = tok;
5114 next();
5115 } else {
5116 abstract:
5117 if (!(td & TYPE_ABSTRACT))
5118 expect("identifier");
5119 *v = 0;
/* apply array/function derivations; storage classes are only passed
   through for the outermost (non-nested) declarator */
5121 post_type(post, ad, post != ret ? 0 : storage,
5122 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5123 parse_attribute(ad);
5124 type->t |= storage;
5125 return ret;
5128 /* indirection with full error checking and bound check */
5129 ST_FUNC void indir(void)
5131 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
/* dereferencing a function designator is a no-op */
5132 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5133 return;
5134 expect("pointer");
/* load the pointer value itself into a register first */
5136 if (vtop->r & VT_LVAL)
5137 gv(RC_INT);
5138 vtop->type = *pointed_type(&vtop->type);
5139 /* Arrays and functions are never lvalues */
5140 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5141 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5142 vtop->r |= VT_LVAL;
5143 /* if bound checking, the referenced pointer must be checked */
5144 #ifdef CONFIG_TCC_BCHECK
5145 if (tcc_state->do_bounds_check)
5146 vtop->r |= VT_MUSTBOUND;
5147 #endif
5151 /* pass a parameter to a function and do type checking and casting */
5152 static void gfunc_param_typed(Sym *func, Sym *arg)
5154 int func_type;
5155 CType type;
5157 func_type = func->f.func_type;
/* unprototyped call or the variadic tail: apply default argument
   promotions instead of casting to a declared parameter type */
5158 if (func_type == FUNC_OLD ||
5159 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5160 /* default casting : only need to convert float to double */
5161 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5162 gen_cast_s(VT_DOUBLE);
5163 } else if (vtop->type.t & VT_BITFIELD) {
5164 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5165 type.ref = vtop->type.ref;
5166 gen_cast(&type);
5167 } else if (vtop->r & VT_MUSTCAST) {
5168 force_charshort_cast();
5170 } else if (arg == NULL) {
5171 tcc_error("too many arguments to function");
5172 } else {
/* prototyped parameter: cast the value to the declared type */
5173 type = arg->type;
5174 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5175 gen_assign_cast(&type);
5179 /* parse an expression and return its type without any side effect. */
5180 static void expr_type(CType *type, void (*expr_fn)(void))
5182 nocode_wanted++;
5183 expr_fn();
5184 *type = vtop->type;
5185 vpop();
5186 nocode_wanted--;
5189 /* parse an expression of the form '(type)' or '(expr)' and return its
5190 type */
5191 static void parse_expr_type(CType *type)
5193 int n;
5194 AttributeDef ad;
5196 skip('(');
5197 if (parse_btype(type, &ad, 0)) {
5198 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5199 } else {
5200 expr_type(type, gexpr);
5202 skip(')');
5205 static void parse_type(CType *type)
5207 AttributeDef ad;
5208 int n;
5210 if (!parse_btype(type, &ad, 0)) {
5211 expect("type");
5213 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse a builtin's parenthesized argument list according to the
   template 'args': 'e' = plain expression, 't' = type name,
   'v'/'V' = (const) void pointer, 's'/'S' = (const) char pointer,
   'i' = int, 'l' = size_t.  If 'nc' is set, parse without generating
   code (nocode_wanted). */
5216 static void parse_builtin_params(int nc, const char *args)
5218 char c, sep = '(';
5219 CType type;
5220 if (nc)
5221 nocode_wanted++;
5222 next();
5223 if (*args == 0)
5224 skip(sep);
5225 while ((c = *args++)) {
5226 skip(sep);
5227 sep = ',';
5228 if (c == 't') {
5229 parse_type(&type);
5230 vpush(&type);
5231 continue;
5233 expr_eq();
5234 type.ref = NULL;
5235 type.t = 0;
5236 switch (c) {
5237 case 'e':
5238 continue;
5239 case 'V':
5240 type.t = VT_CONSTANT;
/* fall through: shares the pointer setup of 'v' */
5241 case 'v':
5242 type.t |= VT_VOID;
5243 mk_pointer (&type);
5244 break;
5245 case 'S':
5246 type.t = VT_CONSTANT;
/* fall through: shares the pointer setup of 's' */
5247 case 's':
5248 type.t |= char_type.t;
5249 mk_pointer (&type);
5250 break;
5251 case 'i':
5252 type.t = VT_INT;
5253 break;
5254 case 'l':
5255 type.t = VT_SIZE_T;
5256 break;
5257 default:
5258 break;
5260 gen_assign_cast(&type);
5262 skip(')');
5263 if (nc)
5264 nocode_wanted--;
/* Parse and generate code for one __atomic_* builtin call; 'atok' is
   the builtin's token and selects the argument/return template below.
   The call is lowered to a runtime helper "__atomic_xxx_N" where N is
   the operand size in bytes. */
5267 static void parse_atomic(int atok)
5269 int size, align, arg, t, save = 0;
5270 CType *atom, *atom_ptr, ct = {0};
5271 SValue store;
5272 char buf[40];
5273 static const char *const templates[] = {
5275 * Each entry consists of callback and function template.
5276 * The template represents argument types and return type.
5278 * ? void (return-only)
5279 * b bool
5280 * a atomic
5281 * A read-only atomic
5282 * p pointer to memory
5283 * v value
5284 * l load pointer
5285 * s save pointer
5286 * m memory model
5289 /* keep in order of appearance in tcctok.h: */
5290 /* __atomic_store */ "alm.?",
5291 /* __atomic_load */ "Asm.v",
5292 /* __atomic_exchange */ "alsm.v",
5293 /* __atomic_compare_exchange */ "aplbmm.b",
5294 /* __atomic_fetch_add */ "avm.v",
5295 /* __atomic_fetch_sub */ "avm.v",
5296 /* __atomic_fetch_or */ "avm.v",
5297 /* __atomic_fetch_xor */ "avm.v",
5298 /* __atomic_fetch_and */ "avm.v",
5299 /* __atomic_fetch_nand */ "avm.v",
5300 /* __atomic_add_fetch */ "avm.v",
5301 /* __atomic_sub_fetch */ "avm.v",
5302 /* __atomic_or_fetch */ "avm.v",
5303 /* __atomic_xor_fetch */ "avm.v",
5304 /* __atomic_and_fetch */ "avm.v",
5305 /* __atomic_nand_fetch */ "avm.v"
5307 const char *template = templates[(atok - TOK___atomic_store)];
5309 atom = atom_ptr = NULL;
5310 size = 0; /* pacify compiler */
5311 next();
5312 skip('(');
5313 for (arg = 0;;) {
5314 expr_eq();
5315 switch (template[arg]) {
5316 case 'a':
5317 case 'A':
5318 atom_ptr = &vtop->type;
5319 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5320 expect("pointer");
5321 atom = pointed_type(atom_ptr);
5322 size = type_size(atom, &align);
5323 if (size > 8
5324 || (size & (size - 1))
5325 || (atok > TOK___atomic_compare_exchange
5326 && (0 == btype_size(atom->t & VT_BTYPE)
5327 || (atom->t & VT_BTYPE) == VT_PTR)))
5328 expect("integral or integer-sized pointer target type");
5329 /* GCC does not care either: */
5330 /* if (!(atom->t & VT_ATOMIC))
5331 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5332 break;
5334 case 'p':
5335 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5336 || type_size(pointed_type(&vtop->type), &align) != size)
5337 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5338 gen_assign_cast(atom_ptr);
5339 break;
5340 case 'v':
5341 gen_assign_cast(atom);
5342 break;
5343 case 'l':
5344 indir();
5345 gen_assign_cast(atom);
5346 break;
5347 case 's':
/* the 's' pointer is not passed to the helper: remember where the
   helper's result must be stored afterwards */
5348 save = 1;
5349 indir();
5350 store = *vtop;
5351 vpop();
5352 break;
5353 case 'm':
5354 gen_assign_cast(&int_type);
5355 break;
5356 case 'b':
5357 ct.t = VT_BOOL;
5358 gen_assign_cast(&ct);
5359 break;
5361 if ('.' == template[++arg])
5362 break;
5363 skip(',');
5365 skip(')');
5367 ct.t = VT_VOID;
5368 switch (template[arg + 1]) {
5369 case 'b':
5370 ct.t = VT_BOOL;
5371 break;
5372 case 'v':
5373 ct = *atom;
5374 break;
/* call the runtime helper __atomic_xxx_N */
5377 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5378 vpush_helper_func(tok_alloc_const(buf));
5379 vrott(arg - save + 1);
5380 gfunc_call(arg - save);
5382 vpush(&ct);
5383 PUT_R_RET(vtop, ct.t);
5384 t = ct.t & VT_BTYPE;
5385 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5386 #ifdef PROMOTE_RET
5387 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5388 #else
5389 vtop->type.t = VT_INT;
5390 #endif
5392 gen_cast(&ct);
5393 if (save) {
5394 vpush(&ct);
5395 *vtop = store;
5396 vswap();
5397 vstore();
/* Parse one unary-expression: constants, string literals, casts,
   prefix operators, builtins and identifiers, followed by the postfix
   operators ('++'/'--', '.', '->', '[]' and function calls).  The
   result is left on the value stack (vtop). */
5401 ST_FUNC void unary(void)
5403 int n, t, align, size, r, sizeof_caller;
5404 CType type;
5405 Sym *s;
5406 AttributeDef ad;
5408 /* generate line number info */
5409 if (debug_modes)
5410 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
5412 sizeof_caller = in_sizeof;
5413 in_sizeof = 0;
5414 type.ref = NULL;
5415 /* XXX: GCC 2.95.3 does not generate a table although it should be
5416 better here */
5417 tok_next:
5418 switch(tok) {
5419 case TOK_EXTENSION:
5420 next();
5421 goto tok_next;
5422 case TOK_LCHAR:
5423 #ifdef TCC_TARGET_PE
5424 t = VT_SHORT|VT_UNSIGNED;
5425 goto push_tokc;
5426 #endif
5427 case TOK_CINT:
5428 case TOK_CCHAR:
5429 t = VT_INT;
5430 push_tokc:
5431 type.t = t;
5432 vsetc(&type, VT_CONST, &tokc);
5433 next();
5434 break;
5435 case TOK_CUINT:
5436 t = VT_INT | VT_UNSIGNED;
5437 goto push_tokc;
5438 case TOK_CLLONG:
5439 t = VT_LLONG;
5440 goto push_tokc;
5441 case TOK_CULLONG:
5442 t = VT_LLONG | VT_UNSIGNED;
5443 goto push_tokc;
5444 case TOK_CFLOAT:
5445 t = VT_FLOAT;
5446 goto push_tokc;
5447 case TOK_CDOUBLE:
5448 t = VT_DOUBLE;
5449 goto push_tokc;
5450 case TOK_CLDOUBLE:
5451 t = VT_LDOUBLE;
5452 goto push_tokc;
5453 case TOK_CLONG:
5454 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5455 goto push_tokc;
5456 case TOK_CULONG:
5457 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5458 goto push_tokc;
5459 case TOK___FUNCTION__:
5460 if (!gnu_ext)
5461 goto tok_identifier;
5462 /* fall thru */
5463 case TOK___FUNC__:
5465 Section *sec;
5466 int len;
5467 /* special function name identifier */
5468 len = strlen(funcname) + 1;
5469 /* generate char[len] type */
5470 type.t = char_type.t;
5471 if (tcc_state->warn_write_strings & WARN_ON)
5472 type.t |= VT_CONSTANT;
5473 mk_pointer(&type);
5474 type.t |= VT_ARRAY;
5475 type.ref->c = len;
5476 sec = rodata_section;
5477 vpush_ref(&type, sec, sec->data_offset, len);
5478 if (!NODATA_WANTED)
5479 memcpy(section_ptr_add(sec, len), funcname, len);
5480 next();
5482 break;
5483 case TOK_LSTR:
5484 #ifdef TCC_TARGET_PE
5485 t = VT_SHORT | VT_UNSIGNED;
5486 #else
5487 t = VT_INT;
5488 #endif
5489 goto str_init;
5490 case TOK_STR:
5491 /* string parsing */
5492 t = char_type.t;
5493 str_init:
5494 if (tcc_state->warn_write_strings & WARN_ON)
5495 t |= VT_CONSTANT;
5496 type.t = t;
5497 mk_pointer(&type);
5498 type.t |= VT_ARRAY;
5499 memset(&ad, 0, sizeof(AttributeDef));
5500 ad.section = rodata_section;
5501 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5502 break;
5503 case '(':
5504 next();
5505 /* cast ? */
5506 if (parse_btype(&type, &ad, 0)) {
5507 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5508 skip(')');
5509 /* check ISOC99 compound literal */
5510 if (tok == '{') {
5511 /* data is allocated locally by default */
5512 if (global_expr)
5513 r = VT_CONST;
5514 else
5515 r = VT_LOCAL;
5516 /* all except arrays are lvalues */
5517 if (!(type.t & VT_ARRAY))
5518 r |= VT_LVAL;
5519 memset(&ad, 0, sizeof(AttributeDef));
5520 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5521 } else {
5522 if (sizeof_caller) {
5523 vpush(&type);
5524 return;
5526 unary();
5527 gen_cast(&type);
5529 } else if (tok == '{') {
5530 int saved_nocode_wanted = nocode_wanted;
5531 if (const_wanted && !(nocode_wanted & unevalmask))
5532 expect("constant");
5533 if (0 == local_scope)
5534 tcc_error("statement expression outside of function");
5535 /* save all registers */
5536 save_regs(0);
5537 /* statement expression : we do not accept break/continue
5538 inside as GCC does. We do retain the nocode_wanted state,
5539 as statement expressions can't ever be entered from the
5540 outside, so any reactivation of code emission (from labels
5541 or loop heads) can be disabled again after the end of it. */
5542 block(1);
5543 /* If the statement expr can be entered, then we retain the current
5544 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5545 If it can't be entered then the state is that from before the
5546 statement expression. */
5547 if (saved_nocode_wanted)
5548 nocode_wanted = saved_nocode_wanted;
5549 skip(')');
5550 } else {
5551 gexpr();
5552 skip(')');
5554 break;
5555 case '*':
5556 next();
5557 unary();
5558 indir();
5559 break;
5560 case '&':
5561 next();
5562 unary();
5563 /* functions names must be treated as function pointers,
5564 except for unary '&' and sizeof. Since we consider that
5565 functions are not lvalues, we only have to handle it
5566 there and in function calls. */
5567 /* arrays can also be used although they are not lvalues */
5568 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5569 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5570 test_lvalue();
5571 if (vtop->sym)
5572 vtop->sym->a.addrtaken = 1;
5573 mk_pointer(&vtop->type);
5574 gaddrof();
5575 break;
5576 case '!':
5577 next();
5578 unary();
5579 gen_test_zero(TOK_EQ);
5580 break;
5581 case '~':
5582 next();
5583 unary();
5584 vpushi(-1);
5585 gen_op('^');
5586 break;
5587 case '+':
5588 next();
5589 unary();
5590 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5591 tcc_error("pointer not accepted for unary plus");
5592 /* In order to force cast, we add zero, except for floating point
5593 where we really need an noop (otherwise -0.0 will be transformed
5594 into +0.0). */
5595 if (!is_float(vtop->type.t)) {
5596 vpushi(0);
5597 gen_op('+');
5599 break;
/* sizeof/_Alignof: the operand is parsed for its type only, under
   nocode_wanted, via expr_type() */
5600 case TOK_SIZEOF:
5601 case TOK_ALIGNOF1:
5602 case TOK_ALIGNOF2:
5603 case TOK_ALIGNOF3:
5604 t = tok;
5605 next();
5606 in_sizeof++;
5607 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5608 if (t == TOK_SIZEOF) {
5609 vpush_type_size(&type, &align);
5610 gen_cast_s(VT_SIZE_T);
5611 } else {
5612 type_size(&type, &align);
5613 s = NULL;
5614 if (vtop[1].r & VT_SYM)
5615 s = vtop[1].sym; /* hack: accessing previous vtop */
5616 if (s && s->a.aligned)
5617 align = 1 << (s->a.aligned - 1);
5618 vpushs(align);
5620 break;
5622 case TOK_builtin_expect:
5623 /* __builtin_expect is a no-op for now */
5624 parse_builtin_params(0, "ee");
5625 vpop();
5626 break;
5627 case TOK_builtin_types_compatible_p:
5628 parse_builtin_params(0, "tt");
5629 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5630 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5631 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5632 vtop -= 2;
5633 vpushi(n);
5634 break;
5635 case TOK_builtin_choose_expr:
5637 int64_t c;
5638 next();
5639 skip('(');
5640 c = expr_const64();
5641 skip(',');
5642 if (!c) {
5643 nocode_wanted++;
5645 expr_eq();
5646 if (!c) {
5647 vpop();
5648 nocode_wanted--;
5650 skip(',');
5651 if (c) {
5652 nocode_wanted++;
5654 expr_eq();
5655 if (c) {
5656 vpop();
5657 nocode_wanted--;
5659 skip(')');
5661 break;
5662 case TOK_builtin_constant_p:
5663 constant_p = 1;
5664 parse_builtin_params(1, "e");
5665 n = constant_p &&
5666 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5667 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5668 vtop--;
5669 vpushi(n);
5670 break;
5671 case TOK_builtin_frame_address:
5672 case TOK_builtin_return_address:
5674 int tok1 = tok;
5675 int64_t level;
5676 next();
5677 skip('(');
5678 level = expr_const64();
5679 if (level < 0) {
5680 tcc_error("%s only takes positive integers",
5681 tok1 == TOK_builtin_return_address ?
5682 "__builtin_return_address" :
5683 "__builtin_frame_address");
5685 skip(')');
5686 type.t = VT_VOID;
5687 mk_pointer(&type);
5688 vset(&type, VT_LOCAL, 0); /* local frame */
5689 while (level--) {
5690 #ifdef TCC_TARGET_RISCV64
5691 vpushi(2*PTR_SIZE);
5692 gen_op('-');
5693 #endif
5694 mk_pointer(&vtop->type);
5695 indir(); /* -> parent frame */
5697 if (tok1 == TOK_builtin_return_address) {
5698 // assume return address is just above frame pointer on stack
5699 #ifdef TCC_TARGET_ARM
5700 vpushi(2*PTR_SIZE);
5701 gen_op('+');
5702 #elif defined TCC_TARGET_RISCV64
5703 vpushi(PTR_SIZE);
5704 gen_op('-');
5705 #else
5706 vpushi(PTR_SIZE);
5707 gen_op('+');
5708 #endif
5709 mk_pointer(&vtop->type);
5710 indir();
5713 break;
5714 #ifdef TCC_TARGET_RISCV64
5715 case TOK_builtin_va_start:
5716 parse_builtin_params(0, "ee");
5717 r = vtop->r & VT_VALMASK;
5718 if (r == VT_LLOCAL)
5719 r = VT_LOCAL;
5720 if (r != VT_LOCAL)
5721 tcc_error("__builtin_va_start expects a local variable");
5722 gen_va_start();
5723 vstore();
5724 break;
5725 #endif
5726 #ifdef TCC_TARGET_X86_64
5727 #ifdef TCC_TARGET_PE
5728 case TOK_builtin_va_start:
5729 parse_builtin_params(0, "ee");
5730 r = vtop->r & VT_VALMASK;
5731 if (r == VT_LLOCAL)
5732 r = VT_LOCAL;
5733 if (r != VT_LOCAL)
5734 tcc_error("__builtin_va_start expects a local variable");
5735 vtop->r = r;
5736 vtop->type = char_pointer_type;
5737 vtop->c.i += 8;
5738 vstore();
5739 break;
5740 #else
5741 case TOK_builtin_va_arg_types:
5742 parse_builtin_params(0, "t");
5743 vpushi(classify_x86_64_va_arg(&vtop->type));
5744 vswap();
5745 vpop();
5746 break;
5747 #endif
5748 #endif
5750 #ifdef TCC_TARGET_ARM64
5751 case TOK_builtin_va_start: {
5752 parse_builtin_params(0, "ee");
5753 //xx check types
5754 gen_va_start();
5755 vpushi(0);
5756 vtop->type.t = VT_VOID;
5757 break;
5759 case TOK_builtin_va_arg: {
5760 parse_builtin_params(0, "et");
5761 type = vtop->type;
5762 vpop();
5763 //xx check types
5764 gen_va_arg(&type);
5765 vtop->type = type;
5766 break;
5768 case TOK___arm64_clear_cache: {
5769 parse_builtin_params(0, "ee");
5770 gen_clear_cache();
5771 vpushi(0);
5772 vtop->type.t = VT_VOID;
5773 break;
5775 #endif
5777 /* atomic operations */
5778 case TOK___atomic_store:
5779 case TOK___atomic_load:
5780 case TOK___atomic_exchange:
5781 case TOK___atomic_compare_exchange:
5782 case TOK___atomic_fetch_add:
5783 case TOK___atomic_fetch_sub:
5784 case TOK___atomic_fetch_or:
5785 case TOK___atomic_fetch_xor:
5786 case TOK___atomic_fetch_and:
5787 case TOK___atomic_fetch_nand:
5788 case TOK___atomic_add_fetch:
5789 case TOK___atomic_sub_fetch:
5790 case TOK___atomic_or_fetch:
5791 case TOK___atomic_xor_fetch:
5792 case TOK___atomic_and_fetch:
5793 case TOK___atomic_nand_fetch:
5794 parse_atomic(tok);
5795 break;
5797 /* pre operations */
5798 case TOK_INC:
5799 case TOK_DEC:
5800 t = tok;
5801 next();
5802 unary();
5803 inc(0, t);
5804 break;
5805 case '-':
5806 next();
5807 unary();
5808 if (is_float(vtop->type.t)) {
5809 gen_opif(TOK_NEG);
5810 } else {
5811 vpushi(0);
5812 vswap();
5813 gen_op('-');
5815 break;
5816 case TOK_LAND:
5817 if (!gnu_ext)
5818 goto tok_identifier;
5819 next();
5820 /* allow to take the address of a label */
5821 if (tok < TOK_UIDENT)
5822 expect("label identifier");
5823 s = label_find(tok);
5824 if (!s) {
5825 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5826 } else {
5827 if (s->r == LABEL_DECLARED)
5828 s->r = LABEL_FORWARD;
5830 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5831 s->type.t = VT_VOID;
5832 mk_pointer(&s->type);
5833 s->type.t |= VT_STATIC;
5835 vpushsym(&s->type, s);
5836 next();
5837 break;
5839 case TOK_GENERIC:
5841 CType controlling_type;
5842 int has_default = 0;
5843 int has_match = 0;
5844 int learn = 0;
5845 TokenString *str = NULL;
5846 int saved_const_wanted = const_wanted;
5848 next();
5849 skip('(');
5850 const_wanted = 0;
5851 expr_type(&controlling_type, expr_eq);
5852 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5853 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5854 mk_pointer(&controlling_type);
5855 const_wanted = saved_const_wanted;
5856 for (;;) {
5857 learn = 0;
5858 skip(',');
5859 if (tok == TOK_DEFAULT) {
5860 if (has_default)
5861 tcc_error("too many 'default'");
5862 has_default = 1;
5863 if (!has_match)
5864 learn = 1;
5865 next();
5866 } else {
5867 AttributeDef ad_tmp;
5868 int itmp;
5869 CType cur_type;
5871 parse_btype(&cur_type, &ad_tmp, 0);
5872 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5873 if (compare_types(&controlling_type, &cur_type, 0)) {
5874 if (has_match) {
5875 tcc_error("type match twice");
5877 has_match = 1;
5878 learn = 1;
5881 skip(':');
5882 if (learn) {
5883 if (str)
5884 tok_str_free(str);
5885 skip_or_save_block(&str);
5886 } else {
5887 skip_or_save_block(NULL);
5889 if (tok == ')')
5890 break;
5892 if (!str) {
5893 char buf[60];
5894 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5895 tcc_error("type '%s' does not match any association", buf);
5897 begin_macro(str, 1);
5898 next();
5899 expr_eq();
5900 if (tok != TOK_EOF)
5901 expect(",");
5902 end_macro();
5903 next();
5904 break;
5906 // special qnan , snan and infinity values
5907 case TOK___NAN__:
5908 n = 0x7fc00000;
5909 special_math_val:
5910 vpushi(n);
5911 vtop->type.t = VT_FLOAT;
5912 next();
5913 break;
5914 case TOK___SNAN__:
5915 n = 0x7f800001;
5916 goto special_math_val;
5917 case TOK___INF__:
5918 n = 0x7f800000;
5919 goto special_math_val;
/* anything else: a plain identifier -- variable, enum constant, or an
   implicitly declared function */
5921 default:
5922 tok_identifier:
5923 t = tok;
5924 next();
5925 if (t < TOK_UIDENT)
5926 expect("identifier");
5927 s = sym_find(t);
5928 if (!s || IS_ASM_SYM(s)) {
5929 const char *name = get_tok_str(t, NULL);
5930 if (tok != '(')
5931 tcc_error("'%s' undeclared", name);
5932 /* for simple function calls, we tolerate undeclared
5933 external reference to int() function */
5934 tcc_warning_c(warn_implicit_function_declaration)(
5935 "implicit declaration of function '%s'", name);
5936 s = external_global_sym(t, &func_old_type);
5939 r = s->r;
5940 /* A symbol that has a register is a local register variable,
5941 which starts out as VT_LOCAL value. */
5942 if ((r & VT_VALMASK) < VT_CONST)
5943 r = (r & ~VT_VALMASK) | VT_LOCAL;
5945 vset(&s->type, r, s->c);
5946 /* Point to s as backpointer (even without r&VT_SYM).
5947 Will be used by at least the x86 inline asm parser for
5948 regvars. */
5949 vtop->sym = s;
5951 if (r & VT_SYM) {
5952 vtop->c.i = 0;
5953 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5954 vtop->c.i = s->enum_val;
5956 break;
5959 /* post operations */
5960 while (1) {
5961 if (tok == TOK_INC || tok == TOK_DEC) {
5962 inc(1, tok);
5963 next();
5964 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5965 int qualifiers, cumofs = 0;
5966 /* field */
5967 if (tok == TOK_ARROW)
5968 indir();
5969 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5970 test_lvalue();
5971 gaddrof();
5972 /* expect pointer on structure */
5973 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5974 expect("struct or union");
5975 if (tok == TOK_CDOUBLE)
5976 expect("field name");
5977 next();
5978 if (tok == TOK_CINT || tok == TOK_CUINT)
5979 expect("field name");
5980 s = find_field(&vtop->type, tok, &cumofs);
5981 /* add field offset to pointer */
5982 vtop->type = char_pointer_type; /* change type to 'char *' */
5983 vpushi(cumofs);
5984 gen_op('+');
5985 /* change type to field type, and set to lvalue */
5986 vtop->type = s->type;
5987 vtop->type.t |= qualifiers;
5988 /* an array is never an lvalue */
5989 if (!(vtop->type.t & VT_ARRAY)) {
5990 vtop->r |= VT_LVAL;
5991 #ifdef CONFIG_TCC_BCHECK
5992 /* if bound checking, the referenced pointer must be checked */
5993 if (tcc_state->do_bounds_check)
5994 vtop->r |= VT_MUSTBOUND;
5995 #endif
5997 next();
5998 } else if (tok == '[') {
5999 next();
6000 gexpr();
6001 gen_op('+');
6002 indir();
6003 skip(']');
6004 } else if (tok == '(') {
6005 SValue ret;
6006 Sym *sa;
6007 int nb_args, ret_nregs, ret_align, regsize, variadic;
6009 /* function call */
6010 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6011 /* pointer test (no array accepted) */
6012 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6013 vtop->type = *pointed_type(&vtop->type);
6014 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6015 goto error_func;
6016 } else {
6017 error_func:
6018 expect("function pointer");
6020 } else {
6021 vtop->r &= ~VT_LVAL; /* no lvalue */
6023 /* get return type */
6024 s = vtop->type.ref;
6025 next();
6026 sa = s->next; /* first parameter */
6027 nb_args = regsize = 0;
6028 ret.r2 = VT_CONST;
6029 /* compute first implicit argument if a structure is returned */
6030 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6031 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6032 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6033 &ret_align, &regsize);
6034 if (ret_nregs <= 0) {
6035 /* get some space for the returned structure */
6036 size = type_size(&s->type, &align);
6037 #ifdef TCC_TARGET_ARM64
6038 /* On arm64, a small struct is return in registers.
6039 It is much easier to write it to memory if we know
6040 that we are allowed to write some extra bytes, so
6041 round the allocated space up to a power of 2: */
6042 if (size < 16)
6043 while (size & (size - 1))
6044 size = (size | (size - 1)) + 1;
6045 #endif
6046 loc = (loc - size) & -align;
6047 ret.type = s->type;
6048 ret.r = VT_LOCAL | VT_LVAL;
6049 /* pass it as 'int' to avoid structure arg passing
6050 problems */
6051 vseti(VT_LOCAL, loc);
6052 #ifdef CONFIG_TCC_BCHECK
6053 if (tcc_state->do_bounds_check)
6054 --loc;
6055 #endif
6056 ret.c = vtop->c;
6057 if (ret_nregs < 0)
6058 vtop--;
6059 else
6060 nb_args++;
6062 } else {
6063 ret_nregs = 1;
6064 ret.type = s->type;
6067 if (ret_nregs > 0) {
6068 /* return in register */
6069 ret.c.i = 0;
6070 PUT_R_RET(&ret, ret.type.t);
6072 if (tok != ')') {
6073 for(;;) {
6074 expr_eq();
6075 gfunc_param_typed(s, sa);
6076 nb_args++;
6077 if (sa)
6078 sa = sa->next;
6079 if (tok == ')')
6080 break;
6081 skip(',');
6084 if (sa)
6085 tcc_error("too few arguments to function");
6086 skip(')');
6087 gfunc_call(nb_args);
6089 if (ret_nregs < 0) {
6090 vsetc(&ret.type, ret.r, &ret.c);
6091 #ifdef TCC_TARGET_RISCV64
6092 arch_transfer_ret_regs(1);
6093 #endif
6094 } else {
6095 /* return value */
6096 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6097 vsetc(&ret.type, r, &ret.c);
6098 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6101 /* handle packed struct return */
6102 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6103 int addr, offset;
6105 size = type_size(&s->type, &align);
6106 /* We're writing whole regs often, make sure there's enough
6107 space. Assume register size is power of 2. */
6108 if (regsize > align)
6109 align = regsize;
6110 loc = (loc - size) & -align;
6111 addr = loc;
6112 offset = 0;
6113 for (;;) {
6114 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6115 vswap();
6116 vstore();
6117 vtop--;
6118 if (--ret_nregs == 0)
6119 break;
6120 offset += regsize;
6122 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6125 /* Promote char/short return values. This is matters only
6126 for calling function that were not compiled by TCC and
6127 only on some architectures. For those where it doesn't
6128 matter we expect things to be already promoted to int,
6129 but not larger. */
6130 t = s->type.t & VT_BTYPE;
6131 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6132 #ifdef PROMOTE_RET
6133 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6134 #else
6135 vtop->type.t = VT_INT;
6136 #endif
6139 if (s->f.func_noreturn) {
6140 if (debug_modes)
6141 tcc_tcov_block_end(tcc_state, -1);
6142 CODE_OFF();
6144 } else {
6145 break;
6150 #ifndef precedence_parser /* original top-down parser */
6152 static void expr_prod(void)
6154 int t;
6156 unary();
6157 while ((t = tok) == '*' || t == '/' || t == '%') {
6158 next();
6159 unary();
6160 gen_op(t);
6164 static void expr_sum(void)
6166 int t;
6168 expr_prod();
6169 while ((t = tok) == '+' || t == '-') {
6170 next();
6171 expr_prod();
6172 gen_op(t);
6176 static void expr_shift(void)
6178 int t;
6180 expr_sum();
6181 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6182 next();
6183 expr_sum();
6184 gen_op(t);
6188 static void expr_cmp(void)
6190 int t;
6192 expr_shift();
6193 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6194 t == TOK_ULT || t == TOK_UGE) {
6195 next();
6196 expr_shift();
6197 gen_op(t);
6201 static void expr_cmpeq(void)
6203 int t;
6205 expr_cmp();
6206 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6207 next();
6208 expr_cmp();
6209 gen_op(t);
6213 static void expr_and(void)
6215 expr_cmpeq();
6216 while (tok == '&') {
6217 next();
6218 expr_cmpeq();
6219 gen_op('&');
6223 static void expr_xor(void)
6225 expr_and();
6226 while (tok == '^') {
6227 next();
6228 expr_and();
6229 gen_op('^');
6233 static void expr_or(void)
6235 expr_xor();
6236 while (tok == '|') {
6237 next();
6238 expr_xor();
6239 gen_op('|');
6243 static void expr_landor(int op);
6245 static void expr_land(void)
6247 expr_or();
6248 if (tok == TOK_LAND)
6249 expr_landor(tok);
6252 static void expr_lor(void)
6254 expr_land();
6255 if (tok == TOK_LOR)
6256 expr_landor(tok);
/* parse the operand at the precedence level just above OP ('&&' or '||') */
# define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
#else /* defined precedence_parser */
/* precedence-climbing parser: same helpers expressed via expr_infix() */
# define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
# define expr_lor() unary(), expr_infix(1)
6264 static int precedence(int tok)
6266 switch (tok) {
6267 case TOK_LOR: return 1;
6268 case TOK_LAND: return 2;
6269 case '|': return 3;
6270 case '^': return 4;
6271 case '&': return 5;
6272 case TOK_EQ: case TOK_NE: return 6;
6273 relat: case TOK_ULT: case TOK_UGE: return 7;
6274 case TOK_SHL: case TOK_SAR: return 8;
6275 case '+': case '-': return 9;
6276 case '*': case '/': case '%': return 10;
6277 default:
6278 if (tok >= TOK_ULE && tok <= TOK_GT)
6279 goto relat;
6280 return 0;
/* precomputed precedence for tokens 0..255 (covers TCC's operator tokens);
   anything outside the table gets precedence 0 via the macro below */
static unsigned char prec[256];

static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}

/* macro-hygiene fix: parenthesize the argument so the lookup stays
   correct for any expression argument, not just a plain identifier */
#define precedence(i) ((unsigned)(i) < 256 ? prec[(i)] : 0)
static void expr_landor(int op);

/* precedence-climbing: parse and generate all binary operators whose
   precedence is at least P; the left operand is already on the value stack */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            /* short-circuit operators need special jump handling */
            expr_landor(t);
        } else {
            next();
            unary();
            /* right operand binds tighter: recurse one level up */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
#endif
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
{
    int c = -1;
    /* only a plain constant (and not a weak symbol, whose address may
       still be null at run time) can be decided at compile time */
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        vdup();
        gen_cast_s(VT_BOOL); /* normalize to 0/1 on the duplicate */
        c = vtop->c.i;
        vpop();
    }
    return c;
}
/* generate code for a chain of '&&' or '||' (OP) operators.
   i  = 1 for '&&', 0 for '||' (the value that lets evaluation continue)
   cc = remains 1 while every operand was a compile-time constant
   f  = set once an operand statically decides the result (the rest of
        the chain is parsed under nocode_wanted)
   t  = chain of pending conditional jumps */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0; /* runtime operand: must generate code */
        else if (c != i)
            nocode_wanted++, f = 1; /* result decided; discard the rest */
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t); /* jump out of the chain on short-circuit */
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* statically known result */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f; /* undo the suppression started above */
    } else {
        /* runtime result: leave it as a VT_CMP with the jump chain */
        gvtst_set(i, t);
    }
}
6356 static int is_cond_bool(SValue *sv)
6358 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6359 && (sv->type.t & VT_BTYPE) == VT_INT)
6360 return (unsigned)sv->c.i < 2;
6361 if (sv->r == VT_CMP)
6362 return 1;
6363 return 0;
/* conditional-expression: expr_lor [ '?' expr ':' expr_cond ].
   c is the 3-way static value of the condition (0/1/-1 for unknown),
   g is set for the GNU "a ?: b" extension. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0); /* jump to ':' branch if condition false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup(); /* 'a ?: b' evaluates a only once */
            tt = gvtst(0, 0);
        }

        if (c == 0)
            nocode_wanted++; /* 'then' branch is dead */
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* skip over the ':' branch */
            gsym(tt);
        } else
            u = 0;

        if (c == 0)
            nocode_wanted--;
        if (c == 1)
            nocode_wanted++; /* 'else' branch is dead */
        skip(':');
        expr_cond();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            gen_cast(&type);
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error  with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        if (c == 1)
            nocode_wanted--;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* move both branch results into the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6495 static void expr_eq(void)
6497 int t;
6499 expr_cond();
6500 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6501 test_lvalue();
6502 next();
6503 if (t == '=') {
6504 expr_eq();
6505 } else {
6506 vdup();
6507 expr_eq();
6508 gen_op(TOK_ASSIGN_OP(t));
6510 vstore();
6514 ST_FUNC void gexpr(void)
6516 while (1) {
6517 expr_eq();
6518 if (tok != ',')
6519 break;
6520 constant_p &= (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6521 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6522 vpop();
6523 next();
6527 /* parse a constant expression and return value in vtop. */
6528 static void expr_const1(void)
6530 const_wanted++;
6531 nocode_wanted += unevalmask + 1;
6532 expr_cond();
6533 nocode_wanted -= unevalmask + 1;
6534 const_wanted--;
6537 /* parse an integer constant and return its value. */
6538 static inline int64_t expr_const64(void)
6540 int64_t c;
6541 expr_const1();
6542 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6543 expect("constant expression");
6544 c = vtop->c.i;
6545 vpop();
6546 return c;
6549 /* parse an integer constant and return its value.
6550 Complain if it doesn't fit 32bit (signed or unsigned). */
6551 ST_FUNC int expr_const(void)
6553 int c;
6554 int64_t wc = expr_const64();
6555 c = wc;
6556 if (c != wc && (unsigned)c != wc)
6557 tcc_error("constant exceeds 32 bit");
6558 return c;
/* ------------------------------------------------------------------------- */
/* return from function */

#ifndef TCC_TARGET_ARM64
/* move the return value (on vtop) into the location dictated by the
   target ABI for FUNC_TYPE: register(s), or the caller-provided buffer
   for large structs */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        /* ask the target how this struct is returned */
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned for whole-register
               loads, first copy it to a properly aligned local slot */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                /* load each register-sized chunk into the next return reg */
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
#endif
6628 static void check_func_return(void)
6630 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6631 return;
6632 if (!strcmp (funcname, "main")
6633 && (func_vt.t & VT_BTYPE) == VT_INT) {
6634 /* main returns 0 by default */
6635 vpushi(0);
6636 gen_assign_cast(&func_vt);
6637 gfunc_return(&func_vt);
6638 } else {
6639 tcc_warning("function might return no value: '%s'", funcname);
6643 /* ------------------------------------------------------------------------- */
6644 /* switch/case */
6646 static int case_cmpi(const void *pa, const void *pb)
6648 int64_t a = (*(struct case_t**) pa)->v1;
6649 int64_t b = (*(struct case_t**) pb)->v1;
6650 return a < b ? -1 : a > b;
6653 static int case_cmpu(const void *pa, const void *pb)
6655 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6656 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6657 return a < b ? -1 : a > b;
/* emit a "jump to A if true" test for the condition on vtop */
static void gtst_addr(int t, int a)
{
    int j = gvtst(0, t);
    gsym_addr(j, a);
}
/* generate the dispatch code for the sorted case list BASE[0..LEN-1];
   the switch value is on vtop (duplicated for each comparison).
   Large lists use binary search, small ones a linear scan.
   *BSYM accumulates the "no case matched" jump chain. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* case range: x <= v2 && x >= v1 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6721 /* ------------------------------------------------------------------------- */
6722 /* __attribute__((cleanup(fn))) */
/* call the cleanup function of every __attribute__((cleanup)) variable
   registered in the current scope's chain, stopping at STOP (the chain
   entry of an enclosing scope, or NULL for all) */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;     /* the cleanup function */
        Sym *vs = cls->prev_tok; /* the variable being cleaned up */

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        /* the cleanup function receives a pointer to the variable */
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* run the cleanups needed when jumping from the current scope to the
   scope whose cleanup state is CLEANUPSTATE (the goto target's scope) */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
      ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
      ;
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
      ;

    /* run cleanups from the current scope down to the common ancestor */
    try_call_scope_cleanup(cc);
}
/* call 'func' for each __attribute__((cleanup(func))) */
/* called when leaving scope O's child: emits cleanup calls for pending
   forward gotos that jump out through this scope, then for normal fall-
   through exit */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    /* walk pending gotos whose recorded cleanup depth is deeper than O's */
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            if (!jmp)
                jmp = gjmp(0); /* normal control flow skips the goto stubs */
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0); /* re-chain: continue at the next level */
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    /* cleanups for the regular (non-goto) exit of the scope */
    try_call_scope_cleanup(o->cl.s);
}
6788 /* ------------------------------------------------------------------------- */
6789 /* VLA */
/* restore the stack pointer from the VLA save slot at SAVED_LOC;
   a zero slot means "nothing to restore".
   (parameter renamed from 'loc' — it shadowed the global 'loc'
   local-variable index, which was confusing although harmless here) */
static void vla_restore(int saved_loc)
{
    if (saved_loc)
        gen_vla_sp_restore(saved_loc);
}
6797 static void vla_leave(struct scope *o)
6799 struct scope *c = cur_scope, *v = NULL;
6800 for (; c != o && c; c = c->prev)
6801 if (c->vla.num)
6802 v = c;
6803 if (v)
6804 vla_restore(v->vla.locorig);
6807 /* ------------------------------------------------------------------------- */
6808 /* local scopes */
6810 static void new_scope(struct scope *o)
6812 /* copy and link previous scope */
6813 *o = *cur_scope;
6814 o->prev = cur_scope;
6815 cur_scope = o;
6816 cur_scope->vla.num = 0;
6818 /* record local declaration stack position */
6819 o->lstk = local_stack;
6820 o->llstk = local_label_stack;
6821 ++local_scope;
/* leave scope O and make its parent the current scope again,
   running VLA restores and pending cleanup handling on the way out */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;
}
6848 /* leave a scope via break/continue(/goto) */
6849 static void leave_scope(struct scope *o)
6851 if (!o)
6852 return;
6853 try_call_scope_cleanup(o->cl.s);
6854 vla_leave(o);
/* short versions for scopes with 'if/do/while/switch' which can
   declare only types (of struct/union/enum) */
static void new_scope_s(struct scope *o)
{
    /* only the symbol stack position needs saving here */
    o->lstk = local_stack;
    ++local_scope;
}
/* counterpart of new_scope_s(): drop the types declared in the scope */
static void prev_scope_s(struct scope *o)
{
    sym_pop(&local_stack, o->lstk, 0);
    --local_scope;
}
6871 /* ------------------------------------------------------------------------- */
6872 /* call block from 'for do while' loops */
/* parse the body of a loop (or switch): temporarily install BSYM/CSYM
   as the current break/continue jump chains, then restore the old ones.
   CSYM is NULL for switch, which accepts break but not continue. */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* parse and generate code for one statement.  With IS_EXPR set, this is
   the body of a GNU statement expression and the value of the last
   expression statement is kept on the value stack. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* jump over the 'then' branch if false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }
        prev_scope_s(&o);

    } else if (t == TOK_WHILE) {
        new_scope_s(&o);
        d = gind(); /* loop start */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* exit jump, doubles as break chain */
        b = 0;           /* continue chain */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d); /* continue goes back to the condition */
        gsym(a);
        prev_scope_s(&o);

    } else if (t == '{') {
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (is_expr)
                    vpop(); /* only the last statement's value is kept */
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return(); /* this was the function's top-level block */

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o); /* full scope: the init clause may declare variables */

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl(VT_JMP)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind(); /* c = condition, d = body target of the back jump */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment clause: executed after the body, before re-test */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        new_scope_s(&o);
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b); /* continue lands on the condition */
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0);
        gsym_addr(c, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a; /* break chain lives in local 'a' below */
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);
        prev_scope_s(&o);

        if (sw->nocode_wanted)
            goto skip_switch;
        /* sort the collected cases, then check for overlaps */
        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");
        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
    skip_switch:
        /* break label */
        gsym(a);

        dynarray_reset(&sw->p, &sw->n);
        cur_switch = sw->prev;
        tcc_free(sw);

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case ranges: 'case 1 ... 5:' */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        /* case and default are unreachable from a switch under nocode_wanted */
        if (!cur_switch->nocode_wanted)
            cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    /* resolve gotos that were waiting for this label */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
              {
                /* Accept attributes after labels (e.g. 'unused') */
                AttributeDef ad_tmp;
                parse_attribute(&ad_tmp);
              }
            if (debug_modes)
                tcc_tcov_reset_ind(tcc_state);
            vla_restore(cur_scope->vla.loc);
            if (tok != '}')
                goto again;
            /* we accept this, but it is a mistake */
            tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (is_expr) {
                    vpop(); /* drop the previous statement's value */
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while (1) {
        int t = tok;
        /* at nesting level 0 these tokens terminate the block */
        if (level == 0
            && (t == ','
                || t == ';'
                || t == '}'
                || t == ')'
                || t == ']'))
            break;
        if (t == TOK_EOF) {
            /* EOF is only acceptable when merely skipping at level 0 */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        next();
        if (t == '{' || t == '(' || t == '[') {
            level++;
        } else if (t == '}' || t == ')' || t == ']') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the saved token string */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
#define EXPR_CONST 1
#define EXPR_ANY 2

/* parse one initializer element; EXPR_TYPE selects between a constant
   expression (static initializers) and any assignment expression */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            /* dllimported symbols have no load-time address */
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
#if 1
/* sanity check: an initializer must never write past the space reserved
   for it (section data for statics, local frame otherwise) */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
#else
#define init_assert(sec, offset)
#endif
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local storage: generate a call to memset(dst, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
        /* note: the last two arguments are pushed in reverse order on ARM */
#ifdef TCC_TARGET_ARM
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
/* flags passed through decl_initializer()/decl_designator() */
#define DIF_FIRST 1
#define DIF_SIZE_ONLY 2
#define DIF_HAVE_ELEM 4
#define DIF_CLEAR 8
/* delete relocations for specified range c ... c + size. Unfortunately
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the relocation array in place, dropping entries that
       target the overwritten range */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
7385 static void decl_design_flex(init_params *p, Sym *ref, int index)
7387 if (ref == p->flex_array_ref) {
7388 if (index >= ref->c)
7389 ref->c = index + 1;
7390 } else if (ref->c < 0)
7391 tcc_error("flexible array has zero size in this context");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c).  This returns the new length of that.  */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    if (gnu_ext && tok >= TOK_UIDENT) {
        /* GNU 'fieldname:' designator syntax */
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                /* GNU '[lo ... hi]' range designator */
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs;
        }
        cur_field = NULL; /* a designator overrides positional tracking */
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* positional initialization: advance to the next element/field */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* Skip bitfield padding. Also with size 32 and 64. */
            while (f && (f->v & SYM_FIRST_ANOM) &&
                   is_integer_btype(f->type.t & VT_BTYPE))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        /* range designator: replicate the parsed value into the
           remaining nb_elems-1 elements */
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7529 /* store a value or an expression directly in global data or in local array */
7530 static void init_putv(init_params *p, CType *type, unsigned long c)
7532 int bt;
7533 void *ptr;
7534 CType dtype;
7535 int size, align;
7536 Section *sec = p->sec;
7537 uint64_t val;
/* The value to store is on top of the value stack (vtop); it is
   consumed on every path out of this function. */
7539 dtype = *type;
7540 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7542 size = type_size(type, &align);
7543 if (type->t & VT_BITFIELD)
7544 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7545 init_assert(p, c + size);
7547 if (sec) {
/* static initializer: value must be a (possibly symbol-relative)
   compile-time constant, written directly into section data */
7548 /* XXX: not portable */
7549 /* XXX: generate error if incorrect relocation */
7550 gen_assign_cast(&dtype);
7551 bt = type->t & VT_BTYPE;
/* only pointer-sized integers (or pointers) can carry a symbol
   reference via a relocation; everything else must be a plain
   constant */
7553 if ((vtop->r & VT_SYM)
7554 && bt != VT_PTR
7555 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7556 || (type->t & VT_BITFIELD))
7557 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7559 tcc_error("initializer element is not computable at load time");
7561 if (NODATA_WANTED) {
/* no static data output wanted: just drop the value */
7562 vtop--;
7563 return;
7566 ptr = sec->data + c;
7567 val = vtop->c.i;
7569 /* XXX: make code faster ? */
7570 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7571 vtop->sym->v >= SYM_FIRST_ANOM &&
7572 /* XXX This rejects compound literals like
7573 '(void *){ptr}'. The problem is that '&sym' is
7574 represented the same way, which would be ruled out
7575 by the SYM_FIRST_ANOM check above, but also '"string"'
7576 in 'char *p = "string"' is represented the same
7577 with the type being VT_PTR and the symbol being an
7578 anonymous one. That is, there's no difference in vtop
7579 between '(void *){x}' and '&(void *){x}'. Ignore
7580 pointer typed entities here. Hopefully no real code
7581 will ever use compound literals with scalar type. */
7582 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7583 /* These come from compound literals, memcpy stuff over. */
7584 Section *ssec;
7585 ElfSym *esym;
7586 ElfW_Rel *rel;
7587 esym = elfsym(vtop->sym);
7588 ssec = tcc_state->sections[esym->st_shndx];
7589 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7590 if (ssec->reloc) {
7591 /* We need to copy over all memory contents, and that
7592 includes relocations. Use the fact that relocs are
7593 created it order, so look from the end of relocs
7594 until we hit one before the copied region. */
7595 unsigned long relofs = ssec->reloc->data_offset;
7596 while (relofs >= sizeof(*rel)) {
7597 relofs -= sizeof(*rel);
7598 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7599 if (rel->r_offset >= esym->st_value + size)
7600 continue;
7601 if (rel->r_offset < esym->st_value)
7602 break;
/* duplicate the relocation, rebased to the copied region's
   offset in the destination section */
7603 put_elf_reloca(symtab_section, sec,
7604 c + rel->r_offset - esym->st_value,
7605 ELFW(R_TYPE)(rel->r_info),
7606 ELFW(R_SYM)(rel->r_info),
7607 #if PTR_SIZE == 8
7608 rel->r_addend
7609 #else
7611 #endif
7615 } else {
7616 if (type->t & VT_BITFIELD) {
/* read-modify-write the bytes covered by the bit-field,
   up to 8 bits per iteration */
7617 int bit_pos, bit_size, bits, n;
7618 unsigned char *p, v, m;
7619 bit_pos = BIT_POS(vtop->type.t);
7620 bit_size = BIT_SIZE(vtop->type.t);
7621 p = (unsigned char*)ptr + (bit_pos >> 3);
7622 bit_pos &= 7, bits = 0;
7623 while (bit_size) {
7624 n = 8 - bit_pos;
7625 if (n > bit_size)
7626 n = bit_size;
7627 v = val >> bits << bit_pos;
7628 m = ((1 << n) - 1) << bit_pos;
7629 *p = (*p & ~m) | (v & m);
7630 bits += n, bit_size -= n, bit_pos = 0, ++p;
7632 } else
/* scalar store in target (little-endian) byte order */
7633 switch(bt) {
7634 case VT_BOOL:
7635 *(char *)ptr = val != 0;
7636 break;
7637 case VT_BYTE:
7638 *(char *)ptr = val;
7639 break;
7640 case VT_SHORT:
7641 write16le(ptr, val);
7642 break;
7643 case VT_FLOAT:
7644 write32le(ptr, val);
7645 break;
7646 case VT_DOUBLE:
7647 write64le(ptr, val);
7648 break;
7649 case VT_LDOUBLE:
7650 #if defined TCC_IS_NATIVE_387
7651 /* Host and target platform may be different but both have x87.
7652 On windows, tcc does not use VT_LDOUBLE, except when it is a
7653 cross compiler. In this case a mingw gcc as host compiler
7654 comes here with 10-byte long doubles, while msvc or tcc won't.
7655 tcc itself can still translate by asm.
7656 In any case we avoid possibly random bytes 11 and 12.
7658 if (sizeof (long double) >= 10)
7659 memcpy(ptr, &vtop->c.ld, 10);
7660 #ifdef __TINYC__
7661 else if (sizeof (long double) == sizeof (double))
7662 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7663 #endif
7664 else if (vtop->c.ld == 0.0)
7666 else
7667 #endif
7668 /* For other platforms it should work natively, but may not work
7669 for cross compilers */
7670 if (sizeof(long double) == LDOUBLE_SIZE)
7671 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE)
7672 else if (sizeof(double) == LDOUBLE_SIZE)
7673 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7674 #ifndef TCC_CROSS_TEST
7675 else
7676 tcc_error("can't cross compile long double constants");
7677 #endif
7678 break;
7680 #if PTR_SIZE == 8
7681 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7682 case VT_LLONG:
7683 case VT_PTR:
7684 if (vtop->r & VT_SYM)
7685 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7686 else
7687 write64le(ptr, val);
7688 break;
7689 case VT_INT:
7690 write32le(ptr, val);
7691 break;
7692 #else
7693 case VT_LLONG:
7694 write64le(ptr, val);
7695 break;
7696 case VT_PTR:
7697 case VT_INT:
7698 if (vtop->r & VT_SYM)
7699 greloc(sec, vtop->sym, c, R_DATA_PTR);
7700 write32le(ptr, val);
7701 break;
7702 #endif
7703 default:
7704 //tcc_internal_error("unexpected type");
7705 break;
7708 vtop--;
7709 } else {
/* local (stack) object: emit a normal store through the value
   stack machinery */
7710 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7711 vswap();
7712 vstore();
7713 vpop();
7717 /* 't' contains the type and storage info. 'c' is the offset of the
7718 object in section 'sec'. If 'sec' is NULL, it means stack based
7719 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7720 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7721 size only evaluation is wanted (only for arrays). */
7722 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7724 int len, n, no_oblock, i;
7725 int size1, align1;
7726 Sym *s, *f;
7727 Sym indexsym;
7728 CType *t1;
7730 /* generate line number info */
7731 if (debug_modes && !p->sec)
7732 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* Optionally parse one scalar element up front (sets DIF_HAVE_ELEM)
   so later branches can inspect its type. Strings are deliberately
   NOT consumed here: committing them to an anonymous symbol too
   early would break the array-of-char special case below. */
7734 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7735 /* In case of strings we have special handling for arrays, so
7736 don't consume them as initializer value (which would commit them
7737 to some anonymous symbol). */
7738 tok != TOK_LSTR && tok != TOK_STR &&
7739 (!(flags & DIF_SIZE_ONLY)
7740 /* a struct may be initialized from a struct of same type, as in
7741 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7742 In that case we need to parse the element in order to check
7743 it for compatibility below */
7744 || (type->t & VT_BTYPE) == VT_STRUCT)
7746 int ncw_prev = nocode_wanted;
7747 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7748 ++nocode_wanted;
7749 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7750 nocode_wanted = ncw_prev;
7751 flags |= DIF_HAVE_ELEM;
7754 if (type->t & VT_ARRAY) {
/* no_oblock: no opening brace was consumed for this level */
7755 no_oblock = 1;
7756 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7757 tok == '{') {
7758 skip('{');
7759 no_oblock = 0;
7762 s = type->ref;
7763 n = s->c;
7764 t1 = pointed_type(type);
7765 size1 = type_size(t1, &align1);
7767 /* only parse strings here if correct type (otherwise: handle
7768 them as ((w)char *) expressions */
7769 if ((tok == TOK_LSTR &&
7770 #ifdef TCC_TARGET_PE
7771 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7772 #else
7773 (t1->t & VT_BTYPE) == VT_INT
7774 #endif
7775 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7776 len = 0;
7777 cstr_reset(&initstr);
7778 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7779 tcc_error("unhandled string literal merging");
/* concatenate adjacent string literals into initstr; the
   terminating NUL of each previous piece is dropped */
7780 while (tok == TOK_STR || tok == TOK_LSTR) {
7781 if (initstr.size)
7782 initstr.size -= size1;
7783 if (tok == TOK_STR)
7784 len += tokc.str.size;
7785 else
7786 len += tokc.str.size / sizeof(nwchar_t);
7787 len--;
7788 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7789 next();
7791 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7792 && tok != TOK_EOF) {
7793 /* Not a lone literal but part of a bigger expression. */
7794 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7795 tokc.str.size = initstr.size;
7796 tokc.str.data = initstr.data;
7797 goto do_init_array;
/* fix flexible array size from the string length */
7800 decl_design_flex(p, s, len);
7801 if (!(flags & DIF_SIZE_ONLY)) {
7802 int nb = n, ch;
7803 if (len < nb)
7804 nb = len;
7805 if (len > nb)
7806 tcc_warning("initializer-string for array is too long");
7807 /* in order to go faster for common case (char
7808 string in global variable, we handle it
7809 specifically */
7810 if (p->sec && size1 == 1) {
7811 init_assert(p, c + nb);
7812 if (!NODATA_WANTED)
7813 memcpy(p->sec->data + c, initstr.data, nb);
7814 } else {
7815 for(i=0;i<n;i++) {
7816 if (i >= nb) {
7817 /* only add trailing zero if enough storage (no
7818 warning in this case since it is standard) */
7819 if (flags & DIF_CLEAR)
7820 break;
7821 if (n - i >= 4) {
7822 init_putz(p, c + i * size1, (n - i) * size1);
7823 break;
7825 ch = 0;
7826 } else if (size1 == 1)
7827 ch = ((unsigned char *)initstr.data)[i];
7828 else
7829 ch = ((nwchar_t *)initstr.data)[i];
7830 vpushi(ch);
7831 init_putv(p, t1, c + i * size1);
7835 } else {
/* generic brace-list array initializer; indexsym tracks the
   current implicit array index for decl_designator */
7837 do_init_array:
7838 indexsym.c = 0;
7839 f = &indexsym;
7841 do_init_list:
7842 /* zero memory once in advance */
7843 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7844 init_putz(p, c, n*size1);
7845 flags |= DIF_CLEAR;
7848 len = 0;
7849 /* GNU extension: if the initializer is empty for a flex array,
7850 it's size is zero. We won't enter the loop, so set the size
7851 now. */
7852 decl_design_flex(p, s, len);
7853 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7854 len = decl_designator(p, type, c, &f, flags, len);
7855 flags &= ~DIF_HAVE_ELEM;
7856 if (type->t & VT_ARRAY) {
7857 ++indexsym.c;
7858 /* special test for multi dimensional arrays (may not
7859 be strictly correct if designators are used at the
7860 same time) */
7861 if (no_oblock && len >= n*size1)
7862 break;
7863 } else {
/* struct/union: advance to the next member (unions take
   only their first member) */
7864 if (s->type.t == VT_UNION)
7865 f = NULL;
7866 else
7867 f = f->next;
7868 if (no_oblock && f == NULL)
7869 break;
7872 if (tok == '}')
7873 break;
7874 skip(',');
7877 if (!no_oblock)
7878 skip('}');
7880 } else if ((flags & DIF_HAVE_ELEM)
7881 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7882 The source type might have VT_CONSTANT set, which is
7883 of course assignable to non-const elements. */
7884 && is_compatible_unqualified_types(type, &vtop->type)) {
7885 goto one_elem;
7887 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7888 no_oblock = 1;
7889 if ((flags & DIF_FIRST) || tok == '{') {
7890 skip('{');
7891 no_oblock = 0;
/* reuse the list loop above with member iteration instead of
   array indexing (size1 = 1: 'len' counts bytes/offsets) */
7893 s = type->ref;
7894 f = s->next;
7895 n = s->c;
7896 size1 = 1;
7897 goto do_init_list;
7899 } else if (tok == '{') {
7900 if (flags & DIF_HAVE_ELEM)
7901 skip(';');
7902 next();
7903 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7904 skip('}');
7906 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7907 /* If we supported only ISO C we wouldn't have to accept calling
7908 this on anything than an array if DIF_SIZE_ONLY (and even then
7909 only on the outermost level, so no recursion would be needed),
7910 because initializing a flex array member isn't supported.
7911 But GNU C supports it, so we need to recurse even into
7912 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7913 /* just skip expression */
7914 if (flags & DIF_HAVE_ELEM)
7915 vpop();
7916 else
7917 skip_or_save_block(NULL);
7919 } else {
7920 if (!(flags & DIF_HAVE_ELEM)) {
7921 /* This should happen only when we haven't parsed
7922 the init element above for fear of committing a
7923 string constant to memory too early. */
7924 if (tok != TOK_STR && tok != TOK_LSTR)
7925 expect("string constant");
7926 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
/* storing zero into already-zeroed local memory is a no-op */
7928 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7929 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7930 && vtop->c.i == 0
7931 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7933 vpop();
7934 else
7935 init_putv(p, type, c);
7939 /* parse an initializer for type 't' if 'has_init' is non zero, and
7940 allocate space in local or global data space ('r' is either
7941 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7942 variable 'v' of scope 'scope' is declared before initializers
7943 are parsed. If 'v' is zero, then a reference to the new object
7944 is put in the value stack. If 'has_init' is 2, a special parsing
7945 is done to handle string constants. */
7946 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7947 int has_init, int v, int global)
7949 int size, align, addr;
7950 TokenString *init_str = NULL;
7952 Section *sec;
7953 Sym *flexible_array;
7954 Sym *sym;
7955 int saved_nocode_wanted = nocode_wanted;
7956 #ifdef CONFIG_TCC_BCHECK
7957 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7958 #endif
7959 init_params p = {0};
7961 /* Always allocate static or global variables */
7962 if (v && (r & VT_VALMASK) == VT_CONST)
7963 nocode_wanted |= DATA_ONLY_WANTED;
7965 flexible_array = NULL;
7966 size = type_size(type, &align);
7968 /* exactly one flexible array may be initialized, either the
7969 toplevel array or the last member of the toplevel struct */
7971 if (size < 0) {
7972 /* If the base type itself was an array type of unspecified size
7973 (like in 'typedef int arr[]; arr x = {1};') then we will
7974 overwrite the unknown size by the real one for this decl.
7975 We need to unshare the ref symbol holding that size. */
7976 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
7977 p.flex_array_ref = type->ref;
7979 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
7980 Sym *field = type->ref->next;
7981 if (field) {
7982 while (field->next)
7983 field = field->next;
7984 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
7985 flexible_array = field;
7986 p.flex_array_ref = field->type.ref;
7987 size = -1;
7992 if (size < 0) {
7993 /* If unknown size, do a dry-run 1st pass */
7994 if (!has_init)
7995 tcc_error("unknown type size");
7996 if (has_init == 2) {
7997 /* only get strings */
7998 init_str = tok_str_alloc();
7999 while (tok == TOK_STR || tok == TOK_LSTR) {
8000 tok_str_add_tok(init_str);
8001 next();
8003 tok_str_add(init_str, -1);
8004 tok_str_add(init_str, 0);
8005 } else
8006 skip_or_save_block(&init_str);
8007 unget_tok(0);
8009 /* compute size */
/* replay the saved tokens once in size-only mode, then rewind
   the macro stream so the real initializer pass below re-parses
   the same tokens */
8010 begin_macro(init_str, 1);
8011 next();
8012 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8013 /* prepare second initializer parsing */
8014 macro_ptr = init_str->str;
8015 next();
8017 /* if still unknown size, error */
8018 size = type_size(type, &align);
8019 if (size < 0)
8020 tcc_error("unknown type size");
8022 /* If there's a flex member and it was used in the initializer
8023 adjust size. */
8024 if (flexible_array && flexible_array->type.ref->c > 0)
8025 size += flexible_array->type.ref->c
8026 * pointed_size(&flexible_array->type);
8029 /* take into account specified alignment if bigger */
8030 if (ad->a.aligned) {
8031 int speca = 1 << (ad->a.aligned - 1);
8032 if (speca > align)
8033 align = speca;
8034 } else if (ad->a.packed) {
8035 align = 1;
8038 if (!v && NODATA_WANTED)
8039 size = 0, align = 1;
8041 if ((r & VT_VALMASK) == VT_LOCAL) {
/* stack allocation: grow 'loc' downwards, honoring alignment */
8042 sec = NULL;
8043 #ifdef CONFIG_TCC_BCHECK
8044 if (bcheck && v) {
8045 /* add padding between stack variables for bound checking */
8046 loc -= align;
8048 #endif
8049 loc = (loc - size) & -align;
8050 addr = loc;
8051 p.local_offset = addr + size;
8052 #ifdef CONFIG_TCC_BCHECK
8053 if (bcheck && v) {
8054 /* add padding between stack variables for bound checking */
8055 loc -= align;
8057 #endif
8058 if (v) {
8059 /* local variable */
8060 #ifdef CONFIG_TCC_ASM
8061 if (ad->asm_label) {
8062 int reg = asm_parse_regvar(ad->asm_label);
8063 if (reg >= 0)
8064 r = (r & ~VT_VALMASK) | reg;
8066 #endif
8067 sym = sym_push(v, type, r, addr);
8068 if (ad->cleanup_func) {
/* register a cleanup (attribute((cleanup))) record for this
   variable in the current scope */
8069 Sym *cls = sym_push2(&all_cleanups,
8070 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8071 cls->prev_tok = sym;
8072 cls->next = ad->cleanup_func;
8073 cls->ncl = cur_scope->cl.s;
8074 cur_scope->cl.s = cls;
8077 sym->a = ad->a;
8078 } else {
8079 /* push local reference */
8080 vset(type, r, addr);
8082 } else {
8083 sym = NULL;
8084 if (v && global) {
8085 /* see if the symbol was already defined */
8086 sym = sym_find(v);
8087 if (sym) {
8088 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8089 && sym->type.ref->c > type->ref->c) {
8090 /* flex array was already declared with explicit size
8091 extern int arr[10];
8092 int arr[] = { 1,2,3 }; */
8093 type->ref->c = sym->type.ref->c;
8094 size = type_size(type, &align);
8096 patch_storage(sym, ad, type);
8097 /* we accept several definitions of the same global variable. */
8098 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8099 goto no_alloc;
8103 /* allocate symbol in corresponding section */
8104 sec = ad->section;
8105 if (!sec) {
/* choose a default section: rodata for const objects, data
   for initialized ones, bss with -fno-common, otherwise
   SHN_COMMON */
8106 CType *tp = type;
8107 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8108 tp = &tp->ref->type;
8109 if (tp->t & VT_CONSTANT) {
8110 sec = rodata_section;
8111 } else if (has_init) {
8112 sec = data_section;
8113 /*if (tcc_state->g_debug & 4)
8114 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8115 } else if (tcc_state->nocommon)
8116 sec = bss_section;
8119 if (sec) {
8120 addr = section_add(sec, size, align);
8121 #ifdef CONFIG_TCC_BCHECK
8122 /* add padding if bound check */
8123 if (bcheck)
8124 section_add(sec, 1, 1);
8125 #endif
8126 } else {
8127 addr = align; /* SHN_COMMON is special, symbol value is align */
8128 sec = common_section;
8131 if (v) {
8132 if (!sym) {
8133 sym = sym_push(v, type, r | VT_SYM, 0);
8134 patch_storage(sym, ad, NULL);
8136 /* update symbol definition */
8137 put_extern_sym(sym, sec, addr, size);
8138 } else {
8139 /* push global reference */
8140 vpush_ref(type, sec, addr, size);
8141 sym = vtop->sym;
8142 vtop->r |= r;
8145 #ifdef CONFIG_TCC_BCHECK
8146 /* handles bounds now because the symbol must be defined
8147 before for the relocation */
8148 if (bcheck) {
8149 addr_t *bounds_ptr;
8151 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8152 /* then add global bound info */
8153 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8154 bounds_ptr[0] = 0; /* relocated */
8155 bounds_ptr[1] = size;
8157 #endif
8160 if (type->t & VT_VLA) {
8161 int a;
8163 if (NODATA_WANTED)
8164 goto no_alloc;
8166 /* save before-VLA stack pointer if needed */
8167 if (cur_scope->vla.num == 0) {
8168 if (cur_scope->prev && cur_scope->prev->vla.num) {
8169 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8170 } else {
8171 gen_vla_sp_save(loc -= PTR_SIZE);
8172 cur_scope->vla.locorig = loc;
8176 vpush_type_size(type, &a);
8177 gen_vla_alloc(type, a);
8178 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8179 /* on _WIN64, because of the function args scratch area, the
8180 result of alloca differs from RSP and is returned in RAX. */
8181 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8182 #endif
8183 gen_vla_sp_save(addr);
8184 cur_scope->vla.loc = addr;
8185 cur_scope->vla.num++;
8186 } else if (has_init) {
8187 p.sec = sec;
8188 decl_initializer(&p, type, addr, DIF_FIRST);
8189 /* patch flexible array member size back to -1, */
8190 /* for possible subsequent similar declarations */
8191 if (flexible_array)
8192 flexible_array->type.ref->c = -1;
8195 no_alloc:
8196 /* restore parse state if needed */
8197 if (init_str) {
8198 end_macro();
8199 next();
8202 nocode_wanted = saved_nocode_wanted;
8205 /* generate vla code saved in post_type() */
/* Recursively (innermost dimension first, via the type.ref chain)
   evaluates the saved size expression of a VLA-typed parameter and
   stores the resulting byte count in a fresh stack slot whose offset
   is recorded in arg->type.ref->c. */
8206 static void func_vla_arg_code(Sym *arg)
8208 int align;
8209 TokenString *vla_array_tok = NULL;
8211 if (arg->type.ref)
8212 func_vla_arg_code(arg->type.ref);
8214 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* reserve an int-sized stack slot for the computed size */
8215 loc -= type_size(&int_type, &align);
8216 loc &= -align;
8217 arg->type.ref->c = loc;
/* replay the saved dimension expression tokens through the
   macro stream and evaluate them */
8219 unget_tok(0);
8220 vla_array_tok = tok_str_alloc();
8221 vla_array_tok->str = arg->type.ref->vla_array_str;
8222 begin_macro(vla_array_tok, 1);
8223 next();
8224 gexpr();
8225 end_macro();
8226 next();
/* total size = dimension * element size; store into the slot */
8227 vpush_type_size(&arg->type.ref->type, &align);
8228 gen_op('*');
8229 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8230 vswap();
8231 vstore();
8232 vpop();
8236 static void func_vla_arg(Sym *sym)
8238 Sym *arg;
8240 for (arg = sym->type.ref->next; arg; arg = arg->next)
8241 if (arg->type.t & VT_VLA)
8242 func_vla_arg_code(arg);
8245 /* parse a function defined by symbol 'sym' and generate its code in
8246 'cur_text_section' */
8247 static void gen_function(Sym *sym)
8249 struct scope f = { 0 };
8250 cur_scope = root_scope = &f;
8251 nocode_wanted = 0;
8252 ind = cur_text_section->data_offset;
8253 if (sym->a.aligned) {
/* pad with nops up to the requested function alignment */
8254 size_t newoff = section_add(cur_text_section, 0,
8255 1 << (sym->a.aligned - 1));
8256 gen_fill_nops(newoff - ind);
8258 /* NOTE: we patch the symbol size later */
8259 put_extern_sym(sym, cur_text_section, ind, 0);
/* constructors/destructors are registered in the init/fini arrays */
8260 if (sym->type.ref->f.func_ctor)
8261 add_array (tcc_state, ".init_array", sym->c);
8262 if (sym->type.ref->f.func_dtor)
8263 add_array (tcc_state, ".fini_array", sym->c);
8265 funcname = get_tok_str(sym->v, NULL);
8266 func_ind = ind;
8267 func_vt = sym->type.ref->type;
8268 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8270 /* put debug symbol */
8271 tcc_debug_funcstart(tcc_state, sym);
8272 /* push a dummy symbol to enable local sym storage */
8273 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8274 local_scope = 1; /* for function parameters */
8275 gfunc_prolog(sym);
8276 tcc_debug_prolog_epilog(tcc_state, 0);
8277 local_scope = 0;
8278 rsym = 0;
8279 clear_temp_local_var_list();
8280 func_vla_arg(sym);
/* compile the function body; rsym collects forward jumps of
   'return' statements, resolved just before the epilog */
8281 block(0);
8282 gsym(rsym);
8283 nocode_wanted = 0;
8284 /* reset local stack */
8285 pop_local_syms(NULL, 0);
8286 tcc_debug_prolog_epilog(tcc_state, 1);
8287 gfunc_epilog();
8288 cur_text_section->data_offset = ind;
8289 local_scope = 0;
8290 label_pop(&global_label_stack, NULL, 0);
8291 sym_pop(&all_cleanups, NULL, 0);
8292 /* patch symbol size */
8293 elfsym(sym)->st_size = ind - func_ind;
8294 /* end of function */
8295 tcc_debug_funcend(tcc_state, ind - func_ind);
8296 /* It's better to crash than to generate wrong code */
8297 cur_text_section = NULL;
8298 funcname = ""; /* for safety */
8299 func_vt.t = VT_VOID; /* for safety */
8300 func_var = 0; /* for safety */
8301 ind = 0; /* for safety */
8302 func_ind = -1;
8303 nocode_wanted = DATA_ONLY_WANTED;
8304 check_vstack();
8305 /* do this after funcend debug info */
8306 next();
/* Emit code for all inline functions that turned out to be referenced
   (or forced external); iterate to a fixpoint because generating one
   inline function may reference another. */
8309 static void gen_inline_functions(TCCState *s)
8311 Sym *sym;
8312 int inline_generated, i;
8313 struct InlineFunc *fn;
/* use a pseudo buffer-file so error messages have a location */
8315 tcc_open_bf(s, ":inline:", 0);
8316 /* iterate while inline function are referenced */
8317 do {
8318 inline_generated = 0;
8319 for (i = 0; i < s->nb_inline_fns; ++i) {
8320 fn = s->inline_fns[i];
8321 sym = fn->sym;
8322 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8323 /* the function was used or forced (and then not internal):
8324 generate its code and convert it to a normal function */
8325 fn->sym = NULL;
8326 tcc_debug_putfile(s, fn->filename);
/* replay the saved token stream of the function body */
8327 begin_macro(fn->func_str, 1);
8328 next();
8329 cur_text_section = text_section;
8330 gen_function(sym);
8331 end_macro();
8333 inline_generated = 1;
8336 } while (inline_generated);
8337 tcc_close();
8340 static void free_inline_functions(TCCState *s)
8342 int i;
8343 /* free tokens of unused inline functions */
8344 for (i = 0; i < s->nb_inline_fns; ++i) {
8345 struct InlineFunc *fn = s->inline_fns[i];
8346 if (fn->sym)
8347 tok_str_free(fn->func_str);
8349 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8352 static void do_Static_assert(void){
8353 CString error_str;
8354 int c;
8356 next();
8357 skip('(');
8358 c = expr_const();
8360 if (tok == ')') {
8361 if (!c)
8362 tcc_error("_Static_assert fail");
8363 next();
8364 goto static_assert_out;
8367 skip(',');
8368 parse_mult_str(&error_str, "string constant");
8369 if (c == 0)
8370 tcc_error("%s", (char *)error_str.data);
8371 cstr_free(&error_str);
8372 skip(')');
8373 static_assert_out:
8374 skip(';');
8377 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8378 or VT_CMP if parsing old style parameter list
8379 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Returns non-zero only in the VT_JMP (for-declaration) case when a
   declaration was actually parsed. */
8380 static int decl(int l)
8382 int v, has_init, r, oldint;
8383 CType type, btype;
8384 Sym *sym;
8385 AttributeDef ad, adbase;
8387 while (1) {
8388 if (tok == TOK_STATIC_ASSERT) {
8389 do_Static_assert();
8390 continue;
8393 oldint = 0;
8394 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
/* no type specifier: handle ';', global asm, K&R implicit int,
   or stop parsing at this level */
8395 if (l == VT_JMP)
8396 return 0;
8397 /* skip redundant ';' if not in old parameter decl scope */
8398 if (tok == ';' && l != VT_CMP) {
8399 next();
8400 continue;
8402 if (l != VT_CONST)
8403 break;
8404 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8405 /* global asm block */
8406 asm_global_instr();
8407 continue;
8409 if (tok >= TOK_UIDENT) {
8410 /* special test for old K&R protos without explicit int
8411 type. Only accepted when defining global data */
8412 btype.t = VT_INT;
8413 oldint = 1;
8414 } else {
8415 if (tok != TOK_EOF)
8416 expect("declaration");
8417 break;
8421 if (tok == ';') {
/* type-only declaration: legal for struct/union/enum tags */
8422 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8423 v = btype.ref->v;
8424 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8425 tcc_warning("unnamed struct/union that defines no instances");
8426 next();
8427 continue;
8429 if (IS_ENUM(btype.t)) {
8430 next();
8431 continue;
8435 while (1) { /* iterate thru each declaration */
8436 type = btype;
8437 ad = adbase;
8438 type_decl(&type, &ad, &v, TYPE_DIRECT);
8439 #if 0
8441 char buf[500];
8442 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8443 printf("type = '%s'\n", buf);
8445 #endif
8446 if ((type.t & VT_BTYPE) == VT_FUNC) {
8447 if ((type.t & VT_STATIC) && (l != VT_CONST))
8448 tcc_error("function without file scope cannot be static");
8449 /* if old style function prototype, we accept a
8450 declaration list */
8451 sym = type.ref;
8452 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8453 func_vt = type;
8454 decl(VT_CMP);
8456 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8457 if (sym->f.func_alwinl
8458 && ((type.t & (VT_EXTERN | VT_INLINE))
8459 == (VT_EXTERN | VT_INLINE))) {
8460 /* always_inline functions must be handled as if they
8461 don't generate multiple global defs, even if extern
8462 inline, i.e. GNU inline semantics for those. Rewrite
8463 them into static inline. */
8464 type.t &= ~VT_EXTERN;
8465 type.t |= VT_STATIC;
8467 #endif
8468 /* always compile 'extern inline' */
8469 if (type.t & VT_EXTERN)
8470 type.t &= ~VT_INLINE;
8472 } else if (oldint) {
8473 tcc_warning("type defaults to int");
8476 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8477 ad.asm_label = asm_label_instr();
8478 /* parse one last attribute list, after asm label */
8479 parse_attribute(&ad);
8480 #if 0
8481 /* gcc does not allow __asm__("label") with function definition,
8482 but why not ... */
8483 if (tok == '{')
8484 expect(";");
8485 #endif
8488 #ifdef TCC_TARGET_PE
8489 if (ad.a.dllimport || ad.a.dllexport) {
8490 if (type.t & VT_STATIC)
8491 tcc_error("cannot have dll linkage with static");
8492 if (type.t & VT_TYPEDEF) {
8493 tcc_warning("'%s' attribute ignored for typedef",
8494 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8495 (ad.a.dllexport = 0, "dllexport"));
8496 } else if (ad.a.dllimport) {
8497 if ((type.t & VT_BTYPE) == VT_FUNC)
8498 ad.a.dllimport = 0;
8499 else
8500 type.t |= VT_EXTERN;
8503 #endif
8504 if (tok == '{') {
/* function definition */
8505 if (l != VT_CONST)
8506 tcc_error("cannot use local functions");
8507 if ((type.t & VT_BTYPE) != VT_FUNC)
8508 expect("function definition");
8510 /* reject abstract declarators in function definition
8511 make old style params without decl have int type */
8512 sym = type.ref;
8513 while ((sym = sym->next) != NULL) {
8514 if (!(sym->v & ~SYM_FIELD))
8515 expect("identifier");
8516 if (sym->type.t == VT_VOID)
8517 sym->type = int_type;
8520 /* apply post-declaraton attributes */
8521 merge_funcattr(&type.ref->f, &ad.f);
8523 /* put function symbol */
8524 type.t &= ~VT_EXTERN;
8525 sym = external_sym(v, &type, 0, &ad);
8527 /* static inline functions are just recorded as a kind
8528 of macro. Their code will be emitted at the end of
8529 the compilation unit only if they are used */
8530 if (sym->type.t & VT_INLINE) {
8531 struct InlineFunc *fn;
8532 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8533 strcpy(fn->filename, file->filename);
8534 fn->sym = sym;
8535 skip_or_save_block(&fn->func_str);
8536 dynarray_add(&tcc_state->inline_fns,
8537 &tcc_state->nb_inline_fns, fn);
8538 } else {
8539 /* compute text section */
8540 cur_text_section = ad.section;
8541 if (!cur_text_section)
8542 cur_text_section = text_section;
8543 gen_function(sym);
8545 break;
8546 } else {
8547 if (l == VT_CMP) {
8548 /* find parameter in function parameter list */
8549 for (sym = func_vt.ref->next; sym; sym = sym->next)
8550 if ((sym->v & ~SYM_FIELD) == v)
8551 goto found;
8552 tcc_error("declaration for parameter '%s' but no such parameter",
8553 get_tok_str(v, NULL));
8554 found:
8555 if (type.t & VT_STORAGE) /* 'register' is okay */
8556 tcc_error("storage class specified for '%s'",
8557 get_tok_str(v, NULL));
8558 if (sym->type.t != VT_VOID)
8559 tcc_error("redefinition of parameter '%s'",
8560 get_tok_str(v, NULL));
8561 convert_parameter_type(&type);
8562 sym->type = type;
8563 } else if (type.t & VT_TYPEDEF) {
8564 /* save typedefed type */
8565 /* XXX: test storage specifiers ? */
8566 sym = sym_find(v);
8567 if (sym && sym->sym_scope == local_scope) {
8568 if (!is_compatible_types(&sym->type, &type)
8569 || !(sym->type.t & VT_TYPEDEF))
8570 tcc_error("incompatible redefinition of '%s'",
8571 get_tok_str(v, NULL));
8572 sym->type = type;
8573 } else {
8574 sym = sym_push(v, &type, 0, 0);
8576 sym->a = ad.a;
8577 if ((type.t & VT_BTYPE) == VT_FUNC)
8578 merge_funcattr(&sym->type.ref->f, &ad.f);
8579 if (debug_modes)
8580 tcc_debug_typedef (tcc_state, sym);
8581 } else if ((type.t & VT_BTYPE) == VT_VOID
8582 && !(type.t & VT_EXTERN)) {
8583 tcc_error("declaration of void object");
8584 } else {
/* ordinary variable or function declaration */
8585 r = 0;
8586 if ((type.t & VT_BTYPE) == VT_FUNC) {
8587 /* external function definition */
8588 /* specific case for func_call attribute */
8589 merge_funcattr(&type.ref->f, &ad.f);
8590 } else if (!(type.t & VT_ARRAY)) {
8591 /* not lvalue if array */
8592 r |= VT_LVAL;
8594 has_init = (tok == '=');
8595 if (has_init && (type.t & VT_VLA))
8596 tcc_error("variable length array cannot be initialized");
8597 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8598 || (type.t & VT_BTYPE) == VT_FUNC
8599 /* as with GCC, uninitialized global arrays with no size
8600 are considered extern: */
8601 || ((type.t & VT_ARRAY) && !has_init
8602 && l == VT_CONST && type.ref->c < 0)
8604 /* external variable or function */
8605 type.t |= VT_EXTERN;
8606 sym = external_sym(v, &type, r, &ad);
8607 if (ad.alias_target) {
8608 /* Aliases need to be emitted when their target
8609 symbol is emitted, even if perhaps unreferenced.
8610 We only support the case where the base is
8611 already defined, otherwise we would need
8612 deferring to emit the aliases until the end of
8613 the compile unit. */
8614 Sym *alias_target = sym_find(ad.alias_target);
8615 ElfSym *esym = elfsym(alias_target);
8616 if (!esym)
8617 tcc_error("unsupported forward __alias__ attribute");
8618 put_extern_sym2(sym, esym->st_shndx,
8619 esym->st_value, esym->st_size, 1);
8621 } else {
8622 if (l == VT_CONST || (type.t & VT_STATIC))
8623 r |= VT_CONST;
8624 else
8625 r |= VT_LOCAL;
8626 if (has_init)
8627 next();
8628 else if (l == VT_CONST)
8629 /* uninitialized global variables may be overridden */
8630 type.t |= VT_EXTERN;
8631 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8634 if (tok != ',') {
8635 if (l == VT_JMP)
8636 return 1;
8637 skip(';');
8638 break;
8640 next();
8644 return 0;
8647 /* ------------------------------------------------------------------------- */
8648 #undef gjmp_addr
8649 #undef gjmp
8650 /* ------------------------------------------------------------------------- */